diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3075904 --- /dev/null +++ b/.gitignore @@ -0,0 +1,127 @@ +# .gitignore template by: https://gist.github.com/GhostofGoes/94580e76cd251972b15b4821c8a06f59#file-gitignore + +# Editors +.vscode/ +.idea/ + +# Vagrant +.vagrant/ + +# Mac/OSX +.DS_Store + +# Windows +Thumbs.db + +# Source for the following rules: https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 5c80254..0000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "[python]": { - "editor.defaultFormatter": "ms-python.autopep8" - }, - "python.formatting.provider": "none" -} diff --git a/__pycache__/types.cpython-311.pyc b/__pycache__/types.cpython-311.pyc deleted file mode 100644 index 72d9b0c..0000000 Binary files a/__pycache__/types.cpython-311.pyc and /dev/null differ diff --git a/__pycache__/typings.cpython-311.pyc b/__pycache__/typings.cpython-311.pyc deleted file mode 100644 index 1ae8753..0000000 Binary files a/__pycache__/typings.cpython-311.pyc and /dev/null differ diff --git a/__pycache__/utils.cpython-311.pyc b/__pycache__/utils.cpython-311.pyc deleted file mode 100644 index e681ead..0000000 Binary files a/__pycache__/utils.cpython-311.pyc and /dev/null differ diff --git a/main.py b/main.py index e410070..7e9c34c 100644 --- a/main.py +++ b/main.py @@ -114,4 +114,4 @@ def handle_disconnect(): if __name__ == '__main__': - socketio.run(app, debug=True) + socketio.run(app, debug=True, allow_unsafe_werkzeug=True) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..d41d3b7 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,2 @@ +Flask==2.3.3 +flask-socketio \ No newline at end of file diff --git a/venv_flaskchat/bin/Activate.ps1 b/venv_flaskchat/bin/Activate.ps1 deleted file mode 100644 index b49d77b..0000000 --- a/venv_flaskchat/bin/Activate.ps1 +++ /dev/null @@ -1,247 +0,0 @@ -<# -.Synopsis -Activate a Python virtual environment for the current PowerShell session. 
- -.Description -Pushes the python executable for a virtual environment to the front of the -$Env:PATH environment variable and sets the prompt to signify that you are -in a Python virtual environment. Makes use of the command line switches as -well as the `pyvenv.cfg` file values present in the virtual environment. - -.Parameter VenvDir -Path to the directory that contains the virtual environment to activate. The -default value for this is the parent of the directory that the Activate.ps1 -script is located within. - -.Parameter Prompt -The prompt prefix to display when this virtual environment is activated. By -default, this prompt is the name of the virtual environment folder (VenvDir) -surrounded by parentheses and followed by a single space (ie. '(.venv) '). - -.Example -Activate.ps1 -Activates the Python virtual environment that contains the Activate.ps1 script. - -.Example -Activate.ps1 -Verbose -Activates the Python virtual environment that contains the Activate.ps1 script, -and shows extra information about the activation as it executes. - -.Example -Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv -Activates the Python virtual environment located in the specified location. - -.Example -Activate.ps1 -Prompt "MyPython" -Activates the Python virtual environment that contains the Activate.ps1 script, -and prefixes the current prompt with the specified string (surrounded in -parentheses) while the virtual environment is active. - -.Notes -On Windows, it may be required to enable this Activate.ps1 script by setting the -execution policy for the user. You can do this by issuing the following PowerShell -command: - -PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - -For more information on Execution Policies: -https://go.microsoft.com/fwlink/?LinkID=135170 - -#> -Param( - [Parameter(Mandatory = $false)] - [String] - $VenvDir, - [Parameter(Mandatory = $false)] - [String] - $Prompt -) - -<# Function declarations --------------------------------------------------- #> - -<# -.Synopsis -Remove all shell session elements added by the Activate script, including the -addition of the virtual environment's Python executable from the beginning of -the PATH variable. - -.Parameter NonDestructive -If present, do not remove this function from the global namespace for the -session. - -#> -function global:deactivate ([switch]$NonDestructive) { - # Revert to original values - - # The prior prompt: - if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { - Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt - Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT - } - - # The prior PYTHONHOME: - if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { - Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME - Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME - } - - # The prior PATH: - if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { - Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH - Remove-Item -Path Env:_OLD_VIRTUAL_PATH - } - - # Just remove the VIRTUAL_ENV altogether: - if (Test-Path -Path Env:VIRTUAL_ENV) { - Remove-Item -Path env:VIRTUAL_ENV - } - - # Just remove VIRTUAL_ENV_PROMPT altogether. 
- if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { - Remove-Item -Path env:VIRTUAL_ENV_PROMPT - } - - # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: - if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { - Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force - } - - # Leave deactivate function in the global namespace if requested: - if (-not $NonDestructive) { - Remove-Item -Path function:deactivate - } -} - -<# -.Description -Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the -given folder, and returns them in a map. - -For each line in the pyvenv.cfg file, if that line can be parsed into exactly -two strings separated by `=` (with any amount of whitespace surrounding the =) -then it is considered a `key = value` line. The left hand string is the key, -the right hand is the value. - -If the value starts with a `'` or a `"` then the first and last character is -stripped from the value before being captured. - -.Parameter ConfigDir -Path to the directory that contains the `pyvenv.cfg` file. -#> -function Get-PyVenvConfig( - [String] - $ConfigDir -) { - Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" - - # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). - $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue - - # An empty map will be returned if no config file is found. - $pyvenvConfig = @{ } - - if ($pyvenvConfigPath) { - - Write-Verbose "File exists, parse `key = value` lines" - $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath - - $pyvenvConfigContent | ForEach-Object { - $keyval = $PSItem -split "\s*=\s*", 2 - if ($keyval[0] -and $keyval[1]) { - $val = $keyval[1] - - # Remove extraneous quotations around a string value. - if ("'""".Contains($val.Substring(0, 1))) { - $val = $val.Substring(1, $val.Length - 2) - } - - $pyvenvConfig[$keyval[0]] = $val - Write-Verbose "Adding Key: '$($keyval[0])'='$val'" - } - } - } - return $pyvenvConfig -} - - -<# Begin Activate script --------------------------------------------------- #> - -# Determine the containing directory of this script -$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition -$VenvExecDir = Get-Item -Path $VenvExecPath - -Write-Verbose "Activation script is located in path: '$VenvExecPath'" -Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" -Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" - -# Set values required in priority: CmdLine, ConfigFile, Default -# First, get the location of the virtual environment, it might not be -# VenvExecDir if specified on the command line. -if ($VenvDir) { - Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" -} -else { - Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." - $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") - Write-Verbose "VenvDir=$VenvDir" -} - -# Next, read the `pyvenv.cfg` file to determine any required value such -# as `prompt`. -$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir - -# Next, set the prompt from the command line, or the config file, or -# just use the name of the virtual environment folder. 
-if ($Prompt) { - Write-Verbose "Prompt specified as argument, using '$Prompt'" -} -else { - Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" - if ($pyvenvCfg -and $pyvenvCfg['prompt']) { - Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" - $Prompt = $pyvenvCfg['prompt']; - } - else { - Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" - Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" - $Prompt = Split-Path -Path $venvDir -Leaf - } -} - -Write-Verbose "Prompt = '$Prompt'" -Write-Verbose "VenvDir='$VenvDir'" - -# Deactivate any currently active virtual environment, but leave the -# deactivate function in place. -deactivate -nondestructive - -# Now set the environment variable VIRTUAL_ENV, used by many tools to determine -# that there is an activated venv. -$env:VIRTUAL_ENV = $VenvDir - -if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { - - Write-Verbose "Setting prompt to '$Prompt'" - - # Set the prompt to include the env name - # Make sure _OLD_VIRTUAL_PROMPT is global - function global:_OLD_VIRTUAL_PROMPT { "" } - Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT - New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt - - function global:prompt { - Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " - _OLD_VIRTUAL_PROMPT - } - $env:VIRTUAL_ENV_PROMPT = $Prompt -} - -# Clear PYTHONHOME -if (Test-Path -Path Env:PYTHONHOME) { - Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME - Remove-Item -Path Env:PYTHONHOME -} - -# Add the venv to the PATH -Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH -$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/venv_flaskchat/bin/activate b/venv_flaskchat/bin/activate deleted file mode 100644 index 344cbd3..0000000 --- a/venv_flaskchat/bin/activate +++ /dev/null @@ -1,69 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# you cannot run it directly - -deactivate () { - # reset old environment variables - if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then - PATH="${_OLD_VIRTUAL_PATH:-}" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then - PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # This should detect bash and zsh, which have a hash command that must - # be called to get it to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r 2> /dev/null - fi - - if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then - PS1="${_OLD_VIRTUAL_PS1:-}" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - unset VIRTUAL_ENV_PROMPT - if [ ! "${1:-}" = "nondestructive" ] ; then - # Self destruct! 
- unset -f deactivate - fi -} - -# unset irrelevant variables -deactivate nondestructive - -VIRTUAL_ENV="/Users/menardmaranan/Movies/FlaskChat/venv_flaskchat" -export VIRTUAL_ENV - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/bin:$PATH" -export PATH - -# unset PYTHONHOME if set -# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) -# could use `if (set -u; : $PYTHONHOME) ;` in bash -if [ -n "${PYTHONHOME:-}" ] ; then - _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" - unset PYTHONHOME -fi - -if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then - _OLD_VIRTUAL_PS1="${PS1:-}" - PS1="(venv_flaskchat) ${PS1:-}" - export PS1 - VIRTUAL_ENV_PROMPT="(venv_flaskchat) " - export VIRTUAL_ENV_PROMPT -fi - -# This should detect bash and zsh, which have a hash command that must -# be called to get it to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r 2> /dev/null -fi diff --git a/venv_flaskchat/bin/activate.csh b/venv_flaskchat/bin/activate.csh deleted file mode 100644 index e256bfe..0000000 --- a/venv_flaskchat/bin/activate.csh +++ /dev/null @@ -1,26 +0,0 @@ -# This file must be used with "source bin/activate.csh" *from csh*. -# You cannot run it directly. -# Created by Davide Di Blasi . -# Ported to Python 3.3 venv by Andrew Svetlov - -alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' - -# Unset irrelevant variables. -deactivate nondestructive - -setenv VIRTUAL_ENV "/Users/menardmaranan/Movies/FlaskChat/venv_flaskchat" - -set _OLD_VIRTUAL_PATH="$PATH" -setenv PATH "$VIRTUAL_ENV/bin:$PATH" - - -set _OLD_VIRTUAL_PROMPT="$prompt" - -if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then - set prompt = "(venv_flaskchat) $prompt" - setenv VIRTUAL_ENV_PROMPT "(venv_flaskchat) " -endif - -alias pydoc python -m pydoc - -rehash diff --git a/venv_flaskchat/bin/activate.fish b/venv_flaskchat/bin/activate.fish deleted file mode 100644 index ef15994..0000000 --- a/venv_flaskchat/bin/activate.fish +++ /dev/null @@ -1,69 +0,0 @@ -# This file must be used with "source /bin/activate.fish" *from fish* -# (https://fishshell.com/); you cannot run it directly. - -function deactivate -d "Exit virtual environment and return to normal shell environment" - # reset old environment variables - if test -n "$_OLD_VIRTUAL_PATH" - set -gx PATH $_OLD_VIRTUAL_PATH - set -e _OLD_VIRTUAL_PATH - end - if test -n "$_OLD_VIRTUAL_PYTHONHOME" - set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME - set -e _OLD_VIRTUAL_PYTHONHOME - end - - if test -n "$_OLD_FISH_PROMPT_OVERRIDE" - set -e _OLD_FISH_PROMPT_OVERRIDE - # prevents error when using nested fish instances (Issue #93858) - if functions -q _old_fish_prompt - functions -e fish_prompt - functions -c _old_fish_prompt fish_prompt - functions -e _old_fish_prompt - end - end - - set -e VIRTUAL_ENV - set -e VIRTUAL_ENV_PROMPT - if test "$argv[1]" != "nondestructive" - # Self-destruct! - functions -e deactivate - end -end - -# Unset irrelevant variables. -deactivate nondestructive - -set -gx VIRTUAL_ENV "/Users/menardmaranan/Movies/FlaskChat/venv_flaskchat" - -set -gx _OLD_VIRTUAL_PATH $PATH -set -gx PATH "$VIRTUAL_ENV/bin" $PATH - -# Unset PYTHONHOME if set. 
-if set -q PYTHONHOME - set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME - set -e PYTHONHOME -end - -if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" - # fish uses a function instead of an env var to generate the prompt. - - # Save the current fish_prompt function as the function _old_fish_prompt. - functions -c fish_prompt _old_fish_prompt - - # With the original prompt function renamed, we can override with our own. - function fish_prompt - # Save the return status of the last command. - set -l old_status $status - - # Output the venv prompt; color taken from the blue of the Python logo. - printf "%s%s%s" (set_color 4B8BBE) "(venv_flaskchat) " (set_color normal) - - # Restore the return status of the previous command. - echo "exit $old_status" | . - # Output the original/"old" prompt. - _old_fish_prompt - end - - set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" - set -gx VIRTUAL_ENV_PROMPT "(venv_flaskchat) " -end diff --git a/venv_flaskchat/bin/flask b/venv_flaskchat/bin/flask deleted file mode 100755 index 9b3eb74..0000000 --- a/venv_flaskchat/bin/flask +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/menardmaranan/Movies/FlaskChat/venv_flaskchat/bin/python3.11 -# -*- coding: utf-8 -*- -import re -import sys -from flask.cli import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv_flaskchat/bin/pip b/venv_flaskchat/bin/pip deleted file mode 100755 index cff674d..0000000 --- a/venv_flaskchat/bin/pip +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/menardmaranan/Movies/FlaskChat/venv_flaskchat/bin/python3.11 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv_flaskchat/bin/pip3 b/venv_flaskchat/bin/pip3 deleted file mode 100755 index cff674d..0000000 --- a/venv_flaskchat/bin/pip3 +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/menardmaranan/Movies/FlaskChat/venv_flaskchat/bin/python3.11 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv_flaskchat/bin/pip3.11 b/venv_flaskchat/bin/pip3.11 deleted file mode 100755 index cff674d..0000000 --- a/venv_flaskchat/bin/pip3.11 +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/menardmaranan/Movies/FlaskChat/venv_flaskchat/bin/python3.11 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv_flaskchat/bin/python b/venv_flaskchat/bin/python deleted file mode 120000 index 6e7f3c7..0000000 --- a/venv_flaskchat/bin/python +++ /dev/null @@ -1 +0,0 @@ -python3.11 \ No newline at end of file diff --git a/venv_flaskchat/bin/python3 b/venv_flaskchat/bin/python3 deleted file mode 120000 index 6e7f3c7..0000000 --- a/venv_flaskchat/bin/python3 +++ /dev/null @@ -1 +0,0 @@ -python3.11 \ No newline at end of file diff --git a/venv_flaskchat/bin/python3.11 b/venv_flaskchat/bin/python3.11 deleted file mode 120000 index 3cf1fbd..0000000 --- a/venv_flaskchat/bin/python3.11 +++ /dev/null @@ -1 +0,0 @@ -/opt/homebrew/opt/python@3.11/bin/python3.11 \ No newline at end of file diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/INSTALLER 
deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/LICENSE.rst b/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/LICENSE.rst deleted file mode 100644 index 9d227a0..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/METADATA deleted file mode 100644 index a99e52f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/METADATA +++ /dev/null @@ -1,118 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask -Version: 2.3.2 -Summary: A simple framework for building complex web applications. 
-Author-email: Armin Ronacher -Maintainer-email: Pallets -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://flask.palletsprojects.com/ -Project-URL: Changes, https://flask.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/flask/ -Project-URL: Issue Tracker, https://github.com/pallets/flask/issues/ -Project-URL: Chat, https://discord.gg/pallets -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Framework :: Flask -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: Werkzeug (>=2.3.3) -Requires-Dist: Jinja2 (>=3.1.2) -Requires-Dist: itsdangerous (>=2.1.2) -Requires-Dist: click (>=8.1.3) -Requires-Dist: blinker (>=1.6.2) -Requires-Dist: importlib-metadata (>=3.6.0) ; python_version < "3.10" -Provides-Extra: async -Requires-Dist: asgiref (>=3.2) ; extra == 'async' -Provides-Extra: dotenv -Requires-Dist: python-dotenv ; extra == 'dotenv' - -Flask -===== - -Flask is a lightweight `WSGI`_ web application framework. It is designed -to make getting started quick and easy, with the ability to scale up to -complex applications. It began as a simple wrapper around `Werkzeug`_ -and `Jinja`_ and has become one of the most popular Python web -application frameworks. - -Flask offers suggestions, but doesn't enforce any dependencies or -project layout. It is up to the developer to choose the tools and -libraries they want to use. There are many extensions provided by the -community that make adding new functionality easy. - -.. _WSGI: https://wsgi.readthedocs.io/ -.. _Werkzeug: https://werkzeug.palletsprojects.com/ -.. _Jinja: https://jinja.palletsprojects.com/ - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U Flask - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. code-block:: python - - # save this as app.py - from flask import Flask - - app = Flask(__name__) - - @app.route("/") - def hello(): - return "Hello, World!" - -.. code-block:: text - - $ flask run - * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) - - -Contributing ------------- - -For guidance on setting up a development environment and how to make a -contribution to Flask, see the `contributing guidelines`_. - -.. _contributing guidelines: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst - - -Donate ------- - -The Pallets organization develops and supports Flask and the libraries -it uses. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. 
_please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://flask.palletsprojects.com/ -- Changes: https://flask.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Flask/ -- Source Code: https://github.com/pallets/flask/ -- Issue Tracker: https://github.com/pallets/flask/issues/ -- Chat: https://discord.gg/pallets diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/RECORD deleted file mode 100644 index 1ef5bd6..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/RECORD +++ /dev/null @@ -1,54 +0,0 @@ -../../../bin/flask,sha256=LTILJWQHsFFcRMLJYN4iezvqldZlBm3BlUbjwSLF9Bo,259 -Flask-2.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Flask-2.3.2.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -Flask-2.3.2.dist-info/METADATA,sha256=o20FsyHfhQR8TMWB_QrtQN2PHyzacLRUAgol_quBBvA,3716 -Flask-2.3.2.dist-info/RECORD,, -Flask-2.3.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Flask-2.3.2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 -Flask-2.3.2.dist-info/entry_points.txt,sha256=s3MqQpduU25y4dq3ftBYD6bMVdVnbMpZP-sUNw0zw0k,41 -Flask-2.3.2.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6 -flask/__init__.py,sha256=yeirfdSGPoM3Ylc9FWWJfy2gEQlHfiZCKrxBiPefACM,3731 -flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 -flask/__pycache__/__init__.cpython-311.pyc,, -flask/__pycache__/__main__.cpython-311.pyc,, -flask/__pycache__/app.cpython-311.pyc,, -flask/__pycache__/blueprints.cpython-311.pyc,, -flask/__pycache__/cli.cpython-311.pyc,, -flask/__pycache__/config.cpython-311.pyc,, -flask/__pycache__/ctx.cpython-311.pyc,, -flask/__pycache__/debughelpers.cpython-311.pyc,, -flask/__pycache__/globals.cpython-311.pyc,, -flask/__pycache__/helpers.cpython-311.pyc,, -flask/__pycache__/logging.cpython-311.pyc,, -flask/__pycache__/scaffold.cpython-311.pyc,, -flask/__pycache__/sessions.cpython-311.pyc,, -flask/__pycache__/signals.cpython-311.pyc,, -flask/__pycache__/templating.cpython-311.pyc,, -flask/__pycache__/testing.cpython-311.pyc,, -flask/__pycache__/typing.cpython-311.pyc,, -flask/__pycache__/views.cpython-311.pyc,, -flask/__pycache__/wrappers.cpython-311.pyc,, -flask/app.py,sha256=ht3Qx9U9z0I1qUfLoS7bYhJcubdpk-i54eHq37LDlN8,87620 -flask/blueprints.py,sha256=ZpVrwa8UY-YnVDsX_1K10XQjDwCUp7Qn2hmKln5icEQ,24332 -flask/cli.py,sha256=wRxX61jRDKQM4iZsYaVwcgGbpN2_2DmntLMWjVeiAx4,33720 -flask/config.py,sha256=yqdiN7TLOs2EChJ0uhTz3SICA3-QBG6l5wHTIUnANpc,12800 -flask/ctx.py,sha256=x2kGzUXtPzVyi2YSKrU_PV1AvtxTmh2iRdriJRTSPGM,14841 -flask/debughelpers.py,sha256=BR0xkd-sAyFuFW07D6NfrqNwSZxk1IrkG5n8zem-3sw,5547 -flask/globals.py,sha256=KUzVvSPh8v28kUasVDi_aQKB9hI2jZSYQHqaDU2P414,2945 -flask/helpers.py,sha256=QDxFmBW9GGXQDLuXrcxQRL0Ldo-_q11zEt3ZVgfINlI,24957 -flask/json/__init__.py,sha256=pdtpoK2b0b1u7Sxbx3feM7VWhsI20l1yGAvbYWxaxvc,5572 -flask/json/__pycache__/__init__.cpython-311.pyc,, -flask/json/__pycache__/provider.cpython-311.pyc,, -flask/json/__pycache__/tag.cpython-311.pyc,, -flask/json/provider.py,sha256=Os0frb8oGfyWKL-TDxb0Uy-MY6gDhPdJkRaUl5xAOXI,7637 -flask/json/tag.py,sha256=ihb7QWrNEr0YC3KD4TolZbftgSPCuLk7FAvK49huYC0,8871 -flask/logging.py,sha256=lArx2Bq9oTtUJ-DnZL9t88xU2zytzp4UWSM9Bd72NDQ,2327 -flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
-flask/scaffold.py,sha256=0tYQN98sC93YkIEw9g8BiIwceFZ27tNqBtBtFhFy5tY,35231 -flask/sessions.py,sha256=rFH2QKXG24dEazkKGxAHqUpAUh_30hDHrddhVYgAcY0,14169 -flask/signals.py,sha256=s1H4yKjf3c5dgVr41V6sJpE9dLJvmTJMYuK0rkqx3sw,1146 -flask/templating.py,sha256=XdP2hMFnZ5FCZOG7HUaLjC2VC-b4uHSWlDjwv_1p3qc,7503 -flask/testing.py,sha256=52-m5GecDcA-F2dFEYe8eDwApusxdg6S1suBaSC85N0,9768 -flask/typing.py,sha256=4Lj-YTxUoYvPYofC9GKu-1o0Ht8lyjp9z3I336J13_o,3005 -flask/views.py,sha256=V5hOGZLx0Bn99QGcM6mh5x_uM-MypVT0-RysEFU84jc,6789 -flask/wrappers.py,sha256=PhMp3teK3SnEmIdog59cO_DHiZ9Btn0qI1EifrTdwP8,5709 diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/REQUESTED b/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/WHEEL b/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/WHEEL deleted file mode 100644 index 1f37c02..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/entry_points.txt b/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/entry_points.txt deleted file mode 100644 index 137232d..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -flask = flask.cli:main diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/top_level.txt b/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/top_level.txt deleted file mode 100644 index 7e10602..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask-2.3.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/LICENSE b/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/LICENSE deleted file mode 100644 index f5c10ab..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Miguel Grinberg - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/METADATA deleted file mode 100644 index d259cbb..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/METADATA +++ /dev/null @@ -1,75 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask-SocketIO -Version: 5.3.4 -Summary: Socket.IO integration for Flask applications -Home-page: https://github.com/miguelgrinberg/flask-socketio -Author: Miguel Grinberg -Author-email: miguel.grinberg@gmail.com -Project-URL: Bug Tracker, https://github.com/miguelgrinberg/flask-socketio/issues -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python :: 3 -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Requires-Python: >=3.6 -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: Flask (>=0.9) -Requires-Dist: python-socketio (>=5.0.2) - -Flask-SocketIO -============== - -[![Build status](https://github.com/miguelgrinberg/flask-socketio/workflows/build/badge.svg)](https://github.com/miguelgrinberg/Flask-SocketIO/actions) [![codecov](https://codecov.io/gh/miguelgrinberg/flask-socketio/branch/main/graph/badge.svg)](https://codecov.io/gh/miguelgrinberg/flask-socketio) - -Socket.IO integration for Flask applications. - -Sponsors --------- - -The following organizations are funding this project: - -![Socket.IO](https://images.opencollective.com/socketio/050e5eb/logo/64.png)
[Socket.IO](https://socket.io) | [Add your company here!](https://github.com/sponsors/miguelgrinberg)| --|- - -Many individual sponsors also support this project through small ongoing contributions. Why not [join them](https://github.com/sponsors/miguelgrinberg)? - -Installation ------------- - -You can install this package as usual with pip: - - pip install flask-socketio - -Example -------- - -```py -from flask import Flask, render_template -from flask_socketio import SocketIO, emit - -app = Flask(__name__) -app.config['SECRET_KEY'] = 'secret!' -socketio = SocketIO(app) - -@app.route('/') -def index(): - return render_template('index.html') - -@socketio.event -def my_event(message): - emit('my response', {'data': 'got it!'}) - -if __name__ == '__main__': - socketio.run(app) -``` - -Resources ---------- - -- [Tutorial](http://blog.miguelgrinberg.com/post/easy-websockets-with-flask-and-gevent) -- [Documentation](http://flask-socketio.readthedocs.io/en/latest/) -- [PyPI](https://pypi.python.org/pypi/Flask-SocketIO) -- [Change Log](https://github.com/miguelgrinberg/Flask-SocketIO/blob/main/CHANGES.md) -- Questions? See the [questions](https://stackoverflow.com/questions/tagged/flask-socketio) others have asked on Stack Overflow, or [ask](https://stackoverflow.com/questions/ask?tags=python+flask-socketio+python-socketio) your own question. - diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/RECORD deleted file mode 100644 index 6211f9e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/RECORD +++ /dev/null @@ -1,13 +0,0 @@ -Flask_SocketIO-5.3.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Flask_SocketIO-5.3.4.dist-info/LICENSE,sha256=aNCWbkgKjS_T1cJtACyZbvCM36KxWnfQ0LWTuavuYKQ,1082 -Flask_SocketIO-5.3.4.dist-info/METADATA,sha256=DAt6N9d2trl12V9EVnECC94FkHjl_4tZSlCe_nQf3WM,2574 -Flask_SocketIO-5.3.4.dist-info/RECORD,, -Flask_SocketIO-5.3.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Flask_SocketIO-5.3.4.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 -Flask_SocketIO-5.3.4.dist-info/top_level.txt,sha256=C1ugzQBJ3HHUJsWGzyt70XRVOX-y4CUAR8MWKjwJOQ8,15 -flask_socketio/__init__.py,sha256=RVXfUJi1MypU4Fq5MorzUquMMQUB_tYBgCrkOYfjMQ8,54782 -flask_socketio/__pycache__/__init__.cpython-311.pyc,, -flask_socketio/__pycache__/namespace.cpython-311.pyc,, -flask_socketio/__pycache__/test_client.cpython-311.pyc,, -flask_socketio/namespace.py,sha256=b3oyXEemu2po-wpoy4ILTHQMVuVQqicogCDxfymfz_w,2020 -flask_socketio/test_client.py,sha256=TZGQzhjQEnTN5JjK9a3-7DyA0o3Qgk8mkf5fOMOfFYA,10274 diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/REQUESTED b/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/WHEEL b/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/WHEEL deleted file mode 100644 index 1f37c02..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/top_level.txt 
b/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/top_level.txt deleted file mode 100644 index ba82ec3..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Flask_SocketIO-5.3.4.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_socketio diff --git a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst b/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst deleted file mode 100644 index c37cae4..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/METADATA deleted file mode 100644 index f54bb5c..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/METADATA +++ /dev/null @@ -1,113 +0,0 @@ -Metadata-Version: 2.1 -Name: Jinja2 -Version: 3.1.2 -Summary: A very fast and expressive template engine. 
-Home-page: https://palletsprojects.com/p/jinja/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://jinja.palletsprojects.com/ -Project-URL: Changes, https://jinja.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/jinja/ -Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: MarkupSafe (>=2.0) -Provides-Extra: i18n -Requires-Dist: Babel (>=2.7) ; extra == 'i18n' - -Jinja -===== - -Jinja is a fast, expressive, extensible templating engine. Special -placeholders in the template allow writing code similar to Python -syntax. Then the template is passed data to render the final document. - -It includes: - -- Template inheritance and inclusion. -- Define and import macros within templates. -- HTML templates can use autoescaping to prevent XSS from untrusted - user input. -- A sandboxed environment can safely render untrusted templates. -- AsyncIO support for generating templates and calling async - functions. -- I18N support with Babel. -- Templates are compiled to optimized Python code just-in-time and - cached, or can be compiled ahead-of-time. -- Exceptions point to the correct line in templates to make debugging - easier. -- Extensible filters, tests, functions, and even syntax. - -Jinja's philosophy is that while application logic belongs in Python if -possible, it shouldn't make the template designer's job difficult by -restricting functionality too much. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U Jinja2 - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -In A Nutshell -------------- - -.. code-block:: jinja - - {% extends "base.html" %} - {% block title %}Members{% endblock %} - {% block content %} - - {% endblock %} - - -Donate ------- - -The Pallets organization develops and supports Jinja and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. 
_please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://jinja.palletsprojects.com/ -- Changes: https://jinja.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Jinja2/ -- Source Code: https://github.com/pallets/jinja/ -- Issue Tracker: https://github.com/pallets/jinja/issues/ -- Website: https://palletsprojects.com/p/jinja/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/RECORD deleted file mode 100644 index b20e953..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/RECORD +++ /dev/null @@ -1,58 +0,0 @@ -Jinja2-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Jinja2-3.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -Jinja2-3.1.2.dist-info/METADATA,sha256=PZ6v2SIidMNixR7MRUX9f7ZWsPwtXanknqiZUmRbh4U,3539 -Jinja2-3.1.2.dist-info/RECORD,, -Jinja2-3.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -Jinja2-3.1.2.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59 -Jinja2-3.1.2.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 -jinja2/__init__.py,sha256=8vGduD8ytwgD6GDSqpYc2m3aU-T7PKOAddvVXgGr_Fs,1927 -jinja2/__pycache__/__init__.cpython-311.pyc,, -jinja2/__pycache__/_identifier.cpython-311.pyc,, -jinja2/__pycache__/async_utils.cpython-311.pyc,, -jinja2/__pycache__/bccache.cpython-311.pyc,, -jinja2/__pycache__/compiler.cpython-311.pyc,, -jinja2/__pycache__/constants.cpython-311.pyc,, -jinja2/__pycache__/debug.cpython-311.pyc,, -jinja2/__pycache__/defaults.cpython-311.pyc,, -jinja2/__pycache__/environment.cpython-311.pyc,, -jinja2/__pycache__/exceptions.cpython-311.pyc,, -jinja2/__pycache__/ext.cpython-311.pyc,, -jinja2/__pycache__/filters.cpython-311.pyc,, -jinja2/__pycache__/idtracking.cpython-311.pyc,, -jinja2/__pycache__/lexer.cpython-311.pyc,, -jinja2/__pycache__/loaders.cpython-311.pyc,, -jinja2/__pycache__/meta.cpython-311.pyc,, -jinja2/__pycache__/nativetypes.cpython-311.pyc,, -jinja2/__pycache__/nodes.cpython-311.pyc,, -jinja2/__pycache__/optimizer.cpython-311.pyc,, -jinja2/__pycache__/parser.cpython-311.pyc,, -jinja2/__pycache__/runtime.cpython-311.pyc,, -jinja2/__pycache__/sandbox.cpython-311.pyc,, -jinja2/__pycache__/tests.cpython-311.pyc,, -jinja2/__pycache__/utils.cpython-311.pyc,, -jinja2/__pycache__/visitor.cpython-311.pyc,, -jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 -jinja2/async_utils.py,sha256=dHlbTeaxFPtAOQEYOGYh_PHcDT0rsDaUJAFDl_0XtTg,2472 -jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061 -jinja2/compiler.py,sha256=Gs-N8ThJ7OWK4-reKoO8Wh1ZXz95MVphBKNVf75qBr8,72172 -jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 -jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299 -jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 -jinja2/environment.py,sha256=6uHIcc7ZblqOMdx_uYNKqRnnwAF0_nzbyeMP9FFtuh4,61349 -jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 -jinja2/ext.py,sha256=ivr3P7LKbddiXDVez20EflcO3q2aHQwz9P_PgWGHVqE,31502 -jinja2/filters.py,sha256=9js1V-h2RlyW90IhLiBGLM2U-k6SCy2F4BUUMgB3K9Q,53509 -jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704 
-jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726 -jinja2/loaders.py,sha256=BfptfvTVpClUd-leMkHczdyPNYFzp_n7PKOJ98iyHOg,23207 -jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396 -jinja2/nativetypes.py,sha256=DXgORDPRmVWgy034H0xL8eF7qYoK3DrMxs-935d0Fzk,4226 -jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550 -jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650 -jinja2/parser.py,sha256=nHd-DFHbiygvfaPtm9rcQXJChZG7DPsWfiEsqfwKerY,39595 -jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jinja2/runtime.py,sha256=5CmD5BjbEJxSiDNTFBeKCaq8qU4aYD2v6q2EluyExms,33476 -jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584 -jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905 -jinja2/utils.py,sha256=u9jXESxGn8ATZNVolwmkjUVu4SA-tLgV0W7PcSfPfdQ,23965 -jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568 diff --git a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/WHEEL b/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt b/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt deleted file mode 100644 index 7b9666c..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[babel.extractors] -jinja2 = jinja2.ext:babel_extract[i18n] diff --git a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/top_level.txt b/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/top_level.txt deleted file mode 100644 index 7f7afbf..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Jinja2-3.1.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -jinja2 diff --git a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/LICENSE.rst b/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/LICENSE.rst deleted file mode 100644 index 9d227a0..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. 
Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/METADATA deleted file mode 100644 index bced165..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/METADATA +++ /dev/null @@ -1,93 +0,0 @@ -Metadata-Version: 2.1 -Name: MarkupSafe -Version: 2.1.3 -Summary: Safely add untrusted strings to HTML/XML markup. -Home-page: https://palletsprojects.com/p/markupsafe/ -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://markupsafe.palletsprojects.com/ -Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/markupsafe/ -Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ -Project-URL: Chat, https://discord.gg/pallets -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst - -MarkupSafe -========== - -MarkupSafe implements a text object that escapes characters so it is -safe to use in HTML and XML. Characters that have special meanings are -replaced so that they display as the actual characters. This mitigates -injection attacks, meaning untrusted user input can safely be displayed -on a page. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U MarkupSafe - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -Examples --------- - -.. 
code-block:: pycon - - >>> from markupsafe import Markup, escape - - >>> # escape replaces special characters and wraps in Markup - >>> escape("") - Markup('<script>alert(document.cookie);</script>') - - >>> # wrap in Markup to mark text "safe" and prevent escaping - >>> Markup("Hello") - Markup('hello') - - >>> escape(Markup("Hello")) - Markup('hello') - - >>> # Markup is a str subclass - >>> # methods and operators escape their arguments - >>> template = Markup("Hello {name}") - >>> template.format(name='"World"') - Markup('Hello "World"') - - -Donate ------- - -The Pallets organization develops and supports MarkupSafe and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://markupsafe.palletsprojects.com/ -- Changes: https://markupsafe.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/MarkupSafe/ -- Source Code: https://github.com/pallets/markupsafe/ -- Issue Tracker: https://github.com/pallets/markupsafe/issues/ -- Chat: https://discord.gg/pallets diff --git a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/RECORD deleted file mode 100644 index 1c787f4..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -MarkupSafe-2.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -MarkupSafe-2.1.3.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -MarkupSafe-2.1.3.dist-info/METADATA,sha256=Wvvh4Tz-YtW24YagYdqrrrBdm9m-DjTdqJWhxlbU6-0,3003 -MarkupSafe-2.1.3.dist-info/RECORD,, -MarkupSafe-2.1.3.dist-info/WHEEL,sha256=1gGdl8c7V4tzGrOfjA4xbXg5uJOisfAwei4hK84YpXE,115 -MarkupSafe-2.1.3.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 -markupsafe/__init__.py,sha256=xIItqrn1Bwi7FxPJO9rCVQBG0Evewue1Tl4BV0l9xEs,10338 -markupsafe/__pycache__/__init__.cpython-311.pyc,, -markupsafe/__pycache__/_native.cpython-311.pyc,, -markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713 -markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083 -markupsafe/_speedups.cpython-311-darwin.so,sha256=tzy2iIP3YqrDJlsEvshIKNMf7FwfWLcVq1OeD7W7HnM,117484 -markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 -markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/WHEEL b/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/WHEEL deleted file mode 100644 index ebece4c..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) -Root-Is-Purelib: false -Tag: cp311-cp311-macosx_10_9_universal2 - diff --git a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/top_level.txt b/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/top_level.txt deleted file mode 100644 index 75bf729..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/MarkupSafe-2.1.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -markupsafe diff --git 
a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/LICENSE.rst b/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/LICENSE.rst deleted file mode 100644 index c37cae4..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/METADATA deleted file mode 100644 index ebeb6c9..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/METADATA +++ /dev/null @@ -1,119 +0,0 @@ -Metadata-Version: 2.1 -Name: Werkzeug -Version: 2.3.6 -Summary: The comprehensive WSGI web application library. 
-Maintainer-email: Pallets -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://werkzeug.palletsprojects.com/ -Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/werkzeug/ -Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/ -Project-URL: Chat, https://discord.gg/pallets -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: MarkupSafe (>=2.1.1) -Provides-Extra: watchdog -Requires-Dist: watchdog (>=2.3) ; extra == 'watchdog' - -Werkzeug -======== - -*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") - -Werkzeug is a comprehensive `WSGI`_ web application library. It began as -a simple collection of various utilities for WSGI applications and has -become one of the most advanced WSGI utility libraries. - -It includes: - -- An interactive debugger that allows inspecting stack traces and - source code in the browser with an interactive interpreter for any - frame in the stack. -- A full-featured request object with objects to interact with - headers, query args, form data, files, and cookies. -- A response object that can wrap other WSGI applications and handle - streaming data. -- A routing system for matching URLs to endpoints and generating URLs - for endpoints, with an extensible system for capturing variables - from URLs. -- HTTP utilities to handle entity tags, cache control, dates, user - agents, cookies, files, and more. -- A threaded WSGI server for use while developing applications - locally. -- A test client for simulating HTTP requests during testing without - requiring running a server. - -Werkzeug doesn't enforce any dependencies. It is up to the developer to -choose a template engine, database adapter, and even how to handle -requests. It can be used to build all sorts of end user applications -such as blogs, wikis, or bulletin boards. - -`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while -providing more structure and patterns for defining powerful -applications. - -.. _WSGI: https://wsgi.readthedocs.io/en/latest/ -.. _Flask: https://www.palletsprojects.com/p/flask/ - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U Werkzeug - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. code-block:: python - - from werkzeug.wrappers import Request, Response - - @Request.application - def application(request): - return Response('Hello, World!') - - if __name__ == '__main__': - from werkzeug.serving import run_simple - run_simple('localhost', 4000, application) - - -Donate ------- - -The Pallets organization develops and supports Werkzeug and other -popular packages. 
In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://werkzeug.palletsprojects.com/ -- Changes: https://werkzeug.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Werkzeug/ -- Source Code: https://github.com/pallets/werkzeug/ -- Issue Tracker: https://github.com/pallets/werkzeug/issues/ -- Chat: https://discord.gg/pallets diff --git a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/RECORD deleted file mode 100644 index 8815d2e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/RECORD +++ /dev/null @@ -1,126 +0,0 @@ -Werkzeug-2.3.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Werkzeug-2.3.6.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -Werkzeug-2.3.6.dist-info/METADATA,sha256=BdsAbo_jtuo9bG0Tx6vBULMr7VPCHnHGo_Rvza8KSFc,4146 -Werkzeug-2.3.6.dist-info/RECORD,, -Werkzeug-2.3.6.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 -Werkzeug-2.3.6.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 -werkzeug/__init__.py,sha256=GE-glr5Ri57UhBnkXrFicJrcM3JMATvBS3SEdi3rR6Q,188 -werkzeug/__pycache__/__init__.cpython-311.pyc,, -werkzeug/__pycache__/_internal.cpython-311.pyc,, -werkzeug/__pycache__/_reloader.cpython-311.pyc,, -werkzeug/__pycache__/exceptions.cpython-311.pyc,, -werkzeug/__pycache__/formparser.cpython-311.pyc,, -werkzeug/__pycache__/http.cpython-311.pyc,, -werkzeug/__pycache__/local.cpython-311.pyc,, -werkzeug/__pycache__/security.cpython-311.pyc,, -werkzeug/__pycache__/serving.cpython-311.pyc,, -werkzeug/__pycache__/test.cpython-311.pyc,, -werkzeug/__pycache__/testapp.cpython-311.pyc,, -werkzeug/__pycache__/urls.cpython-311.pyc,, -werkzeug/__pycache__/user_agent.cpython-311.pyc,, -werkzeug/__pycache__/utils.cpython-311.pyc,, -werkzeug/__pycache__/wsgi.cpython-311.pyc,, -werkzeug/_internal.py,sha256=tbijqLWDIRP_AaPSBswRI5KuzDB3Dy5M6rRGFlCAqt4,8688 -werkzeug/_reloader.py,sha256=1O1DDWlqVwYIX8kgJwH5B4a_Uh6acQnw3sQf01JpXtM,14745 -werkzeug/datastructures/__init__.py,sha256=yzBdOT9DdK3nraNG49pA3bVsvtPPLx2-t2N8ZmuAd9w,1900 -werkzeug/datastructures/__pycache__/__init__.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/accept.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/auth.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/cache_control.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/csp.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/etag.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/file_storage.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/headers.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/mixins.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/range.cpython-311.pyc,, -werkzeug/datastructures/__pycache__/structures.cpython-311.pyc,, -werkzeug/datastructures/accept.py,sha256=CuCvBAxNzbt4QUb17rH986vvOVGURFUjo0DX2PQy_yI,10670 -werkzeug/datastructures/accept.pyi,sha256=6P114gncjZoy-i_n_3OQy2nJVwjEAIe7PcBxKYqCEfc,1917 -werkzeug/datastructures/auth.py,sha256=Mm92MLyv_M9Cgto8oNz65l_dZZ8sAibyY6NuKDW3pcs,16040 -werkzeug/datastructures/cache_control.py,sha256=RTUipZev50s-1TAn2rYGZrytm_6IOIxQd67fkR5bNF0,6043 
-werkzeug/datastructures/cache_control.pyi,sha256=6Q93jRysAKMPWRA72OMksyn7d3ZysuxwGlHp_iwF9pA,3756 -werkzeug/datastructures/csp.py,sha256=DAOAO266LK0JKbvlG80bbkAgfrNsnU9HBoz-FdIYNdo,3244 -werkzeug/datastructures/csp.pyi,sha256=AmDWiZU4rrJA4SZmyMNI1L5PLdIfJsI5Li9r5lE1q6M,5765 -werkzeug/datastructures/etag.py,sha256=JsyI-yXayF-hQu26MyFzbHFIZsaQ6odj3RZO_jF-_cc,2913 -werkzeug/datastructures/etag.pyi,sha256=N9cuUBrZnxHmsbW0BBmjKW-djNY7WKbI6t_WopB8Zo0,1047 -werkzeug/datastructures/file_storage.py,sha256=ePeMtr65s_1_sunXMv_SBOiFof5CX5BepYv5_W16fZk,6184 -werkzeug/datastructures/file_storage.pyi,sha256=2sdbKHhvbQF5FjrJuO6l_m1yZvZ4oPCUTspmdmjQlSU,1433 -werkzeug/datastructures/headers.py,sha256=V08N4VTcaA11fRq1WK5v28QomGd-A1S9CmiwugixhWo,18882 -werkzeug/datastructures/headers.pyi,sha256=66Gh9DbD8QNpLRBOuer4DMCj12csddHrcgxiJPLE5n8,4237 -werkzeug/datastructures/mixins.py,sha256=-IQSQ70UOMQlqtJEIyyhplOd4obaTOfzGvka-cunCtM,5337 -werkzeug/datastructures/mixins.pyi,sha256=y92tClxVslJBEGgAwDRsQLExfin2p0x7NfnP_b8w6xc,4191 -werkzeug/datastructures/range.py,sha256=JXSDPseG7iH5giJp3R1SnQC_SqQp634M8Iv6QTsbTxM,5669 -werkzeug/datastructures/range.pyi,sha256=bsM61iNp86gT2lyN0F_Dqg8xsnfPerdmElipuHppiJQ,1792 -werkzeug/datastructures/structures.py,sha256=_bhAf0adEk6WU2uy8jdmuxFMTFcuClY1p7jQ-3wYXj4,31761 -werkzeug/datastructures/structures.pyi,sha256=MRg-RubT3UPjh62i9-7Xht8DVL0zTApRzjs52Hfz_j4,8148 -werkzeug/debug/__init__.py,sha256=WRTLJSvnuK6jlBuQLllTnN57th0HKPjxbS7-d8QJZIc,18760 -werkzeug/debug/__pycache__/__init__.cpython-311.pyc,, -werkzeug/debug/__pycache__/console.cpython-311.pyc,, -werkzeug/debug/__pycache__/repr.cpython-311.pyc,, -werkzeug/debug/__pycache__/tbtools.cpython-311.pyc,, -werkzeug/debug/console.py,sha256=FIO8gDX2eQ1_4MtpJ4s0i2gR4fFCJZTPwhSVByF4kbo,6068 -werkzeug/debug/repr.py,sha256=ECmIpNVlCppTfCuIuEgrJVfuhr8iDqPSWeVJyxt1QOM,9328 -werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222 -werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 -werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521 -werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 -werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 -werkzeug/debug/shared/style.css,sha256=-xSxzUEZGw_IqlDR5iZxitNl8LQUjBM-_Y4UAvXVH8g,6078 -werkzeug/debug/tbtools.py,sha256=8Xg7p2JzCC1AMWuse5HYc594OdzC5ToeJbNk49_zZCc,13271 -werkzeug/exceptions.py,sha256=d6VNzGcVgLazIpfwRD8pN_d3yAJNyngBDFvlXQbR-38,26062 -werkzeug/formparser.py,sha256=DZ9BeiHAah3_CuBORNOEipRwE74lHRFX1eK2_3XKcL4,19574 -werkzeug/http.py,sha256=lR6WM_GatD5P4_y1VCix2pqDMHex73fz7TkOI3kHHwU,48712 -werkzeug/local.py,sha256=zrXlO1IP3KTz310h9LSdVKMaFsJfNyXkfCYCkbvlBXQ,22075 -werkzeug/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -werkzeug/middleware/__pycache__/__init__.cpython-311.pyc,, -werkzeug/middleware/__pycache__/dispatcher.cpython-311.pyc,, -werkzeug/middleware/__pycache__/http_proxy.cpython-311.pyc,, -werkzeug/middleware/__pycache__/lint.cpython-311.pyc,, -werkzeug/middleware/__pycache__/profiler.cpython-311.pyc,, -werkzeug/middleware/__pycache__/proxy_fix.cpython-311.pyc,, -werkzeug/middleware/__pycache__/shared_data.cpython-311.pyc,, -werkzeug/middleware/dispatcher.py,sha256=6ltzPtDsIdLTY_T1GW6kxBJL0KZftbipa_WVdKtpVQ8,2601 -werkzeug/middleware/http_proxy.py,sha256=vsSvt84m656x3mV_Fj78y7O2eYHmurWngErTcjeiz8U,7833 
-werkzeug/middleware/lint.py,sha256=6CqcwMWro1p-GRUGPgQ1n21KFnTTqc6-81CGTzpcK74,13916 -werkzeug/middleware/profiler.py,sha256=KKr8nAiF9dr9pNd3G0D3xs7mUba9gvWkyK7X9ceke70,4906 -werkzeug/middleware/proxy_fix.py,sha256=dcOOSjSok2QsSh1VSNsw-a0Vy_Jn5DunlO6PRbXBq0A,6754 -werkzeug/middleware/shared_data.py,sha256=DeM8OouhfhZs8w5T7Wxw-uKuOHXoH0x5RopzxR2RRjI,9513 -werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -werkzeug/routing/__init__.py,sha256=HpvahY7WwkLdV4Cq3Bsc3GrqNon4u6t8-vhbb9E5o00,4819 -werkzeug/routing/__pycache__/__init__.cpython-311.pyc,, -werkzeug/routing/__pycache__/converters.cpython-311.pyc,, -werkzeug/routing/__pycache__/exceptions.cpython-311.pyc,, -werkzeug/routing/__pycache__/map.cpython-311.pyc,, -werkzeug/routing/__pycache__/matcher.cpython-311.pyc,, -werkzeug/routing/__pycache__/rules.cpython-311.pyc,, -werkzeug/routing/converters.py,sha256=V8e_wMRop6WG4Kymu4pBIR8OrJl-ZUQUZlinUXfw7WE,7602 -werkzeug/routing/exceptions.py,sha256=yGZ5AUL-buHp-vK8AJbZ0bLIbSckh1UyiGKgRg4ZjaA,4698 -werkzeug/routing/map.py,sha256=2tirw9j5wypzsUT6WBcBNcBTqNp0_iBXnF_1vhY9HjI,37403 -werkzeug/routing/matcher.py,sha256=FyPG45iqR1XwxFujejSqfNEKV7IgbR2td7Jp-ocSASY,7817 -werkzeug/routing/rules.py,sha256=dq0NO-0ZVG3OX7-8FFd0S-bZUgyzGoF4JYnlYC5bpy4,32048 -werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -werkzeug/sansio/__pycache__/__init__.cpython-311.pyc,, -werkzeug/sansio/__pycache__/http.cpython-311.pyc,, -werkzeug/sansio/__pycache__/multipart.cpython-311.pyc,, -werkzeug/sansio/__pycache__/request.cpython-311.pyc,, -werkzeug/sansio/__pycache__/response.cpython-311.pyc,, -werkzeug/sansio/__pycache__/utils.cpython-311.pyc,, -werkzeug/sansio/http.py,sha256=mKTbXo_squCAZKjt9yzfPFV8ZqQbfa6mjdc6XoeLNZ0,6234 -werkzeug/sansio/multipart.py,sha256=VTP_jhRRxYDX-1_1oge_b2CK3KTLnw3LB0k8b2zpiHI,11087 -werkzeug/sansio/request.py,sha256=wEeVGySwlOfJT5xlgQzjJOe2ksky70CJT75QTzkvfqM,24243 -werkzeug/sansio/response.py,sha256=WSsWrz-6FjtrRKun3Ha0a4sF78Su0kp8IzfrgmU_WOI,29011 -werkzeug/sansio/utils.py,sha256=LYgmrN7yr04ZDVk5flPcUJLo1rDnTzhF04OH3-ujCWQ,4950 -werkzeug/security.py,sha256=gEH8qD5Ykgn6W6PgMx2CQx-iNqJFenXXqOGiWDi_3eE,5814 -werkzeug/serving.py,sha256=Ql_SUZxsmQzN8OZ-hDvKFQ5nRgKh6FEIYwcXVEmD6qU,39224 -werkzeug/test.py,sha256=OVpg33rnFwDJ5Jya7639PKztEB7N32WAoQTVqH1p6zo,55645 -werkzeug/testapp.py,sha256=w9AdbZcmSvydD-OP6EjxVENuaZof9MkbYNFVALhcoqQ,6151 -werkzeug/urls.py,sha256=Uq_cu8TmZFHkQ7t2pp9DNwDvs6wG76jzWPstQIssPVk,45683 -werkzeug/user_agent.py,sha256=lSlLYKCcbzCUSkbdAoO8zPk2UR-8Mdn6iu_iA2kYPBA,1416 -werkzeug/utils.py,sha256=DYkOtfDR_Wc3ro3_peReo9KkUC-6yhOvz27_PUAckbA,24654 -werkzeug/wrappers/__init__.py,sha256=kGyK7rOud3qCxll_jFyW15YarJhj1xtdf3ocx9ZheB8,120 -werkzeug/wrappers/__pycache__/__init__.cpython-311.pyc,, -werkzeug/wrappers/__pycache__/request.cpython-311.pyc,, -werkzeug/wrappers/__pycache__/response.cpython-311.pyc,, -werkzeug/wrappers/request.py,sha256=_PIbgCZ9xfQXC9HEjm-j1R-F4gSPcx5q-QT983mMzbs,24848 -werkzeug/wrappers/response.py,sha256=FfGesquK6cSdPTFZvzV42CM__Ohta2cxNqLBDRkAuKA,32664 -werkzeug/wsgi.py,sha256=PGkhajtHnJj2NqYpYW_T8w17JJbaH8iI0wHHNkPvJKs,29153 diff --git a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/WHEEL b/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/WHEEL deleted file mode 100644 index 1f37c02..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) -Root-Is-Purelib: 
true -Tag: py3-none-any - diff --git a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/top_level.txt b/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/top_level.txt deleted file mode 100644 index 6fe8da8..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/Werkzeug-2.3.6.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -werkzeug diff --git a/venv_flaskchat/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc deleted file mode 100644 index aa1ac30..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/__init__.py deleted file mode 100644 index f987a53..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/__init__.py +++ /dev/null @@ -1,222 +0,0 @@ -# don't import any costly modules -import sys -import os - - -is_pypy = '__pypy__' in sys.builtin_module_names - - -def warn_distutils_present(): - if 'distutils' not in sys.modules: - return - if is_pypy and sys.version_info < (3, 7): - # PyPy for 3.6 unconditionally imports distutils, so bypass the warning - # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 - return - import warnings - - warnings.warn( - "Distutils was imported before Setuptools, but importing Setuptools " - "also replaces the `distutils` module in `sys.modules`. This may lead " - "to undesirable behaviors or errors. To avoid these issues, avoid " - "using distutils directly, ensure that setuptools is installed in the " - "traditional way (e.g. not an editable install), and/or make sure " - "that setuptools is always imported before distutils." - ) - - -def clear_distutils(): - if 'distutils' not in sys.modules: - return - import warnings - - warnings.warn("Setuptools is replacing distutils.") - mods = [ - name - for name in sys.modules - if name == "distutils" or name.startswith("distutils.") - ] - for name in mods: - del sys.modules[name] - - -def enabled(): - """ - Allow selection of distutils by environment variable. - """ - which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') - return which == 'local' - - -def ensure_local_distutils(): - import importlib - - clear_distutils() - - # With the DistutilsMetaFinder in place, - # perform an import to cause distutils to be - # loaded from setuptools._distutils. Ref #2906. - with shim(): - importlib.import_module('distutils') - - # check that submodules load as expected - core = importlib.import_module('distutils.core') - assert '_distutils' in core.__file__, core.__file__ - assert 'setuptools._distutils.log' not in sys.modules - - -def do_override(): - """ - Ensure that the local copy of distutils is preferred over stdlib. - - See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 - for more motivation. - """ - if enabled(): - warn_distutils_present() - ensure_local_distutils() - - -class _TrivialRe: - def __init__(self, *patterns): - self._patterns = patterns - - def match(self, string): - return all(pat in string for pat in self._patterns) - - -class DistutilsMetaFinder: - def find_spec(self, fullname, path, target=None): - # optimization: only consider top level modules and those - # found in the CPython test suite. 
- if path is not None and not fullname.startswith('test.'): - return - - method_name = 'spec_for_{fullname}'.format(**locals()) - method = getattr(self, method_name, lambda: None) - return method() - - def spec_for_distutils(self): - if self.is_cpython(): - return - - import importlib - import importlib.abc - import importlib.util - - try: - mod = importlib.import_module('setuptools._distutils') - except Exception: - # There are a couple of cases where setuptools._distutils - # may not be present: - # - An older Setuptools without a local distutils is - # taking precedence. Ref #2957. - # - Path manipulation during sitecustomize removes - # setuptools from the path but only after the hook - # has been loaded. Ref #2980. - # In either case, fall back to stdlib behavior. - return - - class DistutilsLoader(importlib.abc.Loader): - def create_module(self, spec): - mod.__name__ = 'distutils' - return mod - - def exec_module(self, module): - pass - - return importlib.util.spec_from_loader( - 'distutils', DistutilsLoader(), origin=mod.__file__ - ) - - @staticmethod - def is_cpython(): - """ - Suppress supplying distutils for CPython (build and tests). - Ref #2965 and #3007. - """ - return os.path.isfile('pybuilddir.txt') - - def spec_for_pip(self): - """ - Ensure stdlib distutils when running under pip. - See pypa/pip#8761 for rationale. - """ - if self.pip_imported_during_build(): - return - clear_distutils() - self.spec_for_distutils = lambda: None - - @classmethod - def pip_imported_during_build(cls): - """ - Detect if pip is being imported in a build script. Ref #2355. - """ - import traceback - - return any( - cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None) - ) - - @staticmethod - def frame_file_is_setup(frame): - """ - Return True if the indicated frame suggests a setup.py file. - """ - # some frames may not have __file__ (#2940) - return frame.f_globals.get('__file__', '').endswith('setup.py') - - def spec_for_sensitive_tests(self): - """ - Ensure stdlib distutils when running select tests under CPython. 
- - python/cpython#91169 - """ - clear_distutils() - self.spec_for_distutils = lambda: None - - sensitive_tests = ( - [ - 'test.test_distutils', - 'test.test_peg_generator', - 'test.test_importlib', - ] - if sys.version_info < (3, 10) - else [ - 'test.test_distutils', - ] - ) - - -for name in DistutilsMetaFinder.sensitive_tests: - setattr( - DistutilsMetaFinder, - f'spec_for_{name}', - DistutilsMetaFinder.spec_for_sensitive_tests, - ) - - -DISTUTILS_FINDER = DistutilsMetaFinder() - - -def add_shim(): - DISTUTILS_FINDER in sys.meta_path or insert_shim() - - -class shim: - def __enter__(self): - insert_shim() - - def __exit__(self, exc, value, tb): - remove_shim() - - -def insert_shim(): - sys.meta_path.insert(0, DISTUTILS_FINDER) - - -def remove_shim(): - try: - sys.meta_path.remove(DISTUTILS_FINDER) - except ValueError: - pass diff --git a/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 24a4675..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc deleted file mode 100644 index 469ed12..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/override.py b/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/override.py deleted file mode 100644 index 2cc433a..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/_distutils_hack/override.py +++ /dev/null @@ -1 +0,0 @@ -__import__('_distutils_hack').do_override() diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/METADATA deleted file mode 100644 index a302cf3..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/METADATA +++ /dev/null @@ -1,221 +0,0 @@ -Metadata-Version: 2.1 -Name: annotated-types -Version: 0.5.0 -Summary: Reusable constraint types to use with typing.Annotated -Author-email: Samuel Colvin , Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Zac Hatfield-Dodds -License-File: LICENSE -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Console -Classifier: Environment :: MacOS X -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Information Technology -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: POSIX :: Linux -Classifier: Operating System :: Unix -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming 
Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Typing :: Typed -Requires-Python: >=3.7 -Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9' -Description-Content-Type: text/markdown - -# annotated-types - -[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) -[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types) -[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types) -[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE) - -[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of -adding context-specific metadata to existing types, and specifies that -`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special -logic for `x`. - -This package provides metadata objects which can be used to represent common -constraints such as upper and lower bounds on scalar values and collection sizes, -a `Predicate` marker for runtime checks, and -descriptions of how we intend these metadata to be interpreted. In some cases, -we also note alternative representations which do not require this package. - -## Install - -```bash -pip install annotated-types -``` - -## Examples - -```python -from typing import Annotated -from annotated_types import Gt, Len, Predicate - -class MyClass: - age: Annotated[int, Gt(18)] # Valid: 19, 20, ... - # Invalid: 17, 18, "19", 19.0, ... - factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ... - # Invalid: 4, 8, -2, 5.0, "prime", ... - - my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50] - # Invalid: (1, 2), ["abc"], [0] * 20 -``` - -## Documentation - -_While `annotated-types` avoids runtime checks for performance, users should not -construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`. -Downstream implementors may choose to raise an error, emit a warning, silently ignore -a metadata item, etc., if the metadata objects described below are used with an -incompatible type - or for any other reason!_ - -### Gt, Ge, Lt, Le - -Express inclusive and/or exclusive bounds on orderable values - which may be numbers, -dates, times, strings, sets, etc. Note that the boundary value need not be of the -same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]` -is fine, for example, and implies that the value is an integer x such that `x > 1.5`. - -We suggest that implementors may also interpret `functools.partial(operator.le, 1.5)` -as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on -the `annotated-types` package. 
- -To be explicit, these types have the following meanings: - -* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum -* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum -* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum -* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum - -### Interval - -`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single -metadata object. `None` attributes should be ignored, and non-`None` attributes -treated as per the single bounds above. - -### MultipleOf - -`MultipleOf(multiple_of=x)` might be interpreted in two ways: - -1. Python semantics, implying `value % multiple_of == 0`, or -2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1), - where `int(value / multiple_of) == value / multiple_of`. - -We encourage users to be aware of these two common interpretations and their -distinct behaviours, especially since very large or non-integer numbers make -it easy to cause silent data corruption due to floating-point imprecision. - -We encourage libraries to carefully document which interpretation they implement. - -### MinLen, MaxLen, Len - -`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive. - -As well as `Len()` which can optionally include upper and lower bounds, we also -provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)` -and `Len(max_length=y)` respectively. - -`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`. - -Examples of usage: - -* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10))`) - list must have a length of 10 or less -* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less -* `Annotated[list, MinLen(3))` (or `Annotated[list, Len(min_length=3))`) - list must have a length of 3 or more -* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6 -* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8 - -#### Changed in v0.4.0 - -* `min_inclusive` has been renamed to `min_length`, no change in meaning -* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive** -* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic - meaning of the upper bound in slices vs. `Len` - -See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion. - -### Timezone - -`Timezone` can be used with a `datetime` or a `time` to express which timezones -are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime. -`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis)) -expresses that any timezone-aware datetime is allowed. You may also pass a specific -timezone string or `timezone` object such as `Timezone(timezone.utc)` or -`Timezone("Africa/Abidjan")` to express that you only allow a specific timezone, -though we note that this is often a symptom of fragile design. - -### Predicate - -`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values. -Users should prefer the statically inspectable metadata above, but if you need -the full power and flexibility of arbitrary runtime predicates... here it is. 
- -We provide a few predefined predicates for common string constraints: -`IsLower = Predicate(str.islower)`, `IsUpper = Predicate(str.isupper)`, and -`IsDigit = Predicate(str.isdigit)`. -Some libraries might have special logic to handle known or understandable predicates, -for example by checking for `str.isdigit` and using its presence to both call custom -logic to enforce digit-only strings, and customise some generated external schema. -Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in -favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`. - -We do not specify what behaviour should be expected for predicates that raise -an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently -skip invalid constraints, or statically raise an error; or it might try calling it -and then propogate or discard the resulting -`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object` -exception. We encourage libraries to document the behaviour they choose. - -### Integrating downstream types with `GroupedMetadata` - -Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata. -This can help reduce verbosity and cognitive overhead for users. -For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata: - -```python -from dataclasses import dataclass -from typing import Iterator -from annotated_types import GroupedMetadata, Ge - -@dataclass -class Field(GroupedMetadata): - ge: int | None = None - description: str | None = None - - def __iter__(self) -> Iterator[object]: - # Iterating over a GroupedMetadata object should yield annotated-types - # constraint metadata objects which describe it as fully as possible, - # and may include other unknown objects too. - if self.ge is not None: - yield Ge(self.ge) - if self.description is not None: - yield Description(self.description) -``` - -Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently. - -Libraries consuming annotated-types should also ignore any metadata they do not recongize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself. - -Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`. - -### Consuming metadata - -We intend to not be perspcriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from types annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103). - -It is up to the implementer to determine how this metadata is used. -You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases. 
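A minimal sketch of the "consuming metadata" idea described just above. This is not part of annotated-types or of this repository; `User` and `violations` are illustrative names, only a few constraint types are handled, and it assumes Python 3.9+ with `annotated-types` installed. It walks `Annotated` metadata via `typing.get_type_hints(..., include_extras=True)`, unpacks any `GroupedMetadata` (such as `Interval` or `Len`), and checks each constraint against the field value.

```python
from dataclasses import dataclass
from typing import Annotated, get_args, get_type_hints

import annotated_types as at


@dataclass
class User:
    name: Annotated[str, at.MinLen(1)]
    age: Annotated[int, at.Interval(gt=17, lt=130)]


def violations(obj) -> list:
    """Collect constraint violations by inspecting Annotated metadata."""
    problems = []
    hints = get_type_hints(type(obj), include_extras=True)
    for field, hint in hints.items():
        value = getattr(obj, field)
        # Annotated[T, m1, m2, ...] -> (T, m1, m2, ...); skip the type itself
        for meta in get_args(hint)[1:]:
            # GroupedMetadata (e.g. Interval, Len) unpacks into single constraints
            metas = list(meta) if isinstance(meta, at.GroupedMetadata) else [meta]
            for m in metas:
                if isinstance(m, at.Gt) and not value > m.gt:
                    problems.append(f"{field}: expected > {m.gt}, got {value!r}")
                elif isinstance(m, at.Lt) and not value < m.lt:
                    problems.append(f"{field}: expected < {m.lt}, got {value!r}")
                elif isinstance(m, at.MinLen) and len(value) < m.min_length:
                    problems.append(f"{field}: expected len >= {m.min_length}")
                elif isinstance(m, at.Predicate) and not m.func(value):
                    problems.append(f"{field}: failed predicate {m.func!r}")
    return problems


print(violations(User(name="", age=16)))
# ['name: expected len >= 1', 'age: expected > 17, got 16']
```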
- -## Design & History - -This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic -and Hypothesis, with the goal of making it as easy as possible for end-users to -provide more informative annotations for use by runtime libraries. - -It is deliberately minimal, and following PEP-593 allows considerable downstream -discretion in what (if anything!) they choose to support. Nonetheless, we expect -that staying simple and covering _only_ the most common use-cases will give users -and maintainers the best experience we can. If you'd like more constraints for your -types - follow our lead, by defining them and documenting them downstream! diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/RECORD deleted file mode 100644 index d70ee0c..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -annotated_types-0.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -annotated_types-0.5.0.dist-info/METADATA,sha256=ie2NTBuiFKST5M2yUVgOgymZkQ5iad82BPFcZ07ZKBQ,11685 -annotated_types-0.5.0.dist-info/RECORD,, -annotated_types-0.5.0.dist-info/WHEEL,sha256=y1bSCq4r5i4nMmpXeUJMqs3ipKvkZObrIXSvJHm1qCI,87 -annotated_types-0.5.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083 -annotated_types/__init__.py,sha256=qQOWO1uHTw0QjOL1Ggcs92oY4OPX0G7srTdq_M5lC-8,9375 -annotated_types/__pycache__/__init__.cpython-311.pyc,, -annotated_types/__pycache__/test_cases.cpython-311.pyc,, -annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -annotated_types/test_cases.py,sha256=z-ftQXel0GT9FjAN2PVXi8ZUVV7F1T9PvwZ5l-UAqpU,5680 diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/WHEEL b/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/WHEEL deleted file mode 100644 index 2762755..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.17.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/licenses/LICENSE b/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/licenses/LICENSE deleted file mode 100644 index d99323a..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/annotated_types-0.5.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2022 the contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/annotated_types/__init__.py deleted file mode 100644 index 644db6f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/annotated_types/__init__.py +++ /dev/null @@ -1,319 +0,0 @@ -import sys -from dataclasses import dataclass -from datetime import timezone -from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, TypeVar, Union - -if sys.version_info < (3, 8): - from typing_extensions import Protocol, runtime_checkable -else: - from typing import Protocol, runtime_checkable - -if sys.version_info < (3, 9): - from typing_extensions import Annotated, Literal -else: - from typing import Annotated, Literal - -if sys.version_info < (3, 10): - EllipsisType = type(Ellipsis) - KW_ONLY = {} - SLOTS = {} -else: - from types import EllipsisType - - KW_ONLY = {"kw_only": True} - SLOTS = {"slots": True} - - -__all__ = ( - 'BaseMetadata', - 'GroupedMetadata', - 'Gt', - 'Ge', - 'Lt', - 'Le', - 'Interval', - 'MultipleOf', - 'MinLen', - 'MaxLen', - 'Len', - 'Timezone', - 'Predicate', - 'LowerCase', - 'UpperCase', - 'IsDigits', - '__version__', -) - -__version__ = '0.5.0' - - -T = TypeVar('T') - - -# arguments that start with __ are considered -# positional only -# see https://peps.python.org/pep-0484/#positional-only-arguments - - -class SupportsGt(Protocol): - def __gt__(self: T, __other: T) -> bool: - ... - - -class SupportsGe(Protocol): - def __ge__(self: T, __other: T) -> bool: - ... - - -class SupportsLt(Protocol): - def __lt__(self: T, __other: T) -> bool: - ... - - -class SupportsLe(Protocol): - def __le__(self: T, __other: T) -> bool: - ... - - -class SupportsMod(Protocol): - def __mod__(self: T, __other: T) -> T: - ... - - -class SupportsDiv(Protocol): - def __div__(self: T, __other: T) -> T: - ... - - -class BaseMetadata: - """Base class for all metadata. - - This exists mainly so that implementers - can do `isinstance(..., BaseMetadata)` while traversing field annotations. - """ - - __slots__ = () - - -@dataclass(frozen=True, **SLOTS) -class Gt(BaseMetadata): - """Gt(gt=x) implies that the value must be greater than x. - - It can be used with any type that supports the ``>`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - gt: SupportsGt - - -@dataclass(frozen=True, **SLOTS) -class Ge(BaseMetadata): - """Ge(ge=x) implies that the value must be greater than or equal to x. - - It can be used with any type that supports the ``>=`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - ge: SupportsGe - - -@dataclass(frozen=True, **SLOTS) -class Lt(BaseMetadata): - """Lt(lt=x) implies that the value must be less than x. - - It can be used with any type that supports the ``<`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - lt: SupportsLt - - -@dataclass(frozen=True, **SLOTS) -class Le(BaseMetadata): - """Le(le=x) implies that the value must be less than or equal to x. - - It can be used with any type that supports the ``<=`` operator, - including numbers, dates and times, strings, sets, and so on. 
- """ - - le: SupportsLe - - -@runtime_checkable -class GroupedMetadata(Protocol): - """A grouping of multiple BaseMetadata objects. - - `GroupedMetadata` on its own is not metadata and has no meaning. - All it the the constraint and metadata should be fully expressable - in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`. - - Concrete implementations should override `GroupedMetadata.__iter__()` - to add their own metadata. - For example: - - >>> @dataclass - >>> class Field(GroupedMetadata): - >>> gt: float | None = None - >>> description: str | None = None - ... - >>> def __iter__(self) -> Iterable[BaseMetadata]: - >>> if self.gt is not None: - >>> yield Gt(self.gt) - >>> if self.description is not None: - >>> yield Description(self.gt) - - Also see the implementation of `Interval` below for an example. - - Parsers should recognize this and unpack it so that it can be used - both with and without unpacking: - - - `Annotated[int, Field(...)]` (parser must unpack Field) - - `Annotated[int, *Field(...)]` (PEP-646) - """ # noqa: trailing-whitespace - - @property - def __is_annotated_types_grouped_metadata__(self) -> Literal[True]: - return True - - def __iter__(self) -> Iterator[BaseMetadata]: - ... - - if not TYPE_CHECKING: - __slots__ = () # allow subclasses to use slots - - def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None: - # Basic ABC like functionality without the complexity of an ABC - super().__init_subclass__(*args, **kwargs) - if cls.__iter__ is GroupedMetadata.__iter__: - raise TypeError("Can't subclass GroupedMetadata without implementing __iter__") - - def __iter__(self) -> Iterator[BaseMetadata]: # noqa: F811 - raise NotImplementedError # more helpful than "None has no attribute..." type errors - - -@dataclass(frozen=True, **KW_ONLY, **SLOTS) -class Interval(GroupedMetadata): - """Interval can express inclusive or exclusive bounds with a single object. - - It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which - are interpreted the same way as the single-bound constraints. - """ - - gt: Union[SupportsGt, None] = None - ge: Union[SupportsGe, None] = None - lt: Union[SupportsLt, None] = None - le: Union[SupportsLe, None] = None - - def __iter__(self) -> Iterator[BaseMetadata]: - """Unpack an Interval into zero or more single-bounds.""" - if self.gt is not None: - yield Gt(self.gt) - if self.ge is not None: - yield Ge(self.ge) - if self.lt is not None: - yield Lt(self.lt) - if self.le is not None: - yield Le(self.le) - - -@dataclass(frozen=True, **SLOTS) -class MultipleOf(BaseMetadata): - """MultipleOf(multiple_of=x) might be interpreted in two ways: - - 1. Python semantics, implying ``value % multiple_of == 0``, or - 2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of`` - - We encourage users to be aware of these two common interpretations, - and libraries to carefully document which they implement. - """ - - multiple_of: Union[SupportsDiv, SupportsMod] - - -@dataclass(frozen=True, **SLOTS) -class MinLen(BaseMetadata): - """ - MinLen() implies minimum inclusive length, - e.g. ``len(value) >= min_length``. - """ - - min_length: Annotated[int, Ge(0)] - - -@dataclass(frozen=True, **SLOTS) -class MaxLen(BaseMetadata): - """ - MaxLen() implies maximum inclusive length, - e.g. ``len(value) <= max_length``. - """ - - max_length: Annotated[int, Ge(0)] - - -@dataclass(frozen=True, **SLOTS) -class Len(GroupedMetadata): - """ - Len() implies that ``min_length <= len(value) <= max_length``. 
- - Upper bound may be omitted or ``None`` to indicate no upper length bound. - """ - - min_length: Annotated[int, Ge(0)] = 0 - max_length: Optional[Annotated[int, Ge(0)]] = None - - def __iter__(self) -> Iterator[BaseMetadata]: - """Unpack a Len into zone or more single-bounds.""" - if self.min_length > 0: - yield MinLen(self.min_length) - if self.max_length is not None: - yield MaxLen(self.max_length) - - -@dataclass(frozen=True, **SLOTS) -class Timezone(BaseMetadata): - """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive). - - ``Annotated[datetime, Timezone(None)]`` must be a naive datetime. - ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be - tz-aware but any timezone is allowed. - - You may also pass a specific timezone string or timezone object such as - ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that - you only allow a specific timezone, though we note that this is often - a symptom of poor design. - """ - - tz: Union[str, timezone, EllipsisType, None] - - -@dataclass(frozen=True, **SLOTS) -class Predicate(BaseMetadata): - """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values. - - Users should prefer statically inspectable metadata, but if you need the full - power and flexibility of arbitrary runtime predicates... here it is. - - We provide a few predefined predicates for common string constraints: - ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and - ``IsDigit = Predicate(str.isdigit)``. Users are encouraged to use methods which - can be given special handling, and avoid indirection like ``lambda s: s.lower()``. - - Some libraries might have special logic to handle certain predicates, e.g. by - checking for `str.isdigit` and using its presence to both call custom logic to - enforce digit-only strings, and customise some generated external schema. - - We do not specify what behaviour should be expected for predicates that raise - an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently - skip invalid constraints, or statically raise an error; or it might try calling it - and then propogate or discard the resulting exception. 
- """ - - func: Callable[[Any], bool] - - -StrType = TypeVar("StrType", bound=str) - -LowerCase = Annotated[StrType, Predicate(str.islower)] -UpperCase = Annotated[StrType, Predicate(str.isupper)] -IsDigits = Annotated[StrType, Predicate(str.isdigit)] -IsAscii = Annotated[StrType, Predicate(str.isascii)] diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index c2aa45e..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types/__pycache__/test_cases.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/annotated_types/__pycache__/test_cases.cpython-311.pyc deleted file mode 100644 index 228ef1c..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/annotated_types/__pycache__/test_cases.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types/py.typed b/venv_flaskchat/lib/python3.11/site-packages/annotated_types/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv_flaskchat/lib/python3.11/site-packages/annotated_types/test_cases.py b/venv_flaskchat/lib/python3.11/site-packages/annotated_types/test_cases.py deleted file mode 100644 index ae2c084..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/annotated_types/test_cases.py +++ /dev/null @@ -1,133 +0,0 @@ -import sys -from datetime import date, datetime, timedelta, timezone -from decimal import Decimal -from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple - -if sys.version_info < (3, 9): - from typing_extensions import Annotated -else: - from typing import Annotated - -import annotated_types as at - - -class Case(NamedTuple): - """ - A test case for `annotated_types`. 
- """ - - annotation: Any - valid_cases: Iterable[Any] - invalid_cases: Iterable[Any] - - -def cases() -> Iterable[Case]: - # Gt, Ge, Lt, Le - yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Gt(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(date(2000, 1, 1))], - [date(2000, 1, 2), date(2000, 1, 3)], - [date(2000, 1, 1), date(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(Decimal('1.123'))], - [Decimal('1.1231'), Decimal('123')], - [Decimal('1.123'), Decimal('0')], - ) - - yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1)) - yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Ge(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(1998, 1, 1), datetime(1999, 12, 31)], - ) - - yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4)) - yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9)) - yield Case( - Annotated[datetime, at.Lt(datetime(2000, 1, 1))], - [datetime(1999, 12, 31), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000)) - yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9)) - yield Case( - Annotated[datetime, at.Le(datetime(2000, 1, 1))], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - # Interval - yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1)) - yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1)) - yield Case( - Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(2000, 1, 4)], - ) - - yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4)) - yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1)) - - # lengths - - yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - - yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - - yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10)) - yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234')) - - yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}]) - yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4})) - yield Case(Annotated[Tuple[int, ...], 
at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4))) - - # Timezone - - yield Case( - Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)] - ) - yield Case( - Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)] - ) - yield Case( - Annotated[datetime, at.Timezone(timezone.utc)], - [datetime(2000, 1, 1, tzinfo=timezone.utc)], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - yield Case( - Annotated[datetime, at.Timezone('Europe/London')], - [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - - # predicate types - - yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom']) - yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC']) - yield Case(at.IsDigits[str], ['123'], ['', 'ab', 'a1b2']) - yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀']) - - yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5]) - - # custom GroupedMetadata - class MyCustomGroupedMetadata(at.GroupedMetadata): - def __iter__(self) -> Iterator[at.Predicate]: - yield at.Predicate(lambda x: float(x).is_integer()) - - yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5]) diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/LICENSE b/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/LICENSE deleted file mode 100644 index 525888d..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/LICENSE +++ /dev/null @@ -1,376 +0,0 @@ -Mozilla Public License Version 2.0 -================================== - -Copyright 2009-2022 Joshua Bronson. All rights reserved. - - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. 
"Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. -Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. 
Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. However, You may do so only on Your own behalf, and not on -behalf of any Contributor. 
You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. 
No use of any Covered Software is * -* authorized under this License except under this disclaimer. * -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. 
- -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. - -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/METADATA deleted file mode 100644 index dc152d0..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/METADATA +++ /dev/null @@ -1,298 +0,0 @@ -Metadata-Version: 2.1 -Name: bidict -Version: 0.22.1 -Summary: The bidirectional mapping library for Python. -Author-email: Joshua Bronson -License: MPL 2.0 -Project-URL: Homepage, https://bidict.readthedocs.io -Project-URL: Repository, https://github.com/jab/bidict -Project-URL: Documentation, https://bidict.readthedocs.io -Project-URL: Donate, https://github.com/sponsors/jab -Project-URL: Changelog, https://bidict.readthedocs.io/changelog.html -Project-URL: Chat, https://gitter.im/jab/bidict -Project-URL: Source Code, https://github.com/jab/bidict -Project-URL: Issue Tracker, https://github.com/jab/bidict/issues -Project-URL: Enterprise Support, https://bidict.readthedocs.io/#enterprise-support -Keywords: bidict,bimap,bidirectional,dict,dictionary,mapping,collections -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Typing :: Typed -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE -Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: sphinx-copybutton ; extra == 'docs' -Requires-Dist: furo ; extra == 'docs' -Provides-Extra: lint -Requires-Dist: pre-commit ; extra == 'lint' -Provides-Extra: test -Requires-Dist: hypothesis ; extra == 'test' -Requires-Dist: pytest ; extra == 'test' -Requires-Dist: pytest-benchmark[histogram] ; extra == 'test' -Requires-Dist: pytest-cov ; extra == 'test' -Requires-Dist: pytest-xdist ; extra == 'test' -Requires-Dist: sortedcollections ; extra == 'test' -Requires-Dist: sortedcontainers ; extra == 'test' -Requires-Dist: sphinx ; extra == 'test' - -.. role:: doc -.. 
(Forward declaration for the "doc" role that Sphinx defines for interop with renderers that - are often used to show this doc and that are unaware of Sphinx (GitHub.com, PyPI.org, etc.). - Use :doc: rather than :ref: here for better interop as well.) - - -bidict -====== - -*The bidirectional mapping library for Python.* - - -Status ------- - -.. image:: https://img.shields.io/pypi/v/bidict.svg - :target: https://pypi.org/project/bidict - :alt: Latest release - -.. image:: https://img.shields.io/readthedocs/bidict/main.svg - :target: https://bidict.readthedocs.io/en/main/ - :alt: Documentation - -.. image:: https://github.com/jab/bidict/workflows/Tests/badge.svg - :target: https://github.com/jab/bidict/actions - :alt: GitHub Actions CI status - -.. image:: https://img.shields.io/pypi/l/bidict.svg - :target: https://raw.githubusercontent.com/jab/bidict/main/LICENSE - :alt: License - -.. image:: https://static.pepy.tech/badge/bidict - :target: https://pepy.tech/project/bidict - :alt: PyPI Downloads - -.. image:: https://img.shields.io/github/sponsors/jab - :target: https://github.com/sponsors/jab - :alt: Sponsors on GitHub - -.. image:: https://img.shields.io/badge/GitHub-sponsor-ff69b4 - :target: https://github.com/sponsors/jab - :alt: Sponsor on GitHub - - -Features --------- - -- Depended on by - Google, Venmo, CERN, Baidu, Tencent, - and teams across the world since 2009 - -- Familiar, Pythonic APIs - that are carefully designed for - safety, simplicity, flexibility, and ergonomics - -- Lightweight, with no runtime dependencies - outside Python's standard library - -- Implemented in - concise, well-factored, fully type-hinted Python code - that is optimized for running efficiently - as well as for long-term maintenance and stability - (not to mention `joy <#learning-from-bidict>`__ :) - -- Extensively `documented `__ - -- 100% test coverage - running continuously across all supported Python versions - - -Installation ------------- - -``pip install bidict`` - - -Quick Start ------------ - -.. code:: python - - >>> from bidict import bidict - >>> element_by_symbol = bidict({'H': 'hydrogen'}) - >>> element_by_symbol['H'] - 'hydrogen' - >>> element_by_symbol.inverse['hydrogen'] - 'H' - - -For more usage documentation, -head to the :doc:`intro` [#fn-intro]_ -and proceed from there. - - -Enterprise Support ------------------- - -Enterprise-level support for bidict can be obtained via the -`Tidelift subscription `__ -or by `contacting me directly `__. - -I have a US-based LLC set up for invoicing, -and I have 15+ years of professional experience -delivering software and support to companies successfully. - -You can also sponsor my work through platforms like GitHub Sponsors. -See the `Sponsoring <#sponsoring>`__ section below for details, -including rationale and examples of companies -supporting the open source projects they depend on. - - -Voluntary Community Support ---------------------------- - -Please search through already-asked questions and answers -in `GitHub Discussions `__ -and the `issue tracker `__ -in case your question has already been addressed. - -Otherwise, please feel free to -`start a new discussion `__ -or `create a new issue `__ on GitHub, -or ask in the `bidict chatroom `__ -for voluntary community support. 
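For illustration only (a sketch against the bidict 0.22.1 API shown in the deleted sources in this diff, not text from the upstream README): the Quick Start above pairs with bidict's uniqueness guarantee, under which assigning an already-present value raises, while ``forceput`` overwrites.

.. code:: python

    >>> from bidict import bidict, ValueDuplicationError
    >>> b = bidict({'H': 'hydrogen'})
    >>> b['He'] = 'helium'            # forward and inverse stay in sync
    >>> b.inverse['helium']
    'He'
    >>> try:
    ...     b['X'] = 'hydrogen'       # 'hydrogen' already maps back to 'H'
    ... except ValueDuplicationError:
    ...     print('value already present')
    value already present
    >>> b.forceput('X', 'hydrogen')   # overwrite: drops the old ('H', 'hydrogen') item
    >>> b.inverse['hydrogen']
    'X'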
- - -Notice of Usage ---------------- - -If you use bidict, -and especially if your usage or your organization is significant in some way, -please let me know in any of the following ways: - -- `star bidict on GitHub `__ -- post in `GitHub Discussions `__ -- leave a message in the `chat room `__ -- `email me `__ - - -Changelog ---------- - -For bidict release notes, see the :doc:`changelog`. [#fn-changelog]_ - - -Release Notifications ---------------------- - -.. duplicated in CHANGELOG.rst: - (would use `.. include::` but GitHub doesn't understand it) - -Watch `bidict releases on GitHub `__ -to be notified when new versions of bidict are published. -Click the "Watch" dropdown, choose "Custom", and then choose "Releases". - - -Learning from bidict --------------------- - -One of the best things about bidict -is that it touches a surprising number of -interesting Python corners, -especially given its small size and scope. - -Check out :doc:`learning-from-bidict` [#fn-learning]_ -if you're interested in learning more. - - -Contributing ------------- - -I have been bidict's sole maintainer -and `active contributor `__ -since I started the project ~15 years ago. - -Your help would be most welcome! -See the :doc:`contributors-guide` [#fn-contributing]_ -for more information. - - -Sponsoring ----------- - -.. duplicated in CONTRIBUTING.rst - (would use `.. include::` but GitHub doesn't understand it) - -.. image:: https://img.shields.io/badge/GitHub-sponsor-ff69b4 - :target: https://github.com/sponsors/jab - :alt: Sponsor through GitHub - -.. image:: https://img.shields.io/github/sponsors/jab - :target: https://github.com/sponsors/jab - :alt: Sponsors on GitHub - -Bidict is the product of thousands of hours of my unpaid work -over the ~15 years that I've been the sole maintainer. - -If bidict has helped you or your company accomplish your work, -please `sponsor my work through GitHub `__ -and/or ask your company to do the same. - -Choose a tier and GitHub handles everything else. -Your GitHub sponsorship will automatically go -on the same bill you already have set up with GitHub, -so after the one-click signup, there's nothing else to do. - -See the following for rationale and examples of companies -supporting the open source projects they depend on -in this manner: - -- ``__ -- ``__ -- ``__ - -.. - ``__ -.. - ``__ -.. - ``__ -.. - ``__ - -You can also support my work through -`Gumroad `__ or -`PayPal `__, -or through a support engagement with my LLC. -See `Enterprise Support <#enterprise-support>`__ -above for details. - - -Finding Documentation ---------------------- - -If you're viewing this on ``__, -note that multiple versions of the documentation are available, -and you can choose a different version using the popup menu at the bottom-right. -Please make sure you're viewing the version of the documentation -that corresponds to the version of bidict you'd like to use. - -If you're viewing this on GitHub, PyPI, or some other place -that can't render and link this documentation properly -and are seeing broken links, -try these alternate links instead: - -.. [#fn-intro] ``__ | ``__ - -.. [#fn-changelog] ``__ | ``__ - -.. [#fn-learning] ``__ | ``__ - -.. [#fn-contributing] ``__ | ``__ - - -.. 
image:: https://static.scarf.sh/a.png?x-pxid=05e3c4e4-eaa7-41a1-84c2-ec14413115f8 diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/RECORD deleted file mode 100644 index 0d8fcc6..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/RECORD +++ /dev/null @@ -1,35 +0,0 @@ -bidict-0.22.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -bidict-0.22.1.dist-info/LICENSE,sha256=vgSHOYu6COd3K0B3tjd4e5SXO1PESzy91sfasQ7RUDU,16784 -bidict-0.22.1.dist-info/METADATA,sha256=tL98SDf3m4_wbLHauTk_5d9H2No-JFaIxjpppoDH2eY,10214 -bidict-0.22.1.dist-info/RECORD,, -bidict-0.22.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 -bidict-0.22.1.dist-info/top_level.txt,sha256=WuQO02jp0ODioS7sJoaHg3JJ5_3h6Sxo9RITvNGPYmc,7 -bidict/__init__.py,sha256=MkSZDzvPG0w3qUYhGK7-y_v1ptSXsj_AN_ms-Eaf_gg,4402 -bidict/__pycache__/__init__.cpython-311.pyc,, -bidict/__pycache__/_abc.cpython-311.pyc,, -bidict/__pycache__/_base.cpython-311.pyc,, -bidict/__pycache__/_bidict.cpython-311.pyc,, -bidict/__pycache__/_dup.cpython-311.pyc,, -bidict/__pycache__/_exc.cpython-311.pyc,, -bidict/__pycache__/_frozenbidict.cpython-311.pyc,, -bidict/__pycache__/_frozenordered.cpython-311.pyc,, -bidict/__pycache__/_iter.cpython-311.pyc,, -bidict/__pycache__/_named.cpython-311.pyc,, -bidict/__pycache__/_orderedbase.cpython-311.pyc,, -bidict/__pycache__/_orderedbidict.cpython-311.pyc,, -bidict/__pycache__/_typing.cpython-311.pyc,, -bidict/__pycache__/metadata.cpython-311.pyc,, -bidict/_abc.py,sha256=4fpjOPr8DmTkHaVxQycj_UpjcCnTCg0HyM6tIChCUAk,3161 -bidict/_base.py,sha256=tH8Xt2bYkDXRK_hvKxtpEEU8varu2HJpxiDldQ4FhxA,24395 -bidict/_bidict.py,sha256=XrthGSjDIHt9NYAWJ1bogawflZFzYMVQUgEcIA7n3UA,7293 -bidict/_dup.py,sha256=lYD9eqXYcYQ7V5UkcbmQPUZRLpMm7imSFY8isb1p-iw,1986 -bidict/_exc.py,sha256=YccvFsU_44tu0D__pNh2sPn0EoIt_1H_ZKNJsbY4Svo,1065 -bidict/_frozenbidict.py,sha256=lMCPYVovjRBPP7mAJm_lhRnAh1VP2zxHFmfWJjvA79M,1830 -bidict/_frozenordered.py,sha256=t6RIm9xoAp3bv3vZHSpFvqmEitEBhv0XBAFTI8SK7iE,2115 -bidict/_iter.py,sha256=H5UTVAKp0G_V6ZlaqvyRYQyww-WnribAZm4kiaUBnug,1537 -bidict/_named.py,sha256=r_r7VZDk62oRbtwEvF-z42zDQfajnXlQGxZaa1nlKjQ,4006 -bidict/_orderedbase.py,sha256=r5lw22Z2VShEQnjdY-YoWVaHBYb_asTY9xFoYmKv4ww,9248 -bidict/_orderedbidict.py,sha256=v8EWAOVDIMAM2XxbKgiGFU_p8noWd_tbjNrdlQ-sBEE,6940 -bidict/_typing.py,sha256=F9fJScCGi-36vZI8bw7C0uZvHWl_i6gt94sD7zK09ww,1166 -bidict/metadata.py,sha256=V0wvZfLwi6nO5eNm6HvUCtISAi2J0G5zty0kf_ETLEw,574 -bidict/py.typed,sha256=RJao5SVFYIp8IfbxhL_SpZkBQYe3XXzPlobSRdh4B_c,16 diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/WHEEL b/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/WHEEL deleted file mode 100644 index 57e3d84..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.38.4) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/top_level.txt b/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/top_level.txt deleted file mode 100644 index 6ff5b04..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict-0.22.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -bidict diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__init__.py 
b/venv_flaskchat/lib/python3.11/site-packages/bidict/__init__.py deleted file mode 100644 index 7ad180c..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/__init__.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -#============================================================================== -# * Welcome to the bidict source code * -#============================================================================== - -# Reading through the code? You'll find a "Code review nav" comment like the one -# below at the top and bottom of the key source files. Follow these cues to take -# a path through the code that's optimized for familiarizing yourself with it. -# -# If you're not reading this on https://github.com/jab/bidict already, go there -# to ensure you have the latest version of the code. While there, you can also -# star the project, watch it for updates, fork the code, and submit an issue or -# pull request with any proposed changes. More information can be found linked -# from README.rst, which is also shown on https://github.com/jab/bidict. - -# * Code review nav * -#============================================================================== -# Current: __init__.py Next: _abc.py → -#============================================================================== - - -"""The bidirectional mapping library for Python. - ----- - -bidict by example: - -.. code-block:: python - - >>> from bidict import bidict - >>> element_by_symbol = bidict({'H': 'hydrogen'}) - >>> element_by_symbol['H'] - 'hydrogen' - >>> element_by_symbol.inverse['hydrogen'] - 'H' - - -Please see https://github.com/jab/bidict for the most up-to-date code and -https://bidict.readthedocs.io for the most up-to-date documentation -if you are reading this elsewhere. - ----- - -.. :copyright: (c) 2009-2022 Joshua Bronson. -.. :license: MPLv2. See LICENSE for details. -""" - -# Use private aliases to not re-export these publicly (for Sphinx automodule with imported-members). 
-from __future__ import annotations as _annotations -from sys import version_info as _version_info - - -if _version_info < (3, 7): # pragma: no cover - raise ImportError('Python 3.7+ is required.') - - -from contextlib import suppress as _suppress - -from ._abc import BidirectionalMapping as BidirectionalMapping, MutableBidirectionalMapping as MutableBidirectionalMapping -from ._base import BidictBase as BidictBase, GeneratedBidictInverse as GeneratedBidictInverse, BidictKeysView as BidictKeysView -from ._bidict import MutableBidict as MutableBidict, bidict as bidict -from ._frozenbidict import frozenbidict as frozenbidict -from ._frozenordered import FrozenOrderedBidict as FrozenOrderedBidict -from ._named import NamedBidictBase as NamedBidictBase, namedbidict as namedbidict -from ._orderedbase import OrderedBidictBase as OrderedBidictBase -from ._orderedbidict import OrderedBidict as OrderedBidict -from ._dup import ON_DUP_DEFAULT as ON_DUP_DEFAULT, ON_DUP_RAISE as ON_DUP_RAISE, ON_DUP_DROP_OLD as ON_DUP_DROP_OLD -from ._dup import RAISE as RAISE, DROP_OLD as DROP_OLD, DROP_NEW as DROP_NEW, OnDup as OnDup, OD as OD -from ._exc import BidictException as BidictException, DuplicationError as DuplicationError -from ._exc import KeyDuplicationError as KeyDuplicationError, ValueDuplicationError as ValueDuplicationError, KeyAndValueDuplicationError as KeyAndValueDuplicationError -from ._iter import inverted as inverted -from .metadata import ( - __author__ as __author__, __copyright__ as __copyright__, __description__ as __description__, - __license__ as __license__, __url__ as __url__, __version__ as __version__, -) - - -#: Alias -OnDupAction = OD - - -# Set __module__ of re-exported classes to the 'bidict' top-level module, so that e.g. -# 'bidict.bidict' shows up as 'bidict.bidict` rather than 'bidict._bidict.bidict'. 
-for _obj in tuple(locals().values()): # pragma: no cover - if not getattr(_obj, '__module__', '').startswith('bidict.'): - continue - with _suppress(AttributeError): - _obj.__module__ = 'bidict' - - -# * Code review nav * -#============================================================================== -# Current: __init__.py Next: _abc.py → -#============================================================================== diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index a956370..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_abc.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_abc.cpython-311.pyc deleted file mode 100644 index 50872b4..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_abc.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_base.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_base.cpython-311.pyc deleted file mode 100644 index 45ecc9b..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_base.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_bidict.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_bidict.cpython-311.pyc deleted file mode 100644 index f3917f5..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_bidict.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_dup.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_dup.cpython-311.pyc deleted file mode 100644 index 6dc6a6c..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_dup.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_exc.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_exc.cpython-311.pyc deleted file mode 100644 index 018dbd5..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_exc.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_frozenbidict.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_frozenbidict.cpython-311.pyc deleted file mode 100644 index d8dbe51..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_frozenbidict.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_frozenordered.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_frozenordered.cpython-311.pyc deleted file mode 100644 index a7f08b2..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_frozenordered.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_iter.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_iter.cpython-311.pyc deleted file mode 100644 index 590a296..0000000 Binary 
files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_iter.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_named.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_named.cpython-311.pyc deleted file mode 100644 index 5633cdf..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_named.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_orderedbase.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_orderedbase.cpython-311.pyc deleted file mode 100644 index 7c8966b..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_orderedbase.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_orderedbidict.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_orderedbidict.cpython-311.pyc deleted file mode 100644 index 0cef973..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_orderedbidict.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_typing.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_typing.cpython-311.pyc deleted file mode 100644 index 21cce9f..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/_typing.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/metadata.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/metadata.cpython-311.pyc deleted file mode 100644 index da501a2..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/bidict/__pycache__/metadata.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_abc.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_abc.py deleted file mode 100644 index 280d1c1..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_abc.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -#============================================================================== -# ← Prev: __init__.py Current: _abc.py Next: _base.py → -#============================================================================== - - -"""Provide the :class:`BidirectionalMapping` abstract base class.""" - -from __future__ import annotations -from abc import abstractmethod -import typing as t - -from ._typing import KT, VT - - -class BidirectionalMapping(t.Mapping[KT, VT]): - """Abstract base class for bidirectional mapping types. - - Extends :class:`collections.abc.Mapping` primarily by adding the - (abstract) :attr:`inverse` property, - which implementors of :class:`BidirectionalMapping` - should override to return a reference to the inverse - :class:`BidirectionalMapping` instance. - """ - - __slots__ = () - - @property - @abstractmethod - def inverse(self) -> BidirectionalMapping[VT, KT]: - """The inverse of this bidirectional mapping instance. 
- - *See also* :attr:`bidict.BidictBase.inverse`, :attr:`bidict.BidictBase.inv` - - :raises NotImplementedError: Meant to be overridden in subclasses. - """ - # The @abstractmethod decorator prevents BidirectionalMapping subclasses from being - # instantiated unless they override ``.inverse``. So this implementation of ``.inverse`` - # should never be unintentionally resolved from subclass instances. But raise here - # anyway, so it's extra clear that this implementation should never be called. - raise NotImplementedError - - def __inverted__(self) -> t.Iterator[tuple[VT, KT]]: - """Get an iterator over the items in :attr:`inverse`. - - This is functionally equivalent to iterating over the items in the - forward mapping and inverting each one on the fly, but this provides a - more efficient implementation: Assuming the already-inverted items - are stored in :attr:`inverse`, just return an iterator over them directly. - - Providing this default implementation enables external functions, - particularly :func:`~bidict.inverted`, to use this optimized - implementation when available, instead of having to invert on the fly. - - *See also* :func:`bidict.inverted` - """ - return iter(self.inverse.items()) - - -class MutableBidirectionalMapping(BidirectionalMapping[KT, VT], t.MutableMapping[KT, VT]): - """Abstract base class for mutable bidirectional mapping types.""" - - __slots__ = () - - -# * Code review nav * -#============================================================================== -# ← Prev: __init__.py Current: _abc.py Next: _base.py → -#============================================================================== diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_base.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_base.py deleted file mode 100644 index 97be9d4..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_base.py +++ /dev/null @@ -1,552 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- - -# * Code review nav * -# (see comments in __init__.py) -#============================================================================== -# ← Prev: _abc.py Current: _base.py Next: _frozenbidict.py → -#============================================================================== - - -"""Provide :class:`BidictBase`.""" - -from __future__ import annotations -from functools import partial -from itertools import starmap -from operator import eq -from types import MappingProxyType -import typing as t -import weakref - -from ._abc import BidirectionalMapping -from ._dup import ON_DUP_DEFAULT, RAISE, DROP_OLD, DROP_NEW, OnDup -from ._exc import DuplicationError, KeyDuplicationError, ValueDuplicationError, KeyAndValueDuplicationError -from ._iter import iteritems, inverted -from ._typing import KT, VT, MISSING, OKT, OVT, Items, MapOrItems, TypeAlias - - -OldKV: TypeAlias = 'tuple[OKT[KT], OVT[VT]]' -DedupResult: TypeAlias = 'OldKV[KT, VT] | None' -Write: TypeAlias = 'list[t.Callable[[], None]]' -Unwrite: TypeAlias = Write -PreparedWrite: TypeAlias = 'tuple[Write, Unwrite]' -BT = t.TypeVar('BT', bound='BidictBase[t.Any, t.Any]') - - -class BidictKeysView(t.KeysView[KT], t.ValuesView[KT]): - """Since the keys of a bidict are the values of its inverse (and vice versa), - the :class:`~collections.abc.ValuesView` result of calling *bi.values()* - is also a :class:`~collections.abc.KeysView` of *bi.inverse*. - """ - - -def get_arg(*args: MapOrItems[KT, VT]) -> MapOrItems[KT, VT]: - """Ensure there's only a single arg in *args*, then return it.""" - if len(args) > 1: - raise TypeError(f'Expected at most 1 positional argument, got {len(args)}') - return args[0] if args else () - - -class BidictBase(BidirectionalMapping[KT, VT]): - """Base class implementing :class:`BidirectionalMapping`.""" - - #: The default :class:`~bidict.OnDup` - #: that governs behavior when a provided item - #: duplicates the key or value of other item(s). - #: - #: *See also* - #: :ref:`basic-usage:Values Must Be Unique` (https://bidict.rtfd.io/basic-usage.html#values-must-be-unique), - #: :doc:`extending` (https://bidict.rtfd.io/extending.html) - on_dup = ON_DUP_DEFAULT - - _fwdm: t.MutableMapping[KT, VT] #: the backing forward mapping (*key* → *val*) - _invm: t.MutableMapping[VT, KT] #: the backing inverse mapping (*val* → *key*) - - # Use Any rather than KT/VT in the following to avoid "ClassVar cannot contain type variables" errors: - _fwdm_cls: t.ClassVar[t.Type[t.MutableMapping[t.Any, t.Any]]] = dict #: class of the backing forward mapping - _invm_cls: t.ClassVar[t.Type[t.MutableMapping[t.Any, t.Any]]] = dict #: class of the backing inverse mapping - - #: The class of the inverse bidict instance. - _inv_cls: t.ClassVar[t.Type[BidictBase[t.Any, t.Any]]] - - #: Used by :meth:`__repr__` for the contained items. - _repr_delegate: t.ClassVar[t.Any] = dict - - def __init_subclass__(cls) -> None: - super().__init_subclass__() - cls._init_class() - - @classmethod - def _init_class(cls) -> None: - cls._ensure_inv_cls() - cls._set_reversed() - - __reversed__: t.Any - - @classmethod - def _set_reversed(cls) -> None: - """Set __reversed__ for subclasses that do not set it explicitly - according to whether backing mappings are reversible. - """ - if cls is not BidictBase: - resolved = cls.__reversed__ - overridden = resolved is not BidictBase.__reversed__ - if overridden: # E.g. 
OrderedBidictBase, OrderedBidict, FrozenOrderedBidict - return - # The following will be False for MutableBidict, bidict, and frozenbidict on Python < 3.8, - # and True for them on 3.8+ (where dicts are reversible). Will also be True for custom - # subclasses like SortedBidict (see https://bidict.rtfd.io/extending.html#sortedbidict-recipes). - backing_reversible = all(issubclass(i, t.Reversible) for i in (cls._fwdm_cls, cls._invm_cls)) - cls.__reversed__ = _fwdm_reversed if backing_reversible else None - - @classmethod - def _ensure_inv_cls(cls) -> None: - """Ensure :attr:`_inv_cls` is set, computing it dynamically if necessary. - - See: :ref:`extending:Dynamic Inverse Class Generation` - (https://bidict.rtfd.io/extending.html#dynamic-inverse-class-generation) - - Most subclasses will be their own inverse classes, but some - (e.g. those created via namedbidict) will have distinct inverse classes. - """ - if cls.__dict__.get('_inv_cls'): - return # Already set, nothing to do. - cls._inv_cls = cls._make_inv_cls() - - @classmethod - def _make_inv_cls(cls: t.Type[BT], _miss: t.Any = object()) -> t.Type[BT]: - diff = cls._inv_cls_dict_diff() - cls_is_own_inv = all(getattr(cls, k, _miss) == v for (k, v) in diff.items()) - if cls_is_own_inv: - return cls - # Suppress auto-calculation of _inv_cls's _inv_cls since we know it already. - # Works with the guard in BidictBase._ensure_inv_cls() to prevent infinite recursion. - diff['_inv_cls'] = cls - inv_cls = type(f'{cls.__name__}Inv', (cls, GeneratedBidictInverse), diff) - inv_cls.__module__ = cls.__module__ - return t.cast(t.Type[BT], inv_cls) - - @classmethod - def _inv_cls_dict_diff(cls) -> dict[str, t.Any]: - return { - '_fwdm_cls': cls._invm_cls, - '_invm_cls': cls._fwdm_cls, - } - - @t.overload - def __init__(self, **kw: VT) -> None: ... - @t.overload - def __init__(self, __m: t.Mapping[KT, VT], **kw: VT) -> None: ... - @t.overload - def __init__(self, __i: Items[KT, VT], **kw: VT) -> None: ... - - def __init__(self, *args: MapOrItems[KT, VT], **kw: VT) -> None: - """Make a new bidirectional mapping. - The signature behaves like that of :class:`dict`. - Items passed in are added in the order they are passed, - respecting the :attr:`on_dup` class attribute in the process. - """ - self._fwdm = self._fwdm_cls() - self._invm = self._invm_cls() - if args or kw: - self._update(get_arg(*args), kw, rbof=False) - - # If Python ever adds support for higher-kinded types, `inverse` could use them, e.g. - # def inverse(self: BT[KT, VT]) -> BT[VT, KT]: - # Ref: https://github.com/python/typing/issues/548#issuecomment-621571821 - @property - def inverse(self) -> BidictBase[VT, KT]: - """The inverse of this bidirectional mapping instance.""" - # When `bi.inverse` is called for the first time, this method - # computes the inverse instance, stores it for subsequent use, and then - # returns it. It also stores a reference on `bi.inverse` back to `bi`, - # but uses a weakref to avoid creating a reference cycle. Strong references - # to inverse instances are stored in ._inv, and weak references are stored - # in ._invweak. - - # First check if a strong reference is already stored. - inv: BidictBase[VT, KT] | None = getattr(self, '_inv', None) - if inv is not None: - return inv - # Next check if a weak reference is already stored. - invweak = getattr(self, '_invweak', None) - if invweak is not None: - inv = invweak() # Try to resolve a strong reference and return it. - if inv is not None: - return inv - # No luck. 
Compute the inverse reference and store it for subsequent use. - inv = self._make_inverse() - self._inv: BidictBase[VT, KT] | None = inv - self._invweak: weakref.ReferenceType[BidictBase[VT, KT]] | None = None - # Also store a weak reference back to `instance` on its inverse instance, so that - # the second `.inverse` access in `bi.inverse.inverse` hits the cached weakref. - inv._inv = None - inv._invweak = weakref.ref(self) - # In e.g. `bidict().inverse.inverse`, this design ensures that a strong reference - # back to the original instance is retained before its refcount drops to zero, - # avoiding an unintended potential deallocation. - return inv - - def _make_inverse(self) -> BidictBase[VT, KT]: - inv: BidictBase[VT, KT] = self._inv_cls() - inv._fwdm = self._invm - inv._invm = self._fwdm - return inv - - @property - def inv(self) -> BidictBase[VT, KT]: - """Alias for :attr:`inverse`.""" - return self.inverse - - def __repr__(self) -> str: - """See :func:`repr`.""" - clsname = self.__class__.__name__ - items = self._repr_delegate(self.items()) if self else '' - return f'{clsname}({items})' - - def values(self) -> BidictKeysView[VT]: - """A set-like object providing a view on the contained values. - - Since the values of a bidict are equivalent to the keys of its inverse, - this method returns a set-like object for this bidict's values - rather than just a collections.abc.ValuesView. - This object supports set operations like union and difference, - and constant- rather than linear-time containment checks, - and is no more expensive to provide than the less capable - collections.abc.ValuesView would be. - - See :meth:`keys` for more information. - """ - return t.cast(BidictKeysView[VT], self.inverse.keys()) - - def keys(self) -> t.KeysView[KT]: - """A set-like object providing a view on the contained keys. - - When *b._fwdm* is a :class:`dict`, *b.keys()* returns a - *dict_keys* object that behaves exactly the same as - *collections.abc.KeysView(b)*, except for - - - offering better performance - - - being reversible on Python 3.8+ - - - having a .mapping attribute in Python 3.10+ - that exposes a mappingproxy to *b._fwdm*. - """ - fwdm = self._fwdm - kv = fwdm.keys() if isinstance(fwdm, dict) else BidictKeysView(self) - return kv - - def items(self) -> t.ItemsView[KT, VT]: - """A set-like object providing a view on the contained items. - - When *b._fwdm* is a :class:`dict`, *b.items()* returns a - *dict_items* object that behaves exactly the same as - *collections.abc.ItemsView(b)*, except for: - - - offering better performance - - - being reversible on Python 3.8+ - - - having a .mapping attribute in Python 3.10+ - that exposes a mappingproxy to *b._fwdm*. - """ - return self._fwdm.items() if isinstance(self._fwdm, dict) else super().items() - - # The inherited collections.abc.Mapping.__contains__() method is implemented by doing a `try` - # `except KeyError` around `self[key]`. The following implementation is much faster, - # especially in the missing case. - def __contains__(self, key: t.Any) -> bool: - """True if the mapping contains the specified key, else False.""" - return key in self._fwdm - - # The inherited collections.abc.Mapping.__eq__() method is implemented in terms of an inefficient - # `dict(self.items()) == dict(other.items())` comparison, so override it with a - # more efficient implementation. - def __eq__(self, other: object) -> bool: - """*x.__eq__(other) ⟺ x == other* - - Equivalent to *dict(x.items()) == dict(other.items())* - but more efficient. 
- - Note that :meth:`bidict's __eq__() ` implementation - is inherited by subclasses, - in particular by the ordered bidict subclasses, - so even with ordered bidicts, - :ref:`== comparison is order-insensitive ` - (https://bidict.rtfd.io/other-bidict-types.html#eq-is-order-insensitive). - - *See also* :meth:`equals_order_sensitive` - """ - if isinstance(other, t.Mapping): - return self._fwdm.items() == other.items() - # Ref: https://docs.python.org/3/library/constants.html#NotImplemented - return NotImplemented - - def equals_order_sensitive(self, other: object) -> bool: - """Order-sensitive equality check. - - *See also* :ref:`eq-order-insensitive` - (https://bidict.rtfd.io/other-bidict-types.html#eq-is-order-insensitive) - """ - if not isinstance(other, t.Mapping) or len(self) != len(other): - return False - return all(starmap(eq, zip(self.items(), other.items()))) - - def _dedup(self, key: KT, val: VT, on_dup: OnDup) -> DedupResult[KT, VT]: - """Check *key* and *val* for any duplication in self. - - Handle any duplication as per the passed in *on_dup*. - - If (key, val) is already present, return None - since writing (key, val) would be a no-op. - - If duplication is found and the corresponding :class:`~bidict.OnDupAction` is - :attr:`~bidict.DROP_NEW`, return None. - - If duplication is found and the corresponding :class:`~bidict.OnDupAction` is - :attr:`~bidict.RAISE`, raise the appropriate exception. - - If duplication is found and the corresponding :class:`~bidict.OnDupAction` is - :attr:`~bidict.DROP_OLD`, or if no duplication is found, - return *(oldkey, oldval)*. - """ - fwdm, invm = self._fwdm, self._invm - oldval: OVT[VT] = fwdm.get(key, MISSING) - oldkey: OKT[KT] = invm.get(val, MISSING) - isdupkey, isdupval = oldval is not MISSING, oldkey is not MISSING - if isdupkey and isdupval: - if key == oldkey: - assert val == oldval - # (key, val) duplicates an existing item -> no-op. - return None - # key and val each duplicate a different existing item. - if on_dup.kv is RAISE: - raise KeyAndValueDuplicationError(key, val) - if on_dup.kv is DROP_NEW: - return None - assert on_dup.kv is DROP_OLD - # Fall through to the return statement on the last line. - elif isdupkey: - if on_dup.key is RAISE: - raise KeyDuplicationError(key) - if on_dup.key is DROP_NEW: - return None - assert on_dup.key is DROP_OLD - # Fall through to the return statement on the last line. - elif isdupval: - if on_dup.val is RAISE: - raise ValueDuplicationError(val) - if on_dup.val is DROP_NEW: - return None - assert on_dup.val is DROP_OLD - # Fall through to the return statement on the last line. - # else neither isdupkey nor isdupval. - return oldkey, oldval - - def _prep_write(self, newkey: KT, newval: VT, oldkey: OKT[KT], oldval: OVT[VT], save_unwrite: bool) -> PreparedWrite: - """Given (newkey, newval) to insert, return the list of operations necessary to perform the write. - - *oldkey* and *oldval* are as returned by :meth:`_dedup`. - - If *save_unwrite* is true, also return the list of inverse operations necessary to undo the write. - This design allows :meth:`_update` to roll back a partially applied update that fails part-way through - when necessary. This design also allows subclasses that require additional operations to complete - a write to easily extend this implementation. 
For example, :class:`bidict.OrderedBidictBase` calls this - inherited implementation, and then extends the list of ops returned with additional operations - needed to keep its internal linked list nodes consistent with its items' order as changes are made. - """ - fwdm, invm = self._fwdm, self._invm - write: list[t.Callable[[], None]] = [ - partial(fwdm.__setitem__, newkey, newval), - partial(invm.__setitem__, newval, newkey), - ] - unwrite: list[t.Callable[[], None]] - if oldval is MISSING and oldkey is MISSING: # no key or value duplication - # {0: 1, 2: 3} + (4, 5) => {0: 1, 2: 3, 4: 5} - unwrite = [ - partial(fwdm.__delitem__, newkey), - partial(invm.__delitem__, newval), - ] if save_unwrite else [] - elif oldval is not MISSING and oldkey is not MISSING: # key and value duplication across two different items - # {0: 1, 2: 3} + (0, 3) => {0: 3} - write.extend(( - partial(fwdm.__delitem__, oldkey), - partial(invm.__delitem__, oldval), - )) - unwrite = [ - partial(fwdm.__setitem__, newkey, oldval), - partial(invm.__setitem__, oldval, newkey), - partial(fwdm.__setitem__, oldkey, newval), - partial(invm.__setitem__, newval, oldkey), - ] if save_unwrite else [] - elif oldval is not MISSING: # just key duplication - # {0: 1, 2: 3} + (2, 4) => {0: 1, 2: 4} - write.append(partial(invm.__delitem__, oldval)) - unwrite = [ - partial(fwdm.__setitem__, newkey, oldval), - partial(invm.__setitem__, oldval, newkey), - partial(invm.__delitem__, newval), - ] if save_unwrite else [] - else: - assert oldkey is not MISSING # just value duplication - # {0: 1, 2: 3} + (4, 3) => {0: 1, 4: 3} - write.append(partial(fwdm.__delitem__, oldkey)) - unwrite = [ - partial(fwdm.__setitem__, oldkey, newval), - partial(invm.__setitem__, newval, oldkey), - partial(fwdm.__delitem__, newkey), - ] if save_unwrite else [] - return write, unwrite - - def _update( - self, - arg: MapOrItems[KT, VT], - kw: t.Mapping[str, VT] = MappingProxyType({}), - *, - rbof: bool | None = None, - on_dup: OnDup | None = None, - ) -> None: - """Update, possibly rolling back on failure as per *rbof*.""" - # Must process input in a single pass, since arg may be a generator. - if not arg and not kw: - return - if on_dup is None: - on_dup = self.on_dup - if rbof is None: - rbof = RAISE in on_dup - if not self and not kw: - if isinstance(arg, BidictBase): # can skip dup check - self._init_from(arg) - return - # If arg is not a BidictBase, fall through to the general treatment below, - # which includes duplication checking. (If arg is some BidirectionalMapping - # that does not inherit from BidictBase, it's a foreign implementation, so we - # perform duplication checking to err on the safe side.) - - # If we roll back on failure and we know that there are more updates to process than - # already-contained items, our rollback strategy is to update a copy of self (without - # rolling back on failure), and then to become the copy if all updates succeed. - if rbof and isinstance(arg, t.Sized) and len(arg) + len(kw) > len(self): - target = self.copy() - target._update(arg, kw, rbof=False, on_dup=on_dup) - self._init_from(target) - return - - # There are more already-contained items than updates to process, or we don't know - # how many updates there are to process. If we need to roll back on failure, - # save a log of Unwrites as we update so we can undo changes if the update fails. 
- unwrites: list[Unwrite] = [] - append_unwrite = unwrites.append - prep_write = self._prep_write - for (key, val) in iteritems(arg, **kw): - try: - dedup_result = self._dedup(key, val, on_dup) - except DuplicationError: - if rbof: - while unwrites: # apply saved unwrites - unwrite = unwrites.pop() - for unwriteop in unwrite: - unwriteop() - raise - if dedup_result is None: # no-op - continue - write, unwrite = prep_write(key, val, *dedup_result, save_unwrite=rbof) - for writeop in write: # apply the write - writeop() - if rbof and unwrite: # save the unwrite for later application if needed - append_unwrite(unwrite) - - def copy(self: BT) -> BT: - """Make a (shallow) copy of this bidict.""" - # Could just `return self.__class__(self)` here, but the below is faster. The former - # would copy this bidict's items into a new instance one at a time (checking for duplication - # for each item), whereas the below copies from the backing mappings all at once, and foregoes - # item-by-item duplication checking since the backing mappings have been checked already. - return self._from_other(self.__class__, self) - - @staticmethod - def _from_other(bt: t.Type[BT], other: MapOrItems[KT, VT], inv: bool = False) -> BT: - """Fast, private constructor based on :meth:`_init_from`. - - If *inv* is true, return the inverse of the instance instead of the instance itself. - (Useful for pickling with dynamically-generated inverse classes -- see :meth:`__reduce__`.) - """ - inst = bt() - inst._init_from(other) - return t.cast(BT, inst.inverse) if inv else inst - - def _init_from(self, other: MapOrItems[KT, VT]) -> None: - """Fast init from *other*, bypassing item-by-item duplication checking.""" - self._fwdm.clear() - self._invm.clear() - self._fwdm.update(other) - # If other is a bidict, use its existing backing inverse mapping, otherwise - # other could be a generator that's now exhausted, so invert self._fwdm on the fly. - inv = other.inverse if isinstance(other, BidictBase) else inverted(self._fwdm) - self._invm.update(inv) - - #: Used for the copy protocol. - #: *See also* the :mod:`copy` module - __copy__ = copy - - def __or__(self: BT, other: t.Mapping[KT, VT]) -> BT: - """Return self|other.""" - if not isinstance(other, t.Mapping): - return NotImplemented - new = self.copy() - new._update(other, rbof=False) - return new - - def __ror__(self: BT, other: t.Mapping[KT, VT]) -> BT: - """Return other|self.""" - if not isinstance(other, t.Mapping): - return NotImplemented - new = self.__class__(other) - new._update(self, rbof=False) - return new - - def __len__(self) -> int: - """The number of contained items.""" - return len(self._fwdm) - - def __iter__(self) -> t.Iterator[KT]: - """Iterator over the contained keys.""" - return iter(self._fwdm) - - def __getitem__(self, key: KT) -> VT: - """*x.__getitem__(key) ⟺ x[key]*""" - return self._fwdm[key] - - def __reduce__(self) -> tuple[t.Any, ...]: - """Return state information for pickling.""" - # If this bidict's class is dynamically generated, pickle the inverse instead, whose - # (presumably not dynamically generated) class the caller is more likely to have a reference to - # somewhere in sys.modules that pickle can discover. - should_invert = isinstance(self, GeneratedBidictInverse) - cls, init_from = (self._inv_cls, self.inverse) if should_invert else (self.__class__, self) - return self._from_other, (cls, dict(init_from), should_invert) # type: ignore [call-overload] - - -# See BidictBase._set_reversed() above. 
-def _fwdm_reversed(self: BidictBase[KT, t.Any]) -> t.Iterator[KT]: - """Iterator over the contained keys in reverse order.""" - assert isinstance(self._fwdm, t.Reversible) - return reversed(self._fwdm) - - -BidictBase._init_class() - - -class GeneratedBidictInverse: - """Base class for dynamically-generated inverse bidict classes.""" - - -# * Code review nav * -#============================================================================== -# ← Prev: _abc.py Current: _base.py Next: _frozenbidict.py → -#============================================================================== diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_bidict.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_bidict.py deleted file mode 100644 index 2cd1f54..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_bidict.py +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -#============================================================================== -# ← Prev: _frozenbidict.py Current: _bidict.py Next: _orderedbase.py → -#============================================================================== - - -"""Provide :class:`MutableBidict`.""" - -from __future__ import annotations -import typing as t - -from ._abc import MutableBidirectionalMapping -from ._base import BidictBase, get_arg -from ._dup import OnDup, ON_DUP_RAISE, ON_DUP_DROP_OLD -from ._typing import KT, VT, DT, ODT, MISSING, Items, MapOrItems - - -class MutableBidict(BidictBase[KT, VT], MutableBidirectionalMapping[KT, VT]): - """Base class for mutable bidirectional mappings.""" - - if t.TYPE_CHECKING: - @property - def inverse(self) -> MutableBidict[VT, KT]: ... - - def _pop(self, key: KT) -> VT: - val = self._fwdm.pop(key) - del self._invm[val] - return val - - def __delitem__(self, key: KT) -> None: - """*x.__delitem__(y) ⟺ del x[y]*""" - self._pop(key) - - def __setitem__(self, key: KT, val: VT) -> None: - """Set the value for *key* to *val*. - - If *key* is already associated with *val*, this is a no-op. - - If *key* is already associated with a different value, - the old value will be replaced with *val*, - as with dict's :meth:`__setitem__`. - - If *val* is already associated with a different key, - an exception is raised - to protect against accidental removal of the key - that's currently associated with *val*. - - Use :meth:`put` instead if you want to specify different behavior in - the case that the provided key or value duplicates an existing one. - Or use :meth:`forceput` to unconditionally associate *key* with *val*, - replacing any existing items as necessary to preserve uniqueness. - - :raises bidict.ValueDuplicationError: if *val* duplicates that of an - existing item. - - :raises bidict.KeyAndValueDuplicationError: if *key* duplicates the key of an - existing item and *val* duplicates the value of a different - existing item. - """ - self.put(key, val, on_dup=self.on_dup) - - def put(self, key: KT, val: VT, on_dup: OnDup = ON_DUP_RAISE) -> None: - """Associate *key* with *val*, honoring the :class:`OnDup` given in *on_dup*. 
- - For example, if *on_dup* is :attr:`~bidict.ON_DUP_RAISE`, - then *key* will be associated with *val* if and only if - *key* is not already associated with an existing value and - *val* is not already associated with an existing key, - otherwise an exception will be raised. - - If *key* is already associated with *val*, this is a no-op. - - :raises bidict.KeyDuplicationError: if attempting to insert an item - whose key only duplicates an existing item's, and *on_dup.key* is - :attr:`~bidict.RAISE`. - - :raises bidict.ValueDuplicationError: if attempting to insert an item - whose value only duplicates an existing item's, and *on_dup.val* is - :attr:`~bidict.RAISE`. - - :raises bidict.KeyAndValueDuplicationError: if attempting to insert an - item whose key duplicates one existing item's, and whose value - duplicates another existing item's, and *on_dup.kv* is - :attr:`~bidict.RAISE`. - """ - self._update([(key, val)], on_dup=on_dup) - - def forceput(self, key: KT, val: VT) -> None: - """Associate *key* with *val* unconditionally. - - Replace any existing mappings containing key *key* or value *val* - as necessary to preserve uniqueness. - """ - self.put(key, val, on_dup=ON_DUP_DROP_OLD) - - def clear(self) -> None: - """Remove all items.""" - self._fwdm.clear() - self._invm.clear() - - @t.overload - def pop(self, __key: KT) -> VT: ... - @t.overload - def pop(self, __key: KT, __default: DT = ...) -> VT | DT: ... - - def pop(self, key: KT, default: ODT[DT] = MISSING) -> VT | DT: - """*x.pop(k[, d]) → v* - - Remove specified key and return the corresponding value. - - :raises KeyError: if *key* is not found and no *default* is provided. - """ - try: - return self._pop(key) - except KeyError: - if default is MISSING: - raise - return default - - def popitem(self) -> tuple[KT, VT]: - """*x.popitem() → (k, v)* - - Remove and return some item as a (key, value) pair. - - :raises KeyError: if *x* is empty. - """ - key, val = self._fwdm.popitem() - del self._invm[val] - return key, val - - @t.overload # type: ignore [override] # https://github.com/jab/bidict/pull/242#discussion_r825464731 - def update(self, __m: t.Mapping[KT, VT], **kw: VT) -> None: ... - @t.overload - def update(self, __i: Items[KT, VT], **kw: VT) -> None: ... - @t.overload - def update(self, **kw: VT) -> None: ... - - def update(self, *args: MapOrItems[KT, VT], **kw: VT) -> None: - """Like calling :meth:`putall` with *self.on_dup* passed for *on_dup*.""" - if args or kw: - self._update(get_arg(*args), kw) - - @t.overload - def forceupdate(self, __m: t.Mapping[KT, VT], **kw: VT) -> None: ... - @t.overload - def forceupdate(self, __i: Items[KT, VT], **kw: VT) -> None: ... - @t.overload - def forceupdate(self, **kw: VT) -> None: ... - - def forceupdate(self, *args: MapOrItems[KT, VT], **kw: VT) -> None: - """Like a bulk :meth:`forceput`.""" - if args or kw: - self._update(get_arg(*args), kw, on_dup=ON_DUP_DROP_OLD) - - def __ior__(self, other: t.Mapping[KT, VT]) -> MutableBidict[KT, VT]: - """Return self|=other.""" - self.update(other) - return self - - @t.overload - def putall(self, items: t.Mapping[KT, VT], on_dup: OnDup) -> None: ... - @t.overload - def putall(self, items: Items[KT, VT], on_dup: OnDup = ...) -> None: ... - - def putall(self, items: MapOrItems[KT, VT], on_dup: OnDup = ON_DUP_RAISE) -> None: - """Like a bulk :meth:`put`. - - If one of the given items causes an exception to be raised, - none of the items is inserted. 
- """ - if items: - self._update(items, on_dup=on_dup) - - -class bidict(MutableBidict[KT, VT]): - """The main bidirectional mapping type. - - See :ref:`intro:Introduction` and :ref:`basic-usage:Basic Usage` - to get started (also available at https://bidict.rtfd.io). - """ - - if t.TYPE_CHECKING: - @property - def inverse(self) -> bidict[VT, KT]: ... - - -# * Code review nav * -#============================================================================== -# ← Prev: _frozenbidict.py Current: _bidict.py Next: _orderedbase.py → -#============================================================================== diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_dup.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_dup.py deleted file mode 100644 index c1ac9e3..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_dup.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -"""Provide :class:`OnDup` and related functionality.""" - - -from __future__ import annotations -from enum import Enum -import typing as t - - -class OD(Enum): - """An action to take to prevent duplication from occurring.""" - - #: Raise a :class:`~bidict.DuplicationError`. - RAISE = 'RAISE' - #: Overwrite existing items with new items. - DROP_OLD = 'DROP_OLD' - #: Keep existing items and drop new items. - DROP_NEW = 'DROP_NEW' - - def __repr__(self) -> str: - return f'{self.__class__.__name__}.{self.name}' - - -RAISE: t.Final[OD] = OD.RAISE -DROP_OLD: t.Final[OD] = OD.DROP_OLD -DROP_NEW: t.Final[OD] = OD.DROP_NEW - - -class OnDup(t.NamedTuple('_OnDup', [('key', OD), ('val', OD), ('kv', OD)])): - r"""A 3-tuple of :class:`OD`\s specifying how to handle the 3 kinds of duplication. - - *See also* :ref:`basic-usage:Values Must Be Unique` - (https://bidict.rtfd.io/basic-usage.html#values-must-be-unique) - - If *kv* is not specified, *val* will be used for *kv*. - """ - - __slots__ = () - - def __new__(cls, key: OD = DROP_OLD, val: OD = RAISE, kv: OD | None = None) -> OnDup: - """Override to provide user-friendly default values.""" - return super().__new__(cls, key, val, kv or val) - - -#: Default :class:`OnDup` used for the -#: :meth:`~bidict.bidict.__init__`, -#: :meth:`~bidict.bidict.__setitem__`, and -#: :meth:`~bidict.bidict.update` methods. -ON_DUP_DEFAULT: t.Final[OnDup] = OnDup(key=DROP_OLD, val=RAISE, kv=RAISE) -#: An :class:`OnDup` whose members are all :obj:`RAISE`. -ON_DUP_RAISE: t.Final[OnDup] = OnDup(key=RAISE, val=RAISE, kv=RAISE) -#: An :class:`OnDup` whose members are all :obj:`DROP_OLD`. -ON_DUP_DROP_OLD: t.Final[OnDup] = OnDup(key=DROP_OLD, val=DROP_OLD, kv=DROP_OLD) diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_exc.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_exc.py deleted file mode 100644 index 936563b..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_exc.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- - -"""Provide all bidict exceptions.""" - -from __future__ import annotations - - -class BidictException(Exception): - """Base class for bidict exceptions.""" - - -class DuplicationError(BidictException): - """Base class for exceptions raised when uniqueness is violated - as per the :attr:~bidict.RAISE` :class:`~bidict.OnDupAction`. - """ - - -class KeyDuplicationError(DuplicationError): - """Raised when a given key is not unique.""" - - -class ValueDuplicationError(DuplicationError): - """Raised when a given value is not unique.""" - - -class KeyAndValueDuplicationError(KeyDuplicationError, ValueDuplicationError): - """Raised when a given item's key and value are not unique. - - That is, its key duplicates that of another item, - and its value duplicates that of a different other item. - """ diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_frozenbidict.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_frozenbidict.py deleted file mode 100644 index 9a65303..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_frozenbidict.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -#============================================================================== -# ← Prev: _base.py Current: _frozenbidict.py Next: _bidict.py → -#============================================================================== - -"""Provide :class:`frozenbidict`, an immutable, hashable bidirectional mapping type.""" - -from __future__ import annotations -import typing as t - -from ._base import BidictBase -from ._typing import KT, VT - - -class frozenbidict(BidictBase[KT, VT]): - """Immutable, hashable bidict type.""" - - _hash: int - - # Work around lack of support for higher-kinded types in Python. - # Ref: https://github.com/python/typing/issues/548#issuecomment-621571821 - if t.TYPE_CHECKING: - @property - def inverse(self) -> frozenbidict[VT, KT]: ... - - def __hash__(self) -> int: - """The hash of this bidict as determined by its items.""" - if getattr(self, '_hash', None) is None: - # The following is like hash(frozenset(self.items())) - # but more memory efficient. See also: https://bugs.python.org/issue46684 - self._hash = t.ItemsView(self)._hash() - return self._hash - - -# * Code review nav * -#============================================================================== -# ← Prev: _base.py Current: _frozenbidict.py Next: _bidict.py → -#============================================================================== diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_frozenordered.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_frozenordered.py deleted file mode 100644 index 56e4c7a..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_frozenordered.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- - -# * Code review nav * -# (see comments in __init__.py) -#============================================================================== -#← Prev: _orderedbase.py Current: _frozenordered.py Next: _orderedbidict.py → -#============================================================================== - -"""Provide :class:`FrozenOrderedBidict`, an immutable, hashable, ordered bidict.""" - -from __future__ import annotations -import typing as t - -from ._frozenbidict import frozenbidict -from ._orderedbase import OrderedBidictBase -from ._typing import KT, VT - - -class FrozenOrderedBidict(OrderedBidictBase[KT, VT]): - """Hashable, immutable, ordered bidict type. - - Like a hashable :class:`bidict.OrderedBidict` - without the mutating APIs, or like a - reversible :class:`bidict.frozenbidict` even on Python < 3.8. - (All bidicts are order-preserving when never mutated, so frozenbidict is - already order-preserving, but only on Python 3.8+, where dicts are - reversible, are all bidicts (including frozenbidict) also reversible.) - - If you are using Python 3.8+, frozenbidict gives you everything that - FrozenOrderedBidict gives you, but with less space overhead. - On the other hand, using FrozenOrderedBidict when you are depending on - the ordering of the items can make the ordering dependence more explicit. - """ - - __hash__: t.Callable[[t.Any], int] = frozenbidict.__hash__ - - if t.TYPE_CHECKING: - @property - def inverse(self) -> FrozenOrderedBidict[VT, KT]: ... - - -# * Code review nav * -#============================================================================== -#← Prev: _orderedbase.py Current: _frozenordered.py Next: _orderedbidict.py → -#============================================================================== diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_iter.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_iter.py deleted file mode 100644 index c06dc87..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_iter.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -"""Functions for iterating over items in a mapping.""" - -from __future__ import annotations -from operator import itemgetter -import typing as t - -from ._typing import KT, VT, ItemsIter, MapOrItems - - -def iteritems_mapping_or_iterable(arg: MapOrItems[KT, VT]) -> ItemsIter[KT, VT]: - """Yield the items in *arg* based on whether it's a mapping.""" - yield from arg.items() if isinstance(arg, t.Mapping) else arg - - -def iteritems(__arg: MapOrItems[KT, VT], **kw: VT) -> ItemsIter[KT, VT]: - """Yield the items from *arg* and then any from *kw* in the order given.""" - yield from iteritems_mapping_or_iterable(__arg) - yield from kw.items() # type: ignore [misc] - - -swap = itemgetter(1, 0) - - -def inverted(arg: MapOrItems[KT, VT]) -> ItemsIter[VT, KT]: - """Yield the inverse items of the provided object. - - If *arg* has a :func:`callable` ``__inverted__`` attribute, - return the result of calling it. - - Otherwise, return an iterator over the items in `arg`, - inverting each item on the fly. 
- - *See also* :attr:`bidict.BidirectionalMapping.__inverted__` - """ - invattr = getattr(arg, '__inverted__', None) - if callable(invattr): - inv: ItemsIter[VT, KT] = invattr() - return inv - return map(swap, iteritems_mapping_or_iterable(arg)) diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_named.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_named.py deleted file mode 100644 index c0ca4e4..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_named.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -"""Provide :func:`bidict.namedbidict`.""" - -from __future__ import annotations -from sys import _getframe -import typing as t - -from ._base import BidictBase -from ._bidict import bidict -from ._typing import KT, VT - - -class NamedBidictBase: - """Base class that namedbidicts derive from.""" - - -def namedbidict( - typename: str, - keyname: str, - valname: str, - *, - base_type: t.Type[BidictBase[KT, VT]] = bidict, -) -> t.Type[BidictBase[KT, VT]]: - r"""Create a new subclass of *base_type* with custom accessors. - - Like :func:`collections.namedtuple` for bidicts. - - The new class's ``__name__`` and ``__qualname__`` will be set to *typename*, - and its ``__module__`` will be set to the caller's module. - - Instances of the new class will provide access to their - :attr:`inverse ` instances - via the custom *keyname*\_for property, - and access to themselves - via the custom *valname*\_for property. - - *See also* the :ref:`namedbidict usage documentation - ` - (https://bidict.rtfd.io/other-bidict-types.html#namedbidict) - - :raises ValueError: if any of the *typename*, *keyname*, or *valname* - strings is not a valid Python identifier, or if *keyname == valname*. - - :raises TypeError: if *base_type* is not a :class:`bidict.BidictBase` subclass. - Any of the concrete bidict types pictured in the - :ref:`other-bidict-types:Bidict Types Diagram` may be provided - (https://bidict.rtfd.io/other-bidict-types.html#bidict-types-diagram). 
- """ - if not issubclass(base_type, BidictBase): - raise TypeError(f'{base_type} is not a BidictBase subclass') - names = (typename, keyname, valname) - if not all(map(str.isidentifier, names)) or keyname == valname: - raise ValueError(names) - - basename = base_type.__name__ - get_keyname = property(lambda self: keyname, doc='The keyname of this namedbidict.') - get_valname = property(lambda self: valname, doc='The valname of this namedbidict.') - val_by_key_name = f'{valname}_for' - key_by_val_name = f'{keyname}_for' - val_by_key_doc = f'{typename} forward {basename}: {keyname} -> {valname}' - key_by_val_doc = f'{typename} inverse {basename}: {valname} -> {keyname}' - get_val_by_key = property(lambda self: self, doc=val_by_key_doc) - get_key_by_val = property(lambda self: self.inverse, doc=key_by_val_doc) - - class NamedBidict(base_type, NamedBidictBase): # type: ignore [valid-type,misc] # https://github.com/python/mypy/issues/5865 - """NamedBidict.""" - - keyname = get_keyname - valname = get_valname - - @classmethod - def _inv_cls_dict_diff(cls) -> dict[str, t.Any]: - base_diff = super()._inv_cls_dict_diff() - return { - **base_diff, - 'keyname': get_valname, - 'valname': get_keyname, - val_by_key_name: get_key_by_val, - key_by_val_name: get_val_by_key, - } - - NamedInv = NamedBidict._inv_cls - assert NamedInv is not NamedBidict, 'namedbidict classes are not their own inverses' - setattr(NamedBidict, val_by_key_name, get_val_by_key) - setattr(NamedBidict, key_by_val_name, get_key_by_val) - NamedBidict.__name__ = NamedBidict.__qualname__ = typename - NamedInv.__name__ = NamedInv.__qualname__ = f'{typename}Inv' - NamedBidict.__doc__ = f'NamedBidict({basename}) {typename!r}: {keyname} -> {valname}' - NamedInv.__doc__ = f'NamedBidictInv({basename}) {typename!r}: {valname} -> {keyname}' - caller_module = _getframe(1).f_globals.get('__name__', '__main__') - NamedBidict.__module__ = NamedInv.__module__ = caller_module - return NamedBidict diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_orderedbase.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_orderedbase.py deleted file mode 100644 index 2375e78..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_orderedbase.py +++ /dev/null @@ -1,238 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -#============================================================================== -# ← Prev: _bidict.py Current: _orderedbase.py Next: _frozenordered.py → -#============================================================================== - - -"""Provide :class:`OrderedBidictBase`.""" - -from __future__ import annotations -from functools import partial -from weakref import ref as weakref -import typing as t - -from ._base import BidictBase, PreparedWrite -from ._bidict import bidict -from ._iter import iteritems -from ._typing import KT, VT, OKT, OVT, MISSING, Items, MapOrItems - - -IT = t.TypeVar('IT') # instance type -AT = t.TypeVar('AT') # attr type - - -class WeakAttr(t.Generic[IT, AT]): - """Descriptor to automatically manage (de)referencing the given slot as a weakref. - - See https://docs.python.org/3/howto/descriptor.html#managed-attributes - for an intro to using descriptors like this for managed attributes. 
- """ - - def __init__(self, *, slot: str) -> None: - self.slot = slot - - def __set__(self, instance: IT, value: AT) -> None: - setattr(instance, self.slot, weakref(value)) - - def __get__(self, instance: IT, owner: t.Any) -> AT: - return getattr(instance, self.slot)() # type: ignore [no-any-return] - - -class Node: - """A node in a circular doubly-linked list - used to encode the order of items in an ordered bidict. - - A weak reference to the previous node is stored - to avoid creating strong reference cycles. - Referencing/dereferencing the weakref is handled automatically by :class:`WeakAttr`. - """ - - prv: WeakAttr[Node, Node] = WeakAttr(slot='_prv_weak') - __slots__ = ('_prv_weak', 'nxt', '__weakref__') - - def __init__(self, prv: Node, nxt: Node) -> None: - self.prv = prv - self.nxt = nxt - - def unlink(self) -> None: - """Remove self from in between prv and nxt. - Self's references to prv and nxt are retained so it can be relinked (see below). - """ - self.prv.nxt = self.nxt - self.nxt.prv = self.prv - - def relink(self) -> None: - """Restore self between prv and nxt after unlinking (see above).""" - self.prv.nxt = self.nxt.prv = self - - -class SentinelNode(Node): - """Special node in a circular doubly-linked list - that links the first node with the last node. - When its next and previous references point back to itself - it represents an empty list. - """ - - nxt: WeakAttr['SentinelNode', Node] = WeakAttr(slot='_nxt_weak') # type: ignore [assignment] - __slots__ = ('_nxt_weak',) - - def __init__(self) -> None: - super().__init__(self, self) - - def iternodes(self, *, reverse: bool = False) -> t.Iterator[Node]: - """Iterator yielding nodes in the requested order.""" - attr = 'prv' if reverse else 'nxt' - node = getattr(self, attr) - while node is not self: - yield node - node = getattr(node, attr) - - def new_last_node(self) -> Node: - """Create and return a new terminal node.""" - old_last = self.prv - new_last = Node(old_last, self) - old_last.nxt = self.prv = new_last - return new_last - - -class OrderedBidictBase(BidictBase[KT, VT]): - """Base class implementing an ordered :class:`BidirectionalMapping`.""" - - _repr_delegate: t.ClassVar[t.Any] = list - - _node_by_korv: bidict[t.Any, Node] - _bykey: bool - - @t.overload - def __init__(self, __m: t.Mapping[KT, VT], **kw: VT) -> None: ... - @t.overload - def __init__(self, __i: Items[KT, VT], **kw: VT) -> None: ... - @t.overload - def __init__(self, **kw: VT) -> None: ... - - def __init__(self, *args: MapOrItems[KT, VT], **kw: VT) -> None: - """Make a new ordered bidirectional mapping. - The signature behaves like that of :class:`dict`. - Items passed in are added in the order they are passed, - respecting the :attr:`on_dup` class attribute in the process. - - The order in which items are inserted is remembered, - similar to :class:`collections.OrderedDict`. - """ - self._sntl = SentinelNode() - self._node_by_korv = bidict() - self._bykey = True - super().__init__(*args, **kw) - - if t.TYPE_CHECKING: - @property - def inverse(self) -> OrderedBidictBase[VT, KT]: ... 
- - def _make_inverse(self) -> OrderedBidictBase[VT, KT]: - inv = t.cast(OrderedBidictBase[VT, KT], super()._make_inverse()) - inv._sntl = self._sntl - inv._node_by_korv = self._node_by_korv - inv._bykey = not self._bykey - return inv - - def _assoc_node(self, node: Node, key: KT, val: VT) -> None: - korv = key if self._bykey else val - self._node_by_korv.forceput(korv, node) - - def _dissoc_node(self, node: Node) -> None: - del self._node_by_korv.inverse[node] - node.unlink() - - def _init_from(self, other: MapOrItems[KT, VT]) -> None: - """See :meth:`BidictBase._init_from`.""" - super()._init_from(other) - bykey = self._bykey - korv_by_node = self._node_by_korv.inverse - korv_by_node.clear() - korv_by_node_set = korv_by_node.__setitem__ - self._sntl.nxt = self._sntl.prv = self._sntl - new_node = self._sntl.new_last_node - for (k, v) in iteritems(other): - korv_by_node_set(new_node(), k if bykey else v) - - def _prep_write(self, newkey: KT, newval: VT, oldkey: OKT[KT], oldval: OVT[VT], save_unwrite: bool) -> PreparedWrite: - """See :meth:`bidict.BidictBase._prep_write`.""" - write, unwrite = super()._prep_write(newkey, newval, oldkey, oldval, save_unwrite) - assoc, dissoc = self._assoc_node, self._dissoc_node - node_by_korv, bykey = self._node_by_korv, self._bykey - if oldval is MISSING and oldkey is MISSING: # no key or value duplication - # {0: 1, 2: 3} + (4, 5) => {0: 1, 2: 3, 4: 5} - newnode = self._sntl.new_last_node() - write.append(partial(assoc, newnode, newkey, newval)) - if save_unwrite: - unwrite.append(partial(dissoc, newnode)) - elif oldval is not MISSING and oldkey is not MISSING: # key and value duplication across two different items - # {0: 1, 2: 3} + (0, 3) => {0: 3} - # n1, n2 => n1 (collapse n1 and n2 into n1) - # oldkey: 2, oldval: 1, oldnode: n2, newkey: 0, newval: 3, newnode: n1 - if bykey: - oldnode = node_by_korv[oldkey] - newnode = node_by_korv[newkey] - else: - oldnode = node_by_korv[newval] - newnode = node_by_korv[oldval] - write.extend(( - partial(dissoc, oldnode), - partial(assoc, newnode, newkey, newval), - )) - if save_unwrite: - unwrite.extend(( - partial(assoc, newnode, newkey, oldval), - partial(assoc, oldnode, oldkey, newval), - partial(oldnode.relink,), - )) - elif oldval is not MISSING: # just key duplication - # {0: 1, 2: 3} + (2, 4) => {0: 1, 2: 4} - # oldkey: MISSING, oldval: 3, newkey: 2, newval: 4 - node = node_by_korv[newkey if bykey else oldval] - write.append(partial(assoc, node, newkey, newval)) - if save_unwrite: - unwrite.append(partial(assoc, node, newkey, oldval)) - else: - assert oldkey is not MISSING # just value duplication - # {0: 1, 2: 3} + (4, 3) => {0: 1, 4: 3} - # oldkey: 2, oldval: MISSING, newkey: 4, newval: 3 - node = node_by_korv[oldkey if bykey else newval] - write.append(partial(assoc, node, newkey, newval)) - if save_unwrite: - unwrite.append(partial(assoc, node, oldkey, newval)) - return write, unwrite - - def __iter__(self) -> t.Iterator[KT]: - """Iterator over the contained keys in insertion order.""" - return self._iter(reverse=False) - - def __reversed__(self) -> t.Iterator[KT]: - """Iterator over the contained keys in reverse insertion order.""" - return self._iter(reverse=True) - - def _iter(self, *, reverse: bool = False) -> t.Iterator[KT]: - nodes = self._sntl.iternodes(reverse=reverse) - korv_by_node = self._node_by_korv.inverse - if self._bykey: - for node in nodes: - yield korv_by_node[node] - else: - key_by_val = self._invm - for node in nodes: - val = korv_by_node[node] - yield key_by_val[val] - - -# * Code 
review nav * -#============================================================================== -# ← Prev: _bidict.py Current: _orderedbase.py Next: _frozenordered.py → -#============================================================================== diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_orderedbidict.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_orderedbidict.py deleted file mode 100644 index 96a57a1..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_orderedbidict.py +++ /dev/null @@ -1,160 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -#============================================================================== -# ← Prev: _frozenordered.py Current: _orderedbidict.py -#============================================================================== - - -"""Provide :class:`OrderedBidict`.""" - -from __future__ import annotations -from collections.abc import Set -import typing as t - -from ._base import BidictKeysView -from ._bidict import MutableBidict -from ._orderedbase import OrderedBidictBase -from ._typing import KT, VT - - -class OrderedBidict(OrderedBidictBase[KT, VT], MutableBidict[KT, VT]): - """Mutable bidict type that maintains items in insertion order.""" - - if t.TYPE_CHECKING: - @property - def inverse(self) -> OrderedBidict[VT, KT]: ... - - def clear(self) -> None: - """Remove all items.""" - super().clear() - self._node_by_korv.clear() - self._sntl.nxt = self._sntl.prv = self._sntl - - def _pop(self, key: KT) -> VT: - val = super()._pop(key) - node = self._node_by_korv[key if self._bykey else val] - self._dissoc_node(node) - return val - - def popitem(self, last: bool = True) -> tuple[KT, VT]: - """*b.popitem() → (k, v)* - - If *last* is true, - remove and return the most recently added item as a (key, value) pair. - Otherwise, remove and return the least recently added item. - - :raises KeyError: if *b* is empty. - """ - if not self: - raise KeyError('OrderedBidict is empty') - node = getattr(self._sntl, 'prv' if last else 'nxt') - korv = self._node_by_korv.inverse[node] - if self._bykey: - return korv, self._pop(korv) - return self.inverse._pop(korv), korv - - def move_to_end(self, key: KT, last: bool = True) -> None: - """Move the item with the given key to the end if *last* is true, else to the beginning. - - :raises KeyError: if *key* is missing - """ - korv = key if self._bykey else self._fwdm[key] - node = self._node_by_korv[korv] - node.prv.nxt = node.nxt - node.nxt.prv = node.prv - sntl = self._sntl - if last: - lastnode = sntl.prv - node.prv = lastnode - node.nxt = sntl - sntl.prv = lastnode.nxt = node - else: - firstnode = sntl.nxt - node.prv = sntl - node.nxt = firstnode - sntl.nxt = firstnode.prv = node - - # Override the keys() and items() implementations inherited from BidictBase, - # which may delegate to the backing _fwdm dict, since this is a mutable ordered bidict, - # and therefore the ordering of items can get out of sync with the backing mappings - # after mutation. (Need not override values() because it delegates to .inverse.keys().) 
- def keys(self) -> t.KeysView[KT]: - """A set-like object providing a view on the contained keys.""" - return _OrderedBidictKeysView(self) - - def items(self) -> t.ItemsView[KT, VT]: - """A set-like object providing a view on the contained items.""" - return _OrderedBidictItemsView(self) - - -# The following MappingView implementations use the __iter__ implementations -# inherited from their superclass counterparts in collections.abc, so they -# continue to yield items in the correct order even after an OrderedBidict -# is mutated. They also provide a __reversed__ implementation, which is not -# provided by the collections.abc superclasses. -class _OrderedBidictKeysView(BidictKeysView[KT]): - _mapping: OrderedBidict[KT, t.Any] - - def __reversed__(self) -> t.Iterator[KT]: - return reversed(self._mapping) - - -class _OrderedBidictItemsView(t.ItemsView[KT, VT]): - _mapping: OrderedBidict[KT, VT] - - def __reversed__(self) -> t.Iterator[tuple[KT, VT]]: - ob = self._mapping - for key in reversed(ob): - yield key, ob[key] - - -# For better performance, make _OrderedBidictKeysView and _OrderedBidictItemsView delegate -# to backing dicts for the methods they inherit from collections.abc.Set. (Cannot delegate -# for __iter__ and __reversed__ since they are order-sensitive.) See also: https://bugs.python.org/issue46713 -def _override_set_methods_to_use_backing_dict( - cls: t.Type[_OrderedBidictKeysView[KT]] | t.Type[_OrderedBidictItemsView[KT, t.Any]], - viewname: str, - _setmethodnames: t.Iterable[str] = ( - '__lt__', '__le__', '__gt__', '__ge__', '__eq__', '__ne__', '__sub__', '__rsub__', - '__or__', '__ror__', '__xor__', '__rxor__', '__and__', '__rand__', 'isdisjoint', - ) -) -> None: - def make_proxy_method(methodname: str) -> t.Any: - def method(self: _OrderedBidictKeysView[KT] | _OrderedBidictItemsView[KT, t.Any], *args: t.Any) -> t.Any: - fwdm = self._mapping._fwdm - if not isinstance(fwdm, dict): # dict view speedup not available, fall back to Set's implementation. - return getattr(Set, methodname)(self, *args) - fwdm_dict_view = getattr(fwdm, viewname)() - fwdm_dict_view_method = getattr(fwdm_dict_view, methodname) - if len(args) != 1 or not isinstance(args[0], self.__class__) or not isinstance(args[0]._mapping._fwdm, dict): - return fwdm_dict_view_method(*args) - # self and arg are both _OrderedBidictKeysViews or _OrderedBidictItemsViews whose bidicts are backed by a dict. - # Use arg's backing dict's corresponding view instead of arg. Otherwise, e.g. `ob1.keys() < ob2.keys()` would give - # "TypeError: '<' not supported between instances of '_OrderedBidictKeysView' and '_OrderedBidictKeysView'", because - # both `dict_keys(ob1).__lt__(ob2.keys()) is NotImplemented` and `dict_keys(ob2).__gt__(ob1.keys()) is NotImplemented`. 
- arg_dict = args[0]._mapping._fwdm - arg_dict_view = getattr(arg_dict, viewname)() - return fwdm_dict_view_method(arg_dict_view) - method.__name__ = methodname - method.__qualname__ = f'{cls.__qualname__}.{methodname}' - return method - - for name in _setmethodnames: - setattr(cls, name, make_proxy_method(name)) - - -_override_set_methods_to_use_backing_dict(_OrderedBidictKeysView, 'keys') -_override_set_methods_to_use_backing_dict(_OrderedBidictItemsView, 'items') - - -# * Code review nav * -#============================================================================== -# ← Prev: _frozenordered.py Current: _orderedbidict.py -#============================================================================== diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/_typing.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/_typing.py deleted file mode 100644 index 482267e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/_typing.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -"""Provide typing-related objects.""" - -from __future__ import annotations -from enum import Enum -import typing as t - -if t.TYPE_CHECKING: - from typing_extensions import TypeAlias as TypeAlias -else: - TypeAlias = 'TypeAlias' - - -KT = t.TypeVar('KT') -VT = t.TypeVar('VT') - - -Items: TypeAlias = 't.Iterable[tuple[KT, VT]]' -MapOrItems: TypeAlias = 't.Mapping[KT, VT] | Items[KT, VT]' -ItemsIter: TypeAlias = 't.Iterator[tuple[KT, VT]]' - - -class MissingT(Enum): - """Sentinel used to represent none/missing when None itself can't be used.""" - - MISSING = 'MISSING' - - def __repr__(self) -> str: - return '' - - -MISSING: t.Final[MissingT] = MissingT.MISSING -OKT: TypeAlias = 'KT | MissingT' #: optional key type -OVT: TypeAlias = 'VT | MissingT' #: optional value type - -DT = t.TypeVar('DT') #: for default arguments -ODT: TypeAlias = 'DT | MissingT' #: optional default arg type diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/metadata.py b/venv_flaskchat/lib/python3.11/site-packages/bidict/metadata.py deleted file mode 100644 index 00250fe..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/metadata.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2009-2022 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -"""Define bidict package metadata.""" - - -__version__ = '0.22.1' -__author__ = {'name': 'Joshua Bronson', 'email': 'jabronson@gmail.com'} -__copyright__ = '© 2009-2022 Joshua Bronson' -__description__ = 'The bidirectional mapping library for Python.' -__license__ = 'MPL 2.0' -__url__ = 'https://bidict.readthedocs.io' diff --git a/venv_flaskchat/lib/python3.11/site-packages/bidict/py.typed b/venv_flaskchat/lib/python3.11/site-packages/bidict/py.typed deleted file mode 100644 index 342ea76..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/bidict/py.typed +++ /dev/null @@ -1 +0,0 @@ -PEP-561 marker. 
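The bidict sources removed above document the library's duplication handling: plain assignment raises when a value is already bound to another key, put()/putall() accept an OnDup policy, and forceput() drops whatever existing items conflict in order to preserve uniqueness. A minimal sketch of that documented behavior, using only names the removed sources define; the example data itself is made up for illustration and is not part of this repo's code:

from bidict import bidict, ValueDuplicationError

element_by_symbol = bidict({'H': 'hydrogen'})
assert element_by_symbol.inverse['hydrogen'] == 'H'  # the inverse view stays in sync

try:
    element_by_symbol['He'] = 'hydrogen'  # 'hydrogen' is already bound to 'H' -> raises
except ValueDuplicationError:
    pass

# forceput() preserves uniqueness by dropping the conflicting old item ('H', 'hydrogen').
element_by_symbol.forceput('He', 'hydrogen')
assert 'H' not in element_by_symbol and element_by_symbol['He'] == 'hydrogen'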
diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/LICENSE.rst b/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/LICENSE.rst deleted file mode 100644 index 79c9825..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/LICENSE.rst +++ /dev/null @@ -1,20 +0,0 @@ -Copyright 2010 Jason Kirtland - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/METADATA deleted file mode 100644 index d181f19..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/METADATA +++ /dev/null @@ -1,63 +0,0 @@ -Metadata-Version: 2.1 -Name: blinker -Version: 1.6.2 -Summary: Fast, simple object-to-object and broadcast signaling -Author-email: Jason Kirtland -Maintainer-email: Pallets Ecosystem -License: MIT License -Project-URL: Homepage, https://blinker.readthedocs.io -Project-URL: Documentation, https://blinker.readthedocs.io -Project-URL: Source Code, https://github.com/pallets-eco/blinker/ -Project-URL: Issue Tracker, https://github.com/pallets-eco/blinker/issues/ -Project-URL: Chat, https://discord.gg/pallets -Keywords: signal,emit,events,broadcast -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Software Development :: Libraries -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst - -Blinker -======= - -Blinker provides a fast dispatching system that allows any number of -interested parties to subscribe to events, or "signals". - -Signal receivers can subscribe to specific senders or receive signals -sent by any sender. - -.. code-block:: pycon - - >>> from blinker import signal - >>> started = signal('round-started') - >>> def each(round): - ... print(f"Round {round}") - ... - >>> started.connect(each) - - >>> def round_two(round): - ... print("This is round two.") - ... 
- >>> started.connect(round_two, sender=2) - - >>> for round in range(1, 4): - ... started.send(round) - ... - Round 1! - Round 2! - This is round two. - Round 3! - - -Links ------ - -- Documentation: https://blinker.readthedocs.io/ -- Changes: https://blinker.readthedocs.io/#changes -- PyPI Releases: https://pypi.org/project/blinker/ -- Source Code: https://github.com/pallets-eco/blinker/ -- Issue Tracker: https://github.com/pallets-eco/blinker/issues/ diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/RECORD deleted file mode 100644 index 8b67abc..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/RECORD +++ /dev/null @@ -1,15 +0,0 @@ -blinker-1.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -blinker-1.6.2.dist-info/LICENSE.rst,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054 -blinker-1.6.2.dist-info/METADATA,sha256=7MRskabu2wQvWIMFwgqP3w2LDt8nR5nCxH7Anu1ZrBM,1964 -blinker-1.6.2.dist-info/RECORD,, -blinker-1.6.2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 -blinker-1.6.2.dist-info/top_level.txt,sha256=2NmsENM0J2t9Z8mkjxHDmGMQj7Bm8f5ZTTYe1x1fZtM,8 -blinker/__init__.py,sha256=Ko7EbvxyCl_UewgsP8XgDJqJcHZA7EsuhG72R_zDrcY,408 -blinker/__pycache__/__init__.cpython-311.pyc,, -blinker/__pycache__/_saferef.cpython-311.pyc,, -blinker/__pycache__/_utilities.cpython-311.pyc,, -blinker/__pycache__/base.cpython-311.pyc,, -blinker/_saferef.py,sha256=kWOTIWnCY3kOb8lZP74Rbx7bR_BLVg4TjwzNCRLhKHs,9096 -blinker/_utilities.py,sha256=GPXtJzykzVotoxHC79mgFQMPJtICwpVDCCpus4_JtsA,4110 -blinker/base.py,sha256=7Y-C0ZVIe-NrrskPeqj0bLSp4R6Cpq5LrzI1DmLqMEA,20469 -blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/WHEEL b/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/WHEEL deleted file mode 100644 index 1f37c02..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/top_level.txt b/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/top_level.txt deleted file mode 100644 index 1ff4ca5..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker-1.6.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -blinker diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/blinker/__init__.py deleted file mode 100644 index 71d66d3..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -from blinker.base import ANY -from blinker.base import NamedSignal -from blinker.base import Namespace -from blinker.base import receiver_connected -from blinker.base import Signal -from blinker.base import signal -from blinker.base import WeakNamespace - -__all__ = [ - "ANY", - "NamedSignal", - "Namespace", - "Signal", - "WeakNamespace", - "receiver_connected", - "signal", -] - -__version__ = "1.6.2" diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 2b5e968..0000000 Binary files 
a/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/_saferef.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/_saferef.cpython-311.pyc deleted file mode 100644 index 704ee27..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/_saferef.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/_utilities.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/_utilities.cpython-311.pyc deleted file mode 100644 index e796789..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/_utilities.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/base.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/base.cpython-311.pyc deleted file mode 100644 index 6bb058c..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/blinker/__pycache__/base.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker/_saferef.py b/venv_flaskchat/lib/python3.11/site-packages/blinker/_saferef.py deleted file mode 100644 index dcb70c1..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker/_saferef.py +++ /dev/null @@ -1,230 +0,0 @@ -# extracted from Louie, http://pylouie.org/ -# updated for Python 3 -# -# Copyright (c) 2006 Patrick K. O'Brien, Mike C. Fletcher, -# Matthew R. Scott -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# -# * Neither the name of the nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -# -"""Refactored 'safe reference from dispatcher.py""" -import operator -import sys -import traceback -import weakref - - -get_self = operator.attrgetter("__self__") -get_func = operator.attrgetter("__func__") - - -def safe_ref(target, on_delete=None): - """Return a *safe* weak reference to a callable target. 
- - - ``target``: The object to be weakly referenced, if it's a bound - method reference, will create a BoundMethodWeakref, otherwise - creates a simple weakref. - - - ``on_delete``: If provided, will have a hard reference stored to - the callable to be called after the safe reference goes out of - scope with the reference object, (either a weakref or a - BoundMethodWeakref) as argument. - """ - try: - im_self = get_self(target) - except AttributeError: - if callable(on_delete): - return weakref.ref(target, on_delete) - else: - return weakref.ref(target) - else: - if im_self is not None: - # Turn a bound method into a BoundMethodWeakref instance. - # Keep track of these instances for lookup by disconnect(). - assert hasattr(target, "im_func") or hasattr(target, "__func__"), ( - f"safe_ref target {target!r} has im_self, but no im_func, " - "don't know how to create reference" - ) - reference = BoundMethodWeakref(target=target, on_delete=on_delete) - return reference - - -class BoundMethodWeakref: - """'Safe' and reusable weak references to instance methods. - - BoundMethodWeakref objects provide a mechanism for referencing a - bound method without requiring that the method object itself - (which is normally a transient object) is kept alive. Instead, - the BoundMethodWeakref object keeps weak references to both the - object and the function which together define the instance method. - - Attributes: - - - ``key``: The identity key for the reference, calculated by the - class's calculate_key method applied to the target instance method. - - - ``deletion_methods``: Sequence of callable objects taking single - argument, a reference to this object which will be called when - *either* the target object or target function is garbage - collected (i.e. when this object becomes invalid). These are - specified as the on_delete parameters of safe_ref calls. - - - ``weak_self``: Weak reference to the target object. - - - ``weak_func``: Weak reference to the target function. - - Class Attributes: - - - ``_all_instances``: Class attribute pointing to all live - BoundMethodWeakref objects indexed by the class's - calculate_key(target) method applied to the target objects. - This weak value dictionary is used to short-circuit creation so - that multiple references to the same (object, function) pair - produce the same BoundMethodWeakref instance. - """ - - _all_instances = weakref.WeakValueDictionary() # type: ignore[var-annotated] - - def __new__(cls, target, on_delete=None, *arguments, **named): - """Create new instance or return current instance. - - Basically this method of construction allows us to - short-circuit creation of references to already-referenced - instance methods. The key corresponding to the target is - calculated, and if there is already an existing reference, - that is returned, with its deletion_methods attribute updated. - Otherwise the new instance is created and registered in the - table of already-referenced methods. - """ - key = cls.calculate_key(target) - current = cls._all_instances.get(key) - if current is not None: - current.deletion_methods.append(on_delete) - return current - else: - base = super().__new__(cls) - cls._all_instances[key] = base - base.__init__(target, on_delete, *arguments, **named) - return base - - def __init__(self, target, on_delete=None): - """Return a weak-reference-like instance for a bound method. 
- - - ``target``: The instance-method target for the weak reference, - must have im_self and im_func attributes and be - reconstructable via the following, which is true of built-in - instance methods:: - - target.im_func.__get__( target.im_self ) - - - ``on_delete``: Optional callback which will be called when - this weak reference ceases to be valid (i.e. either the - object or the function is garbage collected). Should take a - single argument, which will be passed a pointer to this - object. - """ - - def remove(weak, self=self): - """Set self.isDead to True when method or instance is destroyed.""" - methods = self.deletion_methods[:] - del self.deletion_methods[:] - try: - del self.__class__._all_instances[self.key] - except KeyError: - pass - for function in methods: - try: - if callable(function): - function(self) - except Exception: - try: - traceback.print_exc() - except AttributeError: - e = sys.exc_info()[1] - print( - f"Exception during saferef {self} " - f"cleanup function {function}: {e}" - ) - - self.deletion_methods = [on_delete] - self.key = self.calculate_key(target) - im_self = get_self(target) - im_func = get_func(target) - self.weak_self = weakref.ref(im_self, remove) - self.weak_func = weakref.ref(im_func, remove) - self.self_name = str(im_self) - self.func_name = str(im_func.__name__) - - @classmethod - def calculate_key(cls, target): - """Calculate the reference key for this reference. - - Currently this is a two-tuple of the id()'s of the target - object and the target function respectively. - """ - return (id(get_self(target)), id(get_func(target))) - - def __str__(self): - """Give a friendly representation of the object.""" - return "{}({}.{})".format( - self.__class__.__name__, - self.self_name, - self.func_name, - ) - - __repr__ = __str__ - - def __hash__(self): - return hash((self.self_name, self.key)) - - def __nonzero__(self): - """Whether we are still a valid reference.""" - return self() is not None - - def __eq__(self, other): - """Compare with another reference.""" - if not isinstance(other, self.__class__): - return operator.eq(self.__class__, type(other)) - return operator.eq(self.key, other.key) - - def __call__(self): - """Return a strong reference to the bound method. - - If the target cannot be retrieved, then will return None, - otherwise returns a bound instance method for our object and - function. - - Note: You may call this method any number of times, as it does - not invalidate the reference. - """ - target = self.weak_self() - if target is not None: - function = self.weak_func() - if function is not None: - return function.__get__(target) - return None diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker/_utilities.py b/venv_flaskchat/lib/python3.11/site-packages/blinker/_utilities.py deleted file mode 100644 index 068d94c..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker/_utilities.py +++ /dev/null @@ -1,142 +0,0 @@ -from __future__ import annotations - -import asyncio -import inspect -import sys -import typing as t -from functools import partial -from weakref import ref - -from blinker._saferef import BoundMethodWeakref - -IdentityType = t.Union[t.Tuple[int, int], str, int] - - -class _symbol: - def __init__(self, name): - """Construct a new named symbol.""" - self.__name__ = self.name = name - - def __reduce__(self): - return symbol, (self.name,) - - def __repr__(self): - return self.name - - -_symbol.__name__ = "symbol" - - -class symbol: - """A constant symbol. 
- - >>> symbol('foo') is symbol('foo') - True - >>> symbol('foo') - foo - - A slight refinement of the MAGICCOOKIE=object() pattern. The primary - advantage of symbol() is its repr(). They are also singletons. - - Repeated calls of symbol('name') will all return the same instance. - - """ - - symbols = {} # type: ignore[var-annotated] - - def __new__(cls, name): - try: - return cls.symbols[name] - except KeyError: - return cls.symbols.setdefault(name, _symbol(name)) - - -def hashable_identity(obj: object) -> IdentityType: - if hasattr(obj, "__func__"): - return (id(obj.__func__), id(obj.__self__)) # type: ignore[attr-defined] - elif hasattr(obj, "im_func"): - return (id(obj.im_func), id(obj.im_self)) # type: ignore[attr-defined] - elif isinstance(obj, (int, str)): - return obj - else: - return id(obj) - - -WeakTypes = (ref, BoundMethodWeakref) - - -class annotatable_weakref(ref): - """A weakref.ref that supports custom instance attributes.""" - - receiver_id: t.Optional[IdentityType] - sender_id: t.Optional[IdentityType] - - -def reference( # type: ignore[no-untyped-def] - object, callback=None, **annotations -) -> annotatable_weakref: - """Return an annotated weak ref.""" - if callable(object): - weak = callable_reference(object, callback) - else: - weak = annotatable_weakref(object, callback) - for key, value in annotations.items(): - setattr(weak, key, value) - return weak # type: ignore[no-any-return] - - -def callable_reference(object, callback=None): - """Return an annotated weak ref, supporting bound instance methods.""" - if hasattr(object, "im_self") and object.im_self is not None: - return BoundMethodWeakref(target=object, on_delete=callback) - elif hasattr(object, "__self__") and object.__self__ is not None: - return BoundMethodWeakref(target=object, on_delete=callback) - return annotatable_weakref(object, callback) - - -class lazy_property: - """A @property that is only evaluated once.""" - - def __init__(self, deferred): - self._deferred = deferred - self.__doc__ = deferred.__doc__ - - def __get__(self, obj, cls): - if obj is None: - return self - value = self._deferred(obj) - setattr(obj, self._deferred.__name__, value) - return value - - -def is_coroutine_function(func: t.Any) -> bool: - # Python < 3.8 does not correctly determine partially wrapped - # coroutine functions are coroutine functions, hence the need for - # this to exist. Code taken from CPython. - if sys.version_info >= (3, 8): - return asyncio.iscoroutinefunction(func) - else: - # Note that there is something special about the AsyncMock - # such that it isn't determined as a coroutine function - # without an explicit check. - try: - from unittest.mock import AsyncMock # type: ignore[attr-defined] - - if isinstance(func, AsyncMock): - return True - except ImportError: - # Not testing, no asynctest to import - pass - - while inspect.ismethod(func): - func = func.__func__ - while isinstance(func, partial): - func = func.func - if not inspect.isfunction(func): - return False - - if func.__code__.co_flags & inspect.CO_COROUTINE: - return True - - acic = asyncio.coroutines._is_coroutine # type: ignore[attr-defined] - return getattr(func, "_is_coroutine", None) is acic diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker/base.py b/venv_flaskchat/lib/python3.11/site-packages/blinker/base.py deleted file mode 100644 index 80e24e2..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/blinker/base.py +++ /dev/null @@ -1,551 +0,0 @@ -"""Signals and events. 
- -A small implementation of signals, inspired by a snippet of Django signal -API client code seen in a blog post. Signals are first-class objects and -each manages its own receivers and message emission. - -The :func:`signal` function provides singleton behavior for named signals. - -""" -from __future__ import annotations - -import typing as t -from collections import defaultdict -from contextlib import contextmanager -from warnings import warn -from weakref import WeakValueDictionary - -from blinker._utilities import annotatable_weakref -from blinker._utilities import hashable_identity -from blinker._utilities import IdentityType -from blinker._utilities import is_coroutine_function -from blinker._utilities import lazy_property -from blinker._utilities import reference -from blinker._utilities import symbol -from blinker._utilities import WeakTypes - -if t.TYPE_CHECKING: - import typing_extensions as te - - T_callable = t.TypeVar("T_callable", bound=t.Callable[..., t.Any]) - - T = t.TypeVar("T") - P = te.ParamSpec("P") - - AsyncWrapperType = t.Callable[[t.Callable[P, T]], t.Callable[P, t.Awaitable[T]]] - SyncWrapperType = t.Callable[[t.Callable[P, t.Awaitable[T]]], t.Callable[P, T]] - -ANY = symbol("ANY") -ANY.__doc__ = 'Token for "any sender".' -ANY_ID = 0 - - -class Signal: - """A notification emitter.""" - - #: An :obj:`ANY` convenience synonym, allows ``Signal.ANY`` - #: without an additional import. - ANY = ANY - - @lazy_property - def receiver_connected(self) -> Signal: - """Emitted after each :meth:`connect`. - - The signal sender is the signal instance, and the :meth:`connect` - arguments are passed through: *receiver*, *sender*, and *weak*. - - .. versionadded:: 1.2 - - """ - return Signal(doc="Emitted after a receiver connects.") - - @lazy_property - def receiver_disconnected(self) -> Signal: - """Emitted after :meth:`disconnect`. - - The sender is the signal instance, and the :meth:`disconnect` arguments - are passed through: *receiver* and *sender*. - - Note, this signal is emitted **only** when :meth:`disconnect` is - called explicitly. - - The disconnect signal can not be emitted by an automatic disconnect - (due to a weakly referenced receiver or sender going out of scope), - as the receiver and/or sender instances are no longer available for - use at the time this signal would be emitted. - - An alternative approach is available by subscribing to - :attr:`receiver_connected` and setting up a custom weakref cleanup - callback on weak receivers and senders. - - .. versionadded:: 1.2 - - """ - return Signal(doc="Emitted after a receiver disconnects.") - - def __init__(self, doc: str | None = None) -> None: - """ - :param doc: optional. If provided, will be assigned to the signal's - __doc__ attribute. - - """ - if doc: - self.__doc__ = doc - #: A mapping of connected receivers. - #: - #: The values of this mapping are not meaningful outside of the - #: internal :class:`Signal` implementation, however the boolean value - #: of the mapping is useful as an extremely efficient check to see if - #: any receivers are connected to the signal. 
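# A short sketch of the bookkeeping described above, assuming the vendored
# blinker package is importable; on_save and the handlers are illustrative.
# It shows the per-signal receiver_connected meta-signal and the cheap
# truth-value check that the receivers mapping provides.
from blinker import Signal

on_save = Signal(doc="Emitted when a record is saved.")

def watch_connections(sender, **kwargs):
    # called once for every later connect() on on_save
    print("connected", kwargs["receiver"], "for sender", kwargs["sender"])

on_save.receiver_connected.connect(watch_connections)

print(bool(on_save.receivers))   # False: nothing connected to on_save itself yet

def handler(sender, **kwargs):
    return f"saved by {sender}"

on_save.connect(handler)         # watch_connections fires here
print(bool(on_save.receivers))   # True: an O(1) guard before building expensive payloads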
- self.receivers: dict[IdentityType, t.Callable | annotatable_weakref] = {} - self.is_muted = False - self._by_receiver: dict[IdentityType, set[IdentityType]] = defaultdict(set) - self._by_sender: dict[IdentityType, set[IdentityType]] = defaultdict(set) - self._weak_senders: dict[IdentityType, annotatable_weakref] = {} - - def connect( - self, receiver: T_callable, sender: t.Any = ANY, weak: bool = True - ) -> T_callable: - """Connect *receiver* to signal events sent by *sender*. - - :param receiver: A callable. Will be invoked by :meth:`send` with - `sender=` as a single positional argument and any ``kwargs`` that - were provided to a call to :meth:`send`. - - :param sender: Any object or :obj:`ANY`, defaults to ``ANY``. - Restricts notifications delivered to *receiver* to only those - :meth:`send` emissions sent by *sender*. If ``ANY``, the receiver - will always be notified. A *receiver* may be connected to - multiple *sender* values on the same Signal through multiple calls - to :meth:`connect`. - - :param weak: If true, the Signal will hold a weakref to *receiver* - and automatically disconnect when *receiver* goes out of scope or - is garbage collected. Defaults to True. - - """ - receiver_id = hashable_identity(receiver) - receiver_ref: T_callable | annotatable_weakref - - if weak: - receiver_ref = reference(receiver, self._cleanup_receiver) - receiver_ref.receiver_id = receiver_id - else: - receiver_ref = receiver - sender_id: IdentityType - if sender is ANY: - sender_id = ANY_ID - else: - sender_id = hashable_identity(sender) - - self.receivers.setdefault(receiver_id, receiver_ref) - self._by_sender[sender_id].add(receiver_id) - self._by_receiver[receiver_id].add(sender_id) - del receiver_ref - - if sender is not ANY and sender_id not in self._weak_senders: - # wire together a cleanup for weakref-able senders - try: - sender_ref = reference(sender, self._cleanup_sender) - sender_ref.sender_id = sender_id - except TypeError: - pass - else: - self._weak_senders.setdefault(sender_id, sender_ref) - del sender_ref - - # broadcast this connection. if receivers raise, disconnect. - if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers: - try: - self.receiver_connected.send( - self, receiver=receiver, sender=sender, weak=weak - ) - except TypeError as e: - self.disconnect(receiver, sender) - raise e - if receiver_connected.receivers and self is not receiver_connected: - try: - receiver_connected.send( - self, receiver_arg=receiver, sender_arg=sender, weak_arg=weak - ) - except TypeError as e: - self.disconnect(receiver, sender) - raise e - return receiver - - def connect_via( - self, sender: t.Any, weak: bool = False - ) -> t.Callable[[T_callable], T_callable]: - """Connect the decorated function as a receiver for *sender*. - - :param sender: Any object or :obj:`ANY`. The decorated function - will only receive :meth:`send` emissions sent by *sender*. If - ``ANY``, the receiver will always be notified. A function may be - decorated multiple times with differing *sender* values. - - :param weak: If true, the Signal will hold a weakref to the - decorated function and automatically disconnect when *receiver* - goes out of scope or is garbage collected. Unlike - :meth:`connect`, this defaults to False. - - The decorated function will be invoked by :meth:`send` with - `sender=` as a single positional argument and any ``kwargs`` that - were provided to the call to :meth:`send`. - - - .. 
versionadded:: 1.1 - - """ - - def decorator(fn: T_callable) -> T_callable: - self.connect(fn, sender, weak) - return fn - - return decorator - - @contextmanager - def connected_to( - self, receiver: t.Callable, sender: t.Any = ANY - ) -> t.Generator[None, None, None]: - """Execute a block with the signal temporarily connected to *receiver*. - - :param receiver: a receiver callable - :param sender: optional, a sender to filter on - - This is a context manager for use in the ``with`` statement. It can - be useful in unit tests. *receiver* is connected to the signal for - the duration of the ``with`` block, and will be disconnected - automatically when exiting the block: - - .. code-block:: python - - with on_ready.connected_to(receiver): - # do stuff - on_ready.send(123) - - .. versionadded:: 1.1 - - """ - self.connect(receiver, sender=sender, weak=False) - try: - yield None - except Exception as e: - self.disconnect(receiver) - raise e - else: - self.disconnect(receiver) - - @contextmanager - def muted(self) -> t.Generator[None, None, None]: - """Context manager for temporarily disabling signal. - Useful for test purposes. - """ - self.is_muted = True - try: - yield None - except Exception as e: - raise e - finally: - self.is_muted = False - - def temporarily_connected_to( - self, receiver: t.Callable, sender: t.Any = ANY - ) -> t.ContextManager[None]: - """An alias for :meth:`connected_to`. - - :param receiver: a receiver callable - :param sender: optional, a sender to filter on - - .. versionadded:: 0.9 - - .. versionchanged:: 1.1 - Renamed to :meth:`connected_to`. ``temporarily_connected_to`` was - deprecated in 1.2 and will be removed in a subsequent version. - - """ - warn( - "temporarily_connected_to is deprecated; use connected_to instead.", - DeprecationWarning, - ) - return self.connected_to(receiver, sender) - - def send( - self, - *sender: t.Any, - _async_wrapper: AsyncWrapperType | None = None, - **kwargs: t.Any, - ) -> list[tuple[t.Callable, t.Any]]: - """Emit this signal on behalf of *sender*, passing on ``kwargs``. - - Returns a list of 2-tuples, pairing receivers with their return - value. The ordering of receiver notification is undefined. - - :param sender: Any object or ``None``. If omitted, synonymous - with ``None``. Only accepts one positional argument. - :param _async_wrapper: A callable that should wrap a coroutine - receiver and run it when called synchronously. - - :param kwargs: Data to be sent to receivers. - """ - if self.is_muted: - return [] - - sender = self._extract_sender(sender) - results = [] - for receiver in self.receivers_for(sender): - if is_coroutine_function(receiver): - if _async_wrapper is None: - raise RuntimeError("Cannot send to a coroutine function") - receiver = _async_wrapper(receiver) - result = receiver(sender, **kwargs) # type: ignore[call-arg] - results.append((receiver, result)) - return results - - async def send_async( - self, - *sender: t.Any, - _sync_wrapper: SyncWrapperType | None = None, - **kwargs: t.Any, - ) -> list[tuple[t.Callable, t.Any]]: - """Emit this signal on behalf of *sender*, passing on ``kwargs``. - - Returns a list of 2-tuples, pairing receivers with their return - value. The ordering of receiver notification is undefined. - - :param sender: Any object or ``None``. If omitted, synonymous - with ``None``. Only accepts one positional argument. - :param _sync_wrapper: A callable that should wrap a synchronous - receiver and run it when awaited. - - :param kwargs: Data to be sent to receivers. 
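# A usage sketch of connect(), connect_via() and send() as documented above,
# assuming the vendored blinker package is importable; the signal and sender
# names are illustrative. send() returns (receiver, return value) pairs and
# delivery is filtered by the sender each receiver was connected with.
from blinker import ANY, Signal

on_ready = Signal()

def any_listener(sender, **kwargs):
    return f"any: {sender}"

on_ready.connect(any_listener, sender=ANY)   # hears every emission

@on_ready.connect_via("worker-1")            # hears only sends from "worker-1"
def worker_listener(sender, **kwargs):
    return f"worker: {kwargs.get('job')}"

print(on_ready.send("worker-1", job=42))
# e.g. [(any_listener, 'any: worker-1'), (worker_listener, 'worker: 42')]  (order undefined)
print(on_ready.send("worker-2", job=7))
# e.g. [(any_listener, 'any: worker-2')]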
- """ - if self.is_muted: - return [] - - sender = self._extract_sender(sender) - results = [] - for receiver in self.receivers_for(sender): - if not is_coroutine_function(receiver): - if _sync_wrapper is None: - raise RuntimeError("Cannot send to a non-coroutine function") - receiver = _sync_wrapper(receiver) # type: ignore[arg-type] - result = await receiver(sender, **kwargs) # type: ignore[call-arg, misc] - results.append((receiver, result)) - return results - - def _extract_sender(self, sender: t.Any) -> t.Any: - if not self.receivers: - # Ensure correct signature even on no-op sends, disable with -O - # for lowest possible cost. - if __debug__ and sender and len(sender) > 1: - raise TypeError( - f"send() accepts only one positional argument, {len(sender)} given" - ) - return [] - - # Using '*sender' rather than 'sender=None' allows 'sender' to be - # used as a keyword argument- i.e. it's an invisible name in the - # function signature. - if len(sender) == 0: - sender = None - elif len(sender) > 1: - raise TypeError( - f"send() accepts only one positional argument, {len(sender)} given" - ) - else: - sender = sender[0] - return sender - - def has_receivers_for(self, sender: t.Any) -> bool: - """True if there is probably a receiver for *sender*. - - Performs an optimistic check only. Does not guarantee that all - weakly referenced receivers are still alive. See - :meth:`receivers_for` for a stronger search. - - """ - if not self.receivers: - return False - if self._by_sender[ANY_ID]: - return True - if sender is ANY: - return False - return hashable_identity(sender) in self._by_sender - - def receivers_for( - self, sender: t.Any - ) -> t.Generator[t.Callable | annotatable_weakref, None, None]: - """Iterate all live receivers listening for *sender*.""" - # TODO: test receivers_for(ANY) - if self.receivers: - sender_id = hashable_identity(sender) - if sender_id in self._by_sender: - ids = self._by_sender[ANY_ID] | self._by_sender[sender_id] - else: - ids = self._by_sender[ANY_ID].copy() - for receiver_id in ids: - receiver = self.receivers.get(receiver_id) - if receiver is None: - continue - if isinstance(receiver, WeakTypes): - strong = receiver() - if strong is None: - self._disconnect(receiver_id, ANY_ID) - continue - receiver = strong - yield receiver # type: ignore[misc] - - def disconnect(self, receiver: t.Callable, sender: t.Any = ANY) -> None: - """Disconnect *receiver* from this signal's events. - - :param receiver: a previously :meth:`connected` callable - - :param sender: a specific sender to disconnect from, or :obj:`ANY` - to disconnect from all senders. Defaults to ``ANY``. 
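# A sketch of send_async() as described above, assuming the vendored blinker
# package is importable; the signal name and URL are illustrative. Coroutine
# receivers are awaited directly; mixing in plain callables would require the
# _sync_wrapper hook, so every receiver here is async.
import asyncio

from blinker import Signal

on_fetch = Signal()

async def fetch_handler(sender, **kwargs):
    await asyncio.sleep(0)               # stand-in for real asynchronous work
    return kwargs["url"].upper()

on_fetch.connect(fetch_handler)

async def main():
    results = await on_fetch.send_async("crawler", url="https://example.org")
    for receiver, value in results:
        print(receiver.__name__, "->", value)

asyncio.run(main())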
- - """ - sender_id: IdentityType - if sender is ANY: - sender_id = ANY_ID - else: - sender_id = hashable_identity(sender) - receiver_id = hashable_identity(receiver) - self._disconnect(receiver_id, sender_id) - - if ( - "receiver_disconnected" in self.__dict__ - and self.receiver_disconnected.receivers - ): - self.receiver_disconnected.send(self, receiver=receiver, sender=sender) - - def _disconnect(self, receiver_id: IdentityType, sender_id: IdentityType) -> None: - if sender_id == ANY_ID: - if self._by_receiver.pop(receiver_id, False): - for bucket in self._by_sender.values(): - bucket.discard(receiver_id) - self.receivers.pop(receiver_id, None) - else: - self._by_sender[sender_id].discard(receiver_id) - self._by_receiver[receiver_id].discard(sender_id) - - def _cleanup_receiver(self, receiver_ref: annotatable_weakref) -> None: - """Disconnect a receiver from all senders.""" - self._disconnect(t.cast(IdentityType, receiver_ref.receiver_id), ANY_ID) - - def _cleanup_sender(self, sender_ref: annotatable_weakref) -> None: - """Disconnect all receivers from a sender.""" - sender_id = t.cast(IdentityType, sender_ref.sender_id) - assert sender_id != ANY_ID - self._weak_senders.pop(sender_id, None) - for receiver_id in self._by_sender.pop(sender_id, ()): - self._by_receiver[receiver_id].discard(sender_id) - - def _cleanup_bookkeeping(self) -> None: - """Prune unused sender/receiver bookkeeping. Not threadsafe. - - Connecting & disconnecting leave behind a small amount of bookkeeping - for the receiver and sender values. Typical workloads using Blinker, - for example in most web apps, Flask, CLI scripts, etc., are not - adversely affected by this bookkeeping. - - With a long-running Python process performing dynamic signal routing - with high volume- e.g. connecting to function closures, "senders" are - all unique object instances, and doing all of this over and over- you - may see memory usage will grow due to extraneous bookkeeping. (An empty - set() for each stale sender/receiver pair.) - - This method will prune that bookkeeping away, with the caveat that such - pruning is not threadsafe. The risk is that cleanup of a fully - disconnected receiver/sender pair occurs while another thread is - connecting that same pair. If you are in the highly dynamic, unique - receiver/sender situation that has lead you to this method, that - failure mode is perhaps not a big deal for you. - """ - for mapping in (self._by_sender, self._by_receiver): - for _id, bucket in list(mapping.items()): - if not bucket: - mapping.pop(_id, None) - - def _clear_state(self) -> None: - """Throw away all signal state. Useful for unit tests.""" - self._weak_senders.clear() - self.receivers.clear() - self._by_sender.clear() - self._by_receiver.clear() - - -receiver_connected = Signal( - """\ -Sent by a :class:`Signal` after a receiver connects. - -:argument: the Signal that was connected to -:keyword receiver_arg: the connected receiver -:keyword sender_arg: the sender to connect to -:keyword weak_arg: true if the connection to receiver_arg is a weak reference - -.. deprecated:: 1.2 - -As of 1.2, individual signals have their own private -:attr:`~Signal.receiver_connected` and -:attr:`~Signal.receiver_disconnected` signals with a slightly simplified -call signature. This global signal is planned to be removed in 1.6. 
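# An illustrative sketch of the bookkeeping growth described above, assuming
# the vendored blinker package is importable. _cleanup_bookkeeping() and the
# _by_sender mapping are private implementation details, touched here only to
# make the effect visible; string senders are used because they are not
# weak-referenceable and therefore never get cleaned up automatically.
from blinker import Signal

sig = Signal()

def receiver(sender, **kwargs):
    return None

for i in range(1000):
    topic = f"task-{i}"                  # a unique, non-weakref-able sender
    sig.connect(receiver, sender=topic)
    sig.disconnect(receiver, sender=topic)

print(len(sig._by_sender))               # ~1000 stale empty sets left behind
sig._cleanup_bookkeeping()               # private and not threadsafe
print(len(sig._by_sender))               # the stale entries are gone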
- -""" -) - - -class NamedSignal(Signal): - """A named generic notification emitter.""" - - def __init__(self, name: str, doc: str | None = None) -> None: - Signal.__init__(self, doc) - - #: The name of this signal. - self.name = name - - def __repr__(self) -> str: - base = Signal.__repr__(self) - return f"{base[:-1]}; {self.name!r}>" - - -class Namespace(dict): - """A mapping of signal names to signals.""" - - def signal(self, name: str, doc: str | None = None) -> NamedSignal: - """Return the :class:`NamedSignal` *name*, creating it if required. - - Repeated calls to this function will return the same signal object. - - """ - try: - return self[name] # type: ignore[no-any-return] - except KeyError: - result = self.setdefault(name, NamedSignal(name, doc)) - return result # type: ignore[no-any-return] - - -class WeakNamespace(WeakValueDictionary): - """A weak mapping of signal names to signals. - - Automatically cleans up unused Signals when the last reference goes out - of scope. This namespace implementation exists for a measure of legacy - compatibility with Blinker <= 1.2, and may be dropped in the future. - - .. versionadded:: 1.3 - - """ - - def signal(self, name: str, doc: str | None = None) -> NamedSignal: - """Return the :class:`NamedSignal` *name*, creating it if required. - - Repeated calls to this function will return the same signal object. - - """ - try: - return self[name] # type: ignore[no-any-return] - except KeyError: - result = self.setdefault(name, NamedSignal(name, doc)) - return result # type: ignore[no-any-return] - - -signal = Namespace().signal diff --git a/venv_flaskchat/lib/python3.11/site-packages/blinker/py.typed b/venv_flaskchat/lib/python3.11/site-packages/blinker/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/LICENSE.rst b/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/LICENSE.rst deleted file mode 100644 index d12a849..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2014 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/METADATA deleted file mode 100644 index 8e5dc1e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/METADATA +++ /dev/null @@ -1,111 +0,0 @@ -Metadata-Version: 2.1 -Name: click -Version: 8.1.3 -Summary: Composable command line interface toolkit -Home-page: https://palletsprojects.com/p/click/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://click.palletsprojects.com/ -Project-URL: Changes, https://click.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/click/ -Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: colorama ; platform_system == "Windows" -Requires-Dist: importlib-metadata ; python_version < "3.8" - -\$ click\_ -========== - -Click is a Python package for creating beautiful command line interfaces -in a composable way with as little code as necessary. It's the "Command -Line Interface Creation Kit". It's highly configurable but comes with -sensible defaults out of the box. - -It aims to make the process of writing command line tools quick and fun -while also preventing any frustration caused by the inability to -implement an intended CLI API. - -Click in three points: - -- Arbitrary nesting of commands -- Automatic help page generation -- Supports lazy loading of subcommands at runtime - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U click - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. code-block:: python - - import click - - @click.command() - @click.option("--count", default=1, help="Number of greetings.") - @click.option("--name", prompt="Your name", help="The person to greet.") - def hello(count, name): - """Simple program that greets NAME for a total of COUNT times.""" - for _ in range(count): - click.echo(f"Hello, {name}!") - - if __name__ == '__main__': - hello() - -.. code-block:: text - - $ python hello.py --count=3 - Your name: Click - Hello, Click! - Hello, Click! - Hello, Click! - - -Donate ------- - -The Pallets organization develops and supports Click and other popular -packages. 
In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://click.palletsprojects.com/ -- Changes: https://click.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/click/ -- Source Code: https://github.com/pallets/click -- Issue Tracker: https://github.com/pallets/click/issues -- Website: https://palletsprojects.com/p/click -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/RECORD deleted file mode 100644 index 0c973f8..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/RECORD +++ /dev/null @@ -1,39 +0,0 @@ -click-8.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -click-8.1.3.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 -click-8.1.3.dist-info/METADATA,sha256=tFJIX5lOjx7c5LjZbdTPFVDJSgyv9F74XY0XCPp_gnc,3247 -click-8.1.3.dist-info/RECORD,, -click-8.1.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -click-8.1.3.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 -click/__init__.py,sha256=rQBLutqg-z6m8nOzivIfigDn_emijB_dKv9BZ2FNi5s,3138 -click/__pycache__/__init__.cpython-311.pyc,, -click/__pycache__/_compat.cpython-311.pyc,, -click/__pycache__/_termui_impl.cpython-311.pyc,, -click/__pycache__/_textwrap.cpython-311.pyc,, -click/__pycache__/_winconsole.cpython-311.pyc,, -click/__pycache__/core.cpython-311.pyc,, -click/__pycache__/decorators.cpython-311.pyc,, -click/__pycache__/exceptions.cpython-311.pyc,, -click/__pycache__/formatting.cpython-311.pyc,, -click/__pycache__/globals.cpython-311.pyc,, -click/__pycache__/parser.cpython-311.pyc,, -click/__pycache__/shell_completion.cpython-311.pyc,, -click/__pycache__/termui.cpython-311.pyc,, -click/__pycache__/testing.cpython-311.pyc,, -click/__pycache__/types.cpython-311.pyc,, -click/__pycache__/utils.cpython-311.pyc,, -click/_compat.py,sha256=JIHLYs7Jzz4KT9t-ds4o4jBzLjnwCiJQKqur-5iwCKI,18810 -click/_termui_impl.py,sha256=qK6Cfy4mRFxvxE8dya8RBhLpSC8HjF-lvBc6aNrPdwg,23451 -click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 -click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 -click/core.py,sha256=mz87bYEKzIoNYEa56BFAiOJnvt1Y0L-i7wD4_ZecieE,112782 -click/decorators.py,sha256=yo3zvzgUm5q7h5CXjyV6q3h_PJAiUaem178zXwdWUFI,16350 -click/exceptions.py,sha256=7gDaLGuFZBeCNwY9ERMsF2-Z3R9Fvq09Zc6IZSKjseo,9167 -click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 -click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961 -click/parser.py,sha256=cAEt1uQR8gq3-S9ysqbVU-fdAZNvilxw4ReJ_T1OQMk,19044 -click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -click/shell_completion.py,sha256=qOp_BeC9esEOSZKyu5G7RIxEUaLsXUX-mTb7hB1r4QY,18018 -click/termui.py,sha256=ACBQVOvFCTSqtD5VREeCAdRtlHd-Imla-Lte4wSfMjA,28355 -click/testing.py,sha256=ptpMYgRY7dVfE3UDgkgwayu9ePw98sQI3D7zZXiCpj4,16063 -click/types.py,sha256=rEb1aZSQKq3ciCMmjpG2Uva9vk498XRL7ThrcK2GRss,35805 -click/utils.py,sha256=33D6E7poH_nrKB-xr-UyDEXnxOcCiQqxuRLtrqeVv6o,18682 diff --git a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/WHEEL 
b/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/top_level.txt b/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/top_level.txt deleted file mode 100644 index dca9a90..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click-8.1.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -click diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/click/__init__.py deleted file mode 100644 index e3ef423..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/__init__.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Click is a simple Python module inspired by the stdlib optparse to make -writing command line scripts fun. Unlike other modules, it's based -around a simple API that does not come with too much magic and is -composable. -""" -from .core import Argument as Argument -from .core import BaseCommand as BaseCommand -from .core import Command as Command -from .core import CommandCollection as CommandCollection -from .core import Context as Context -from .core import Group as Group -from .core import MultiCommand as MultiCommand -from .core import Option as Option -from .core import Parameter as Parameter -from .decorators import argument as argument -from .decorators import command as command -from .decorators import confirmation_option as confirmation_option -from .decorators import group as group -from .decorators import help_option as help_option -from .decorators import make_pass_decorator as make_pass_decorator -from .decorators import option as option -from .decorators import pass_context as pass_context -from .decorators import pass_obj as pass_obj -from .decorators import password_option as password_option -from .decorators import version_option as version_option -from .exceptions import Abort as Abort -from .exceptions import BadArgumentUsage as BadArgumentUsage -from .exceptions import BadOptionUsage as BadOptionUsage -from .exceptions import BadParameter as BadParameter -from .exceptions import ClickException as ClickException -from .exceptions import FileError as FileError -from .exceptions import MissingParameter as MissingParameter -from .exceptions import NoSuchOption as NoSuchOption -from .exceptions import UsageError as UsageError -from .formatting import HelpFormatter as HelpFormatter -from .formatting import wrap_text as wrap_text -from .globals import get_current_context as get_current_context -from .parser import OptionParser as OptionParser -from .termui import clear as clear -from .termui import confirm as confirm -from .termui import echo_via_pager as echo_via_pager -from .termui import edit as edit -from .termui import getchar as getchar -from .termui import launch as launch -from .termui import pause as pause -from .termui import progressbar as progressbar -from .termui import prompt as prompt -from .termui import secho as secho -from .termui import style as style -from .termui import unstyle as unstyle -from .types import BOOL as BOOL -from .types import Choice as Choice -from .types import DateTime as DateTime -from .types import File as File -from .types import FLOAT as FLOAT -from .types import FloatRange as FloatRange 
-from .types import INT as INT -from .types import IntRange as IntRange -from .types import ParamType as ParamType -from .types import Path as Path -from .types import STRING as STRING -from .types import Tuple as Tuple -from .types import UNPROCESSED as UNPROCESSED -from .types import UUID as UUID -from .utils import echo as echo -from .utils import format_filename as format_filename -from .utils import get_app_dir as get_app_dir -from .utils import get_binary_stream as get_binary_stream -from .utils import get_text_stream as get_text_stream -from .utils import open_file as open_file - -__version__ = "8.1.3" diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index c68686f..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_compat.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_compat.cpython-311.pyc deleted file mode 100644 index 05adae1..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_compat.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_termui_impl.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_termui_impl.cpython-311.pyc deleted file mode 100644 index d81ef95..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_termui_impl.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_textwrap.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_textwrap.cpython-311.pyc deleted file mode 100644 index 608834a..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_textwrap.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_winconsole.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_winconsole.cpython-311.pyc deleted file mode 100644 index 882c954..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/_winconsole.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/core.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/core.cpython-311.pyc deleted file mode 100644 index 605c48a..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/core.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/decorators.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/decorators.cpython-311.pyc deleted file mode 100644 index 5aeedb7..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/decorators.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/exceptions.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/exceptions.cpython-311.pyc deleted file mode 100644 index 8c57d06..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/exceptions.cpython-311.pyc and /dev/null differ diff --git 
a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/formatting.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/formatting.cpython-311.pyc deleted file mode 100644 index b7966c6..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/formatting.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/globals.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/globals.cpython-311.pyc deleted file mode 100644 index 5205000..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/globals.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/parser.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/parser.cpython-311.pyc deleted file mode 100644 index 31b0b73..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/parser.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/shell_completion.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/shell_completion.cpython-311.pyc deleted file mode 100644 index fe0ae52..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/shell_completion.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/termui.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/termui.cpython-311.pyc deleted file mode 100644 index 2280ccb..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/termui.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/testing.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/testing.cpython-311.pyc deleted file mode 100644 index 93b3b76..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/testing.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/types.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/types.cpython-311.pyc deleted file mode 100644 index 9b62237..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/types.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/utils.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/utils.cpython-311.pyc deleted file mode 100644 index 3e4f149..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/click/__pycache__/utils.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/_compat.py b/venv_flaskchat/lib/python3.11/site-packages/click/_compat.py deleted file mode 100644 index 766d286..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/_compat.py +++ /dev/null @@ -1,626 +0,0 @@ -import codecs -import io -import os -import re -import sys -import typing as t -from weakref import WeakKeyDictionary - -CYGWIN = sys.platform.startswith("cygwin") -MSYS2 = sys.platform.startswith("win") and ("GCC" in sys.version) -# Determine local App Engine environment, per Google's own suggestion -APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get( - 
"SERVER_SOFTWARE", "" -) -WIN = sys.platform.startswith("win") and not APP_ENGINE and not MSYS2 -auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None -_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") - - -def get_filesystem_encoding() -> str: - return sys.getfilesystemencoding() or sys.getdefaultencoding() - - -def _make_text_stream( - stream: t.BinaryIO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if encoding is None: - encoding = get_best_encoding(stream) - if errors is None: - errors = "replace" - return _NonClosingTextIOWrapper( - stream, - encoding, - errors, - line_buffering=True, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def is_ascii_encoding(encoding: str) -> bool: - """Checks if a given encoding is ascii.""" - try: - return codecs.lookup(encoding).name == "ascii" - except LookupError: - return False - - -def get_best_encoding(stream: t.IO) -> str: - """Returns the default stream encoding if not found.""" - rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() - if is_ascii_encoding(rv): - return "utf-8" - return rv - - -class _NonClosingTextIOWrapper(io.TextIOWrapper): - def __init__( - self, - stream: t.BinaryIO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, - force_writable: bool = False, - **extra: t.Any, - ) -> None: - self._stream = stream = t.cast( - t.BinaryIO, _FixupStream(stream, force_readable, force_writable) - ) - super().__init__(stream, encoding, errors, **extra) - - def __del__(self) -> None: - try: - self.detach() - except Exception: - pass - - def isatty(self) -> bool: - # https://bitbucket.org/pypy/pypy/issue/1803 - return self._stream.isatty() - - -class _FixupStream: - """The new io interface needs more from streams than streams - traditionally implement. As such, this fix-up code is necessary in - some circumstances. - - The forcing of readable and writable flags are there because some tools - put badly patched objects on sys (one such offender are certain version - of jupyter notebook). 
- """ - - def __init__( - self, - stream: t.BinaryIO, - force_readable: bool = False, - force_writable: bool = False, - ): - self._stream = stream - self._force_readable = force_readable - self._force_writable = force_writable - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._stream, name) - - def read1(self, size: int) -> bytes: - f = getattr(self._stream, "read1", None) - - if f is not None: - return t.cast(bytes, f(size)) - - return self._stream.read(size) - - def readable(self) -> bool: - if self._force_readable: - return True - x = getattr(self._stream, "readable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.read(0) - except Exception: - return False - return True - - def writable(self) -> bool: - if self._force_writable: - return True - x = getattr(self._stream, "writable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.write("") # type: ignore - except Exception: - try: - self._stream.write(b"") - except Exception: - return False - return True - - def seekable(self) -> bool: - x = getattr(self._stream, "seekable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.seek(self._stream.tell()) - except Exception: - return False - return True - - -def _is_binary_reader(stream: t.IO, default: bool = False) -> bool: - try: - return isinstance(stream.read(0), bytes) - except Exception: - return default - # This happens in some cases where the stream was already - # closed. In this case, we assume the default. - - -def _is_binary_writer(stream: t.IO, default: bool = False) -> bool: - try: - stream.write(b"") - except Exception: - try: - stream.write("") - return False - except Exception: - pass - return default - return True - - -def _find_binary_reader(stream: t.IO) -> t.Optional[t.BinaryIO]: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_reader(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_reader(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _find_binary_writer(stream: t.IO) -> t.Optional[t.BinaryIO]: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_writer(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_writer(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _stream_is_misconfigured(stream: t.TextIO) -> bool: - """A stream is misconfigured if its encoding is ASCII.""" - # If the stream does not have an encoding set, we assume it's set - # to ASCII. This appears to happen in certain unittest - # environments. It's not quite clear what the correct behavior is - # but this at least will force Click to recover somehow. 
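# A standalone sketch of the ASCII-misconfiguration fallback described above:
# a stream that reports an ASCII (or missing) encoding is treated as UTF-8 so
# non-ASCII output does not blow up. The helpers are re-declared here for
# illustration rather than imported from the private click._compat module.
import codecs
import io
import sys


def is_ascii_encoding(encoding: str) -> bool:
    try:
        return codecs.lookup(encoding).name == "ascii"
    except LookupError:
        return False


def best_encoding(stream) -> str:
    rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
    return "utf-8" if is_ascii_encoding(rv) else rv


ascii_stream = io.TextIOWrapper(io.BytesIO(), encoding="ascii")
print(best_encoding(ascii_stream))   # -> 'utf-8'
print(best_encoding(sys.stdout))     # -> whatever stdout actually reports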
- return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") - - -def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: - """A stream attribute is compatible if it is equal to the - desired value or the desired value is unset and the attribute - has a value. - """ - stream_value = getattr(stream, attr, None) - return stream_value == value or (value is None and stream_value is not None) - - -def _is_compatible_text_stream( - stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] -) -> bool: - """Check if a stream's encoding and errors attributes are - compatible with the desired values. - """ - return _is_compat_stream_attr( - stream, "encoding", encoding - ) and _is_compat_stream_attr(stream, "errors", errors) - - -def _force_correct_text_stream( - text_stream: t.IO, - encoding: t.Optional[str], - errors: t.Optional[str], - is_binary: t.Callable[[t.IO, bool], bool], - find_binary: t.Callable[[t.IO], t.Optional[t.BinaryIO]], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if is_binary(text_stream, False): - binary_reader = t.cast(t.BinaryIO, text_stream) - else: - text_stream = t.cast(t.TextIO, text_stream) - # If the stream looks compatible, and won't default to a - # misconfigured ascii encoding, return it as-is. - if _is_compatible_text_stream(text_stream, encoding, errors) and not ( - encoding is None and _stream_is_misconfigured(text_stream) - ): - return text_stream - - # Otherwise, get the underlying binary reader. - possible_binary_reader = find_binary(text_stream) - - # If that's not possible, silently use the original reader - # and get mojibake instead of exceptions. - if possible_binary_reader is None: - return text_stream - - binary_reader = possible_binary_reader - - # Default errors to replace instead of strict in order to get - # something that works. - if errors is None: - errors = "replace" - - # Wrap the binary stream in a text stream with the correct - # encoding parameters. 
- return _make_text_stream( - binary_reader, - encoding, - errors, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def _force_correct_text_reader( - text_reader: t.IO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_reader, - encoding, - errors, - _is_binary_reader, - _find_binary_reader, - force_readable=force_readable, - ) - - -def _force_correct_text_writer( - text_writer: t.IO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_writable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_writer, - encoding, - errors, - _is_binary_writer, - _find_binary_writer, - force_writable=force_writable, - ) - - -def get_binary_stdin() -> t.BinaryIO: - reader = _find_binary_reader(sys.stdin) - if reader is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdin.") - return reader - - -def get_binary_stdout() -> t.BinaryIO: - writer = _find_binary_writer(sys.stdout) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdout.") - return writer - - -def get_binary_stderr() -> t.BinaryIO: - writer = _find_binary_writer(sys.stderr) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stderr.") - return writer - - -def get_text_stdin( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdin, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) - - -def get_text_stdout( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdout, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) - - -def get_text_stderr( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stderr, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) - - -def _wrap_io_open( - file: t.Union[str, os.PathLike, int], - mode: str, - encoding: t.Optional[str], - errors: t.Optional[str], -) -> t.IO: - """Handles not passing ``encoding`` and ``errors`` in binary mode.""" - if "b" in mode: - return open(file, mode) - - return open(file, mode, encoding=encoding, errors=errors) - - -def open_stream( - filename: str, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - atomic: bool = False, -) -> t.Tuple[t.IO, bool]: - binary = "b" in mode - - # Standard streams first. These are simple because they ignore the - # atomic flag. Use fsdecode to handle Path("-"). - if os.fsdecode(filename) == "-": - if any(m in mode for m in ["w", "a", "x"]): - if binary: - return get_binary_stdout(), False - return get_text_stdout(encoding=encoding, errors=errors), False - if binary: - return get_binary_stdin(), False - return get_text_stdin(encoding=encoding, errors=errors), False - - # Non-atomic writes directly go out through the regular open functions. 
- if not atomic: - return _wrap_io_open(filename, mode, encoding, errors), True - - # Some usability stuff for atomic writes - if "a" in mode: - raise ValueError( - "Appending to an existing file is not supported, because that" - " would involve an expensive `copy`-operation to a temporary" - " file. Open the file in normal `w`-mode and copy explicitly" - " if that's what you're after." - ) - if "x" in mode: - raise ValueError("Use the `overwrite`-parameter instead.") - if "w" not in mode: - raise ValueError("Atomic writes only make sense with `w`-mode.") - - # Atomic writes are more complicated. They work by opening a file - # as a proxy in the same folder and then using the fdopen - # functionality to wrap it in a Python file. Then we wrap it in an - # atomic file that moves the file over on close. - import errno - import random - - try: - perm: t.Optional[int] = os.stat(filename).st_mode - except OSError: - perm = None - - flags = os.O_RDWR | os.O_CREAT | os.O_EXCL - - if binary: - flags |= getattr(os, "O_BINARY", 0) - - while True: - tmp_filename = os.path.join( - os.path.dirname(filename), - f".__atomic-write{random.randrange(1 << 32):08x}", - ) - try: - fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) - break - except OSError as e: - if e.errno == errno.EEXIST or ( - os.name == "nt" - and e.errno == errno.EACCES - and os.path.isdir(e.filename) - and os.access(e.filename, os.W_OK) - ): - continue - raise - - if perm is not None: - os.chmod(tmp_filename, perm) # in case perm includes bits in umask - - f = _wrap_io_open(fd, mode, encoding, errors) - af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) - return t.cast(t.IO, af), True - - -class _AtomicFile: - def __init__(self, f: t.IO, tmp_filename: str, real_filename: str) -> None: - self._f = f - self._tmp_filename = tmp_filename - self._real_filename = real_filename - self.closed = False - - @property - def name(self) -> str: - return self._real_filename - - def close(self, delete: bool = False) -> None: - if self.closed: - return - self._f.close() - os.replace(self._tmp_filename, self._real_filename) - self.closed = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._f, name) - - def __enter__(self) -> "_AtomicFile": - return self - - def __exit__(self, exc_type, exc_value, tb): # type: ignore - self.close(delete=exc_type is not None) - - def __repr__(self) -> str: - return repr(self._f) - - -def strip_ansi(value: str) -> str: - return _ansi_re.sub("", value) - - -def _is_jupyter_kernel_output(stream: t.IO) -> bool: - while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): - stream = stream._stream - - return stream.__class__.__module__.startswith("ipykernel.") - - -def should_strip_ansi( - stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None -) -> bool: - if color is None: - if stream is None: - stream = sys.stdin - return not isatty(stream) and not _is_jupyter_kernel_output(stream) - return not color - - -# On Windows, wrap the output streams with colorama to support ANSI -# color codes. 
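# A compact sketch of the atomic-write pattern that open_stream() implements
# above: write to a temporary file in the same directory, then swap it into
# place with os.replace() so readers only ever see the old or the new file.
# This standalone version uses tempfile instead of the hand-rolled O_EXCL
# loop, and the settings.ini path is purely illustrative.
import os
import tempfile


def atomic_write_text(path: str, data: str, encoding: str = "utf-8") -> None:
    directory = os.path.dirname(os.path.abspath(path))
    # The temp file must be on the same filesystem for os.replace() to be atomic.
    fd, tmp_path = tempfile.mkstemp(dir=directory, prefix=".__atomic-write")
    try:
        with os.fdopen(fd, "w", encoding=encoding) as tmp:
            tmp.write(data)
        os.replace(tmp_path, path)
    except BaseException:
        os.unlink(tmp_path)              # drop the temp file if anything failed
        raise


atomic_write_text("settings.ini", "[app]\nready = true\n")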
-# NOTE: double check is needed so mypy does not analyze this on Linux -if sys.platform.startswith("win") and WIN: - from ._winconsole import _get_windows_console_stream - - def _get_argv_encoding() -> str: - import locale - - return locale.getpreferredencoding() - - _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def auto_wrap_for_ansi( - stream: t.TextIO, color: t.Optional[bool] = None - ) -> t.TextIO: - """Support ANSI color and style codes on Windows by wrapping a - stream with colorama. - """ - try: - cached = _ansi_stream_wrappers.get(stream) - except Exception: - cached = None - - if cached is not None: - return cached - - import colorama - - strip = should_strip_ansi(stream, color) - ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) - rv = t.cast(t.TextIO, ansi_wrapper.stream) - _write = rv.write - - def _safe_write(s): - try: - return _write(s) - except BaseException: - ansi_wrapper.reset_all() - raise - - rv.write = _safe_write - - try: - _ansi_stream_wrappers[stream] = rv - except Exception: - pass - - return rv - -else: - - def _get_argv_encoding() -> str: - return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding() - - def _get_windows_console_stream( - f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] - ) -> t.Optional[t.TextIO]: - return None - - -def term_len(x: str) -> int: - return len(strip_ansi(x)) - - -def isatty(stream: t.IO) -> bool: - try: - return stream.isatty() - except Exception: - return False - - -def _make_cached_stream_func( - src_func: t.Callable[[], t.TextIO], wrapper_func: t.Callable[[], t.TextIO] -) -> t.Callable[[], t.TextIO]: - cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def func() -> t.TextIO: - stream = src_func() - try: - rv = cache.get(stream) - except Exception: - rv = None - if rv is not None: - return rv - rv = wrapper_func() - try: - cache[stream] = rv - except Exception: - pass - return rv - - return func - - -_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) -_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) -_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) - - -binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { - "stdin": get_binary_stdin, - "stdout": get_binary_stdout, - "stderr": get_binary_stderr, -} - -text_streams: t.Mapping[ - str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] -] = { - "stdin": get_text_stdin, - "stdout": get_text_stdout, - "stderr": get_text_stderr, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/_termui_impl.py b/venv_flaskchat/lib/python3.11/site-packages/click/_termui_impl.py deleted file mode 100644 index 4b979bc..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/_termui_impl.py +++ /dev/null @@ -1,717 +0,0 @@ -""" -This module contains implementations for the termui module. To keep the -import time of Click down, some infrequently used functionality is -placed in this module and only imported as needed. 
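# A sketch of the ANSI handling above: strip escape sequences with the same
# regular expression the deleted module uses, so the printable width of styled
# text can be measured. Re-declared standalone for illustration only.
import re

ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")


def strip_ansi(value: str) -> str:
    return ansi_re.sub("", value)


def term_len(value: str) -> int:
    return len(strip_ansi(value))


styled = "\033[31mERROR\033[0m"        # 'ERROR' wrapped in red/reset codes
print(len(styled), term_len(styled))   # -> 14 5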
-""" -import contextlib -import math -import os -import sys -import time -import typing as t -from gettext import gettext as _ - -from ._compat import _default_text_stdout -from ._compat import CYGWIN -from ._compat import get_best_encoding -from ._compat import isatty -from ._compat import open_stream -from ._compat import strip_ansi -from ._compat import term_len -from ._compat import WIN -from .exceptions import ClickException -from .utils import echo - -V = t.TypeVar("V") - -if os.name == "nt": - BEFORE_BAR = "\r" - AFTER_BAR = "\n" -else: - BEFORE_BAR = "\r\033[?25l" - AFTER_BAR = "\033[?25h\n" - - -class ProgressBar(t.Generic[V]): - def __init__( - self, - iterable: t.Optional[t.Iterable[V]], - length: t.Optional[int] = None, - fill_char: str = "#", - empty_char: str = " ", - bar_template: str = "%(bar)s", - info_sep: str = " ", - show_eta: bool = True, - show_percent: t.Optional[bool] = None, - show_pos: bool = False, - item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, - label: t.Optional[str] = None, - file: t.Optional[t.TextIO] = None, - color: t.Optional[bool] = None, - update_min_steps: int = 1, - width: int = 30, - ) -> None: - self.fill_char = fill_char - self.empty_char = empty_char - self.bar_template = bar_template - self.info_sep = info_sep - self.show_eta = show_eta - self.show_percent = show_percent - self.show_pos = show_pos - self.item_show_func = item_show_func - self.label = label or "" - if file is None: - file = _default_text_stdout() - self.file = file - self.color = color - self.update_min_steps = update_min_steps - self._completed_intervals = 0 - self.width = width - self.autowidth = width == 0 - - if length is None: - from operator import length_hint - - length = length_hint(iterable, -1) - - if length == -1: - length = None - if iterable is None: - if length is None: - raise TypeError("iterable or length is required") - iterable = t.cast(t.Iterable[V], range(length)) - self.iter = iter(iterable) - self.length = length - self.pos = 0 - self.avg: t.List[float] = [] - self.start = self.last_eta = time.time() - self.eta_known = False - self.finished = False - self.max_width: t.Optional[int] = None - self.entered = False - self.current_item: t.Optional[V] = None - self.is_hidden = not isatty(self.file) - self._last_line: t.Optional[str] = None - - def __enter__(self) -> "ProgressBar": - self.entered = True - self.render_progress() - return self - - def __exit__(self, exc_type, exc_value, tb): # type: ignore - self.render_finish() - - def __iter__(self) -> t.Iterator[V]: - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - self.render_progress() - return self.generator() - - def __next__(self) -> V: - # Iteration is defined in terms of a generator function, - # returned by iter(self); use that to define next(). This works - # because `self.iter` is an iterable consumed by that generator, - # so it is re-entry safe. Calling `next(self.generator())` - # twice works and does "what you want". 
- return next(iter(self)) - - def render_finish(self) -> None: - if self.is_hidden: - return - self.file.write(AFTER_BAR) - self.file.flush() - - @property - def pct(self) -> float: - if self.finished: - return 1.0 - return min(self.pos / (float(self.length or 1) or 1), 1.0) - - @property - def time_per_iteration(self) -> float: - if not self.avg: - return 0.0 - return sum(self.avg) / float(len(self.avg)) - - @property - def eta(self) -> float: - if self.length is not None and not self.finished: - return self.time_per_iteration * (self.length - self.pos) - return 0.0 - - def format_eta(self) -> str: - if self.eta_known: - t = int(self.eta) - seconds = t % 60 - t //= 60 - minutes = t % 60 - t //= 60 - hours = t % 24 - t //= 24 - if t > 0: - return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" - else: - return f"{hours:02}:{minutes:02}:{seconds:02}" - return "" - - def format_pos(self) -> str: - pos = str(self.pos) - if self.length is not None: - pos += f"/{self.length}" - return pos - - def format_pct(self) -> str: - return f"{int(self.pct * 100): 4}%"[1:] - - def format_bar(self) -> str: - if self.length is not None: - bar_length = int(self.pct * self.width) - bar = self.fill_char * bar_length - bar += self.empty_char * (self.width - bar_length) - elif self.finished: - bar = self.fill_char * self.width - else: - chars = list(self.empty_char * (self.width or 1)) - if self.time_per_iteration != 0: - chars[ - int( - (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) - * self.width - ) - ] = self.fill_char - bar = "".join(chars) - return bar - - def format_progress_line(self) -> str: - show_percent = self.show_percent - - info_bits = [] - if self.length is not None and show_percent is None: - show_percent = not self.show_pos - - if self.show_pos: - info_bits.append(self.format_pos()) - if show_percent: - info_bits.append(self.format_pct()) - if self.show_eta and self.eta_known and not self.finished: - info_bits.append(self.format_eta()) - if self.item_show_func is not None: - item_info = self.item_show_func(self.current_item) - if item_info is not None: - info_bits.append(item_info) - - return ( - self.bar_template - % { - "label": self.label, - "bar": self.format_bar(), - "info": self.info_sep.join(info_bits), - } - ).rstrip() - - def render_progress(self) -> None: - import shutil - - if self.is_hidden: - # Only output the label as it changes if the output is not a - # TTY. Use file=stderr if you expect to be piping stdout. - if self._last_line != self.label: - self._last_line = self.label - echo(self.label, file=self.file, color=self.color) - - return - - buf = [] - # Update width in case the terminal has been resized - if self.autowidth: - old_width = self.width - self.width = 0 - clutter_length = term_len(self.format_progress_line()) - new_width = max(0, shutil.get_terminal_size().columns - clutter_length) - if new_width < old_width: - buf.append(BEFORE_BAR) - buf.append(" " * self.max_width) # type: ignore - self.max_width = new_width - self.width = new_width - - clear_width = self.width - if self.max_width is not None: - clear_width = self.max_width - - buf.append(BEFORE_BAR) - line = self.format_progress_line() - line_len = term_len(line) - if self.max_width is None or self.max_width < line_len: - self.max_width = line_len - - buf.append(line) - buf.append(" " * (clear_width - line_len)) - line = "".join(buf) - # Render the line only if it changed. 
- - if line != self._last_line: - self._last_line = line - echo(line, file=self.file, color=self.color, nl=False) - self.file.flush() - - def make_step(self, n_steps: int) -> None: - self.pos += n_steps - if self.length is not None and self.pos >= self.length: - self.finished = True - - if (time.time() - self.last_eta) < 1.0: - return - - self.last_eta = time.time() - - # self.avg is a rolling list of length <= 7 of steps where steps are - # defined as time elapsed divided by the total progress through - # self.length. - if self.pos: - step = (time.time() - self.start) / self.pos - else: - step = time.time() - self.start - - self.avg = self.avg[-6:] + [step] - - self.eta_known = self.length is not None - - def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: - """Update the progress bar by advancing a specified number of - steps, and optionally set the ``current_item`` for this new - position. - - :param n_steps: Number of steps to advance. - :param current_item: Optional item to set as ``current_item`` - for the updated position. - - .. versionchanged:: 8.0 - Added the ``current_item`` optional parameter. - - .. versionchanged:: 8.0 - Only render when the number of steps meets the - ``update_min_steps`` threshold. - """ - if current_item is not None: - self.current_item = current_item - - self._completed_intervals += n_steps - - if self._completed_intervals >= self.update_min_steps: - self.make_step(self._completed_intervals) - self.render_progress() - self._completed_intervals = 0 - - def finish(self) -> None: - self.eta_known = False - self.current_item = None - self.finished = True - - def generator(self) -> t.Iterator[V]: - """Return a generator which yields the items added to the bar - during construction, and updates the progress bar *after* the - yielded block returns. - """ - # WARNING: the iterator interface for `ProgressBar` relies on - # this and only works because this is a simple generator which - # doesn't create or manage additional state. If this function - # changes, the impact should be evaluated both against - # `iter(bar)` and `next(bar)`. `next()` in particular may call - # `self.generator()` repeatedly, and this must remain safe in - # order for that interface to work. - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - - if self.is_hidden: - yield from self.iter - else: - for rv in self.iter: - self.current_item = rv - - # This allows show_item_func to be updated before the - # item is processed. Only trigger at the beginning of - # the update interval. 
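# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# The ProgressBar class above is normally reached through click.progressbar(),
# which must be used as a context manager; iterating the bar advances it and
# the format_* helpers deleted here render the ETA, percentage and bar itself.
import time
import click

with click.progressbar(range(200), label="Processing") as bar:
    for _ in bar:          # each iteration implicitly calls update(1)
        time.sleep(0.01)   # placeholder for real per-item work
# --- end of editor's sketch ---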
- if self._completed_intervals == 0: - self.render_progress() - - yield rv - self.update(1) - - self.finish() - self.render_progress() - - -def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: - """Decide what method to use for paging through text.""" - stdout = _default_text_stdout() - if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, generator, color) - pager_cmd = (os.environ.get("PAGER", None) or "").strip() - if pager_cmd: - if WIN: - return _tempfilepager(generator, pager_cmd, color) - return _pipepager(generator, pager_cmd, color) - if os.environ.get("TERM") in ("dumb", "emacs"): - return _nullpager(stdout, generator, color) - if WIN or sys.platform.startswith("os2"): - return _tempfilepager(generator, "more <", color) - if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: - return _pipepager(generator, "less", color) - - import tempfile - - fd, filename = tempfile.mkstemp() - os.close(fd) - try: - if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: - return _pipepager(generator, "more", color) - return _nullpager(stdout, generator, color) - finally: - os.unlink(filename) - - -def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: - """Page through text by feeding it to another program. Invoking a - pager through this might support colors. - """ - import subprocess - - env = dict(os.environ) - - # If we're piping to less we might support colors under the - # condition that - cmd_detail = cmd.rsplit("/", 1)[-1].split() - if color is None and cmd_detail[0] == "less": - less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" - if not less_flags: - env["LESS"] = "-R" - color = True - elif "r" in less_flags or "R" in less_flags: - color = True - - c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) - stdin = t.cast(t.BinaryIO, c.stdin) - encoding = get_best_encoding(stdin) - try: - for text in generator: - if not color: - text = strip_ansi(text) - - stdin.write(text.encode(encoding, "replace")) - except (OSError, KeyboardInterrupt): - pass - else: - stdin.close() - - # Less doesn't respect ^C, but catches it for its own UI purposes (aborting - # search or other commands inside less). - # - # That means when the user hits ^C, the parent process (click) terminates, - # but less is still alive, paging the output and messing up the terminal. - # - # If the user wants to make the pager exit on ^C, they should set - # `LESS='-K'`. It's not our decision to make. - while True: - try: - c.wait() - except KeyboardInterrupt: - pass - else: - break - - -def _tempfilepager( - generator: t.Iterable[str], cmd: str, color: t.Optional[bool] -) -> None: - """Page through text by invoking a program on a temporary file.""" - import tempfile - - fd, filename = tempfile.mkstemp() - # TODO: This never terminates if the passed generator never terminates. - text = "".join(generator) - if not color: - text = strip_ansi(text) - encoding = get_best_encoding(sys.stdout) - with open_stream(filename, "wb")[0] as f: - f.write(text.encode(encoding)) - try: - os.system(f'{cmd} "{filename}"') - finally: - os.close(fd) - os.unlink(filename) - - -def _nullpager( - stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] -) -> None: - """Simply print unformatted text. 
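# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# pager()/_pipepager()/_tempfilepager() above implement the fallback chain
# ($PAGER, then less/more, then plain output) that sits behind the public
# click.echo_via_pager(). Passing a generator keeps memory use flat:
import click

def report_lines():
    for i in range(10_000):
        yield f"line {i}\n"

click.echo_via_pager(report_lines())
# --- end of editor's sketch ---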
This is the ultimate fallback.""" - for text in generator: - if not color: - text = strip_ansi(text) - stream.write(text) - - -class Editor: - def __init__( - self, - editor: t.Optional[str] = None, - env: t.Optional[t.Mapping[str, str]] = None, - require_save: bool = True, - extension: str = ".txt", - ) -> None: - self.editor = editor - self.env = env - self.require_save = require_save - self.extension = extension - - def get_editor(self) -> str: - if self.editor is not None: - return self.editor - for key in "VISUAL", "EDITOR": - rv = os.environ.get(key) - if rv: - return rv - if WIN: - return "notepad" - for editor in "sensible-editor", "vim", "nano": - if os.system(f"which {editor} >/dev/null 2>&1") == 0: - return editor - return "vi" - - def edit_file(self, filename: str) -> None: - import subprocess - - editor = self.get_editor() - environ: t.Optional[t.Dict[str, str]] = None - - if self.env: - environ = os.environ.copy() - environ.update(self.env) - - try: - c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) - exit_code = c.wait() - if exit_code != 0: - raise ClickException( - _("{editor}: Editing failed").format(editor=editor) - ) - except OSError as e: - raise ClickException( - _("{editor}: Editing failed: {e}").format(editor=editor, e=e) - ) from e - - def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: - import tempfile - - if not text: - data = b"" - elif isinstance(text, (bytes, bytearray)): - data = text - else: - if text and not text.endswith("\n"): - text += "\n" - - if WIN: - data = text.replace("\n", "\r\n").encode("utf-8-sig") - else: - data = text.encode("utf-8") - - fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) - f: t.BinaryIO - - try: - with os.fdopen(fd, "wb") as f: - f.write(data) - - # If the filesystem resolution is 1 second, like Mac OS - # 10.12 Extended, or 2 seconds, like FAT32, and the editor - # closes very fast, require_save can fail. Set the modified - # time to be 2 seconds in the past to work around this. - os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) - # Depending on the resolution, the exact value might not be - # recorded, so get the new recorded value. 
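# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# The Editor helper above backs click.edit(): the text is written to a temp
# file, $VISUAL/$EDITOR (or notepad/vi as a fallback) is launched, and the
# edited text comes back, or None when the file was closed without saving.
import click

message = click.edit("# Describe your change above this line\n")
if message is None:
    click.echo("Aborted: editor closed without saving.")
else:
    click.echo(f"Got {len(message)} characters back.")
# --- end of editor's sketch ---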
- timestamp = os.path.getmtime(name) - - self.edit_file(name) - - if self.require_save and os.path.getmtime(name) == timestamp: - return None - - with open(name, "rb") as f: - rv = f.read() - - if isinstance(text, (bytes, bytearray)): - return rv - - return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore - finally: - os.unlink(name) - - -def open_url(url: str, wait: bool = False, locate: bool = False) -> int: - import subprocess - - def _unquote_file(url: str) -> str: - from urllib.parse import unquote - - if url.startswith("file://"): - url = unquote(url[7:]) - - return url - - if sys.platform == "darwin": - args = ["open"] - if wait: - args.append("-W") - if locate: - args.append("-R") - args.append(_unquote_file(url)) - null = open("/dev/null", "w") - try: - return subprocess.Popen(args, stderr=null).wait() - finally: - null.close() - elif WIN: - if locate: - url = _unquote_file(url.replace('"', "")) - args = f'explorer /select,"{url}"' - else: - url = url.replace('"', "") - wait_str = "/WAIT" if wait else "" - args = f'start {wait_str} "" "{url}"' - return os.system(args) - elif CYGWIN: - if locate: - url = os.path.dirname(_unquote_file(url).replace('"', "")) - args = f'cygstart "{url}"' - else: - url = url.replace('"', "") - wait_str = "-w" if wait else "" - args = f'cygstart {wait_str} "{url}"' - return os.system(args) - - try: - if locate: - url = os.path.dirname(_unquote_file(url)) or "." - else: - url = _unquote_file(url) - c = subprocess.Popen(["xdg-open", url]) - if wait: - return c.wait() - return 0 - except OSError: - if url.startswith(("http://", "https://")) and not locate and not wait: - import webbrowser - - webbrowser.open(url) - return 0 - return 1 - - -def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: - if ch == "\x03": - raise KeyboardInterrupt() - - if ch == "\x04" and not WIN: # Unix-like, Ctrl+D - raise EOFError() - - if ch == "\x1a" and WIN: # Windows, Ctrl+Z - raise EOFError() - - return None - - -if WIN: - import msvcrt - - @contextlib.contextmanager - def raw_terminal() -> t.Iterator[int]: - yield -1 - - def getchar(echo: bool) -> str: - # The function `getch` will return a bytes object corresponding to - # the pressed character. Since Windows 10 build 1803, it will also - # return \x00 when called a second time after pressing a regular key. - # - # `getwch` does not share this probably-bugged behavior. Moreover, it - # returns a Unicode object by default, which is what we want. - # - # Either of these functions will return \x00 or \xe0 to indicate - # a special key, and you need to call the same function again to get - # the "rest" of the code. The fun part is that \u00e0 is - # "latin small letter a with grave", so if you type that on a French - # keyboard, you _also_ get a \xe0. - # E.g., consider the Up arrow. This returns \xe0 and then \x48. The - # resulting Unicode string reads as "a with grave" + "capital H". - # This is indistinguishable from when the user actually types - # "a with grave" and then "capital H". - # - # When \xe0 is returned, we assume it's part of a special-key sequence - # and call `getwch` again, but that means that when the user types - # the \u00e0 character, `getchar` doesn't return until a second - # character is typed. - # The alternative is returning immediately, but that would mess up - # cross-platform handling of arrow keys and others that start with - # \xe0. 
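# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# open_url() above is the engine behind click.launch(): it shells out to
# open/explorer/cygstart/xdg-open depending on the platform, and locate=True
# opens the containing folder instead of the file itself ("report.pdf" is a
# placeholder path):
import click

click.launch("https://palletsprojects.com/p/click/")  # open in the browser
click.launch("report.pdf", locate=True)               # reveal in file manager
# --- end of editor's sketch ---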
Another option is using `getch`, but then we can't reliably - # read non-ASCII characters, because return values of `getch` are - # limited to the current 8-bit codepage. - # - # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` - # is doing the right thing in more situations than with `getch`. - func: t.Callable[[], str] - - if echo: - func = msvcrt.getwche # type: ignore - else: - func = msvcrt.getwch # type: ignore - - rv = func() - - if rv in ("\x00", "\xe0"): - # \x00 and \xe0 are control characters that indicate special key, - # see above. - rv += func() - - _translate_ch_to_exc(rv) - return rv - -else: - import tty - import termios - - @contextlib.contextmanager - def raw_terminal() -> t.Iterator[int]: - f: t.Optional[t.TextIO] - fd: int - - if not isatty(sys.stdin): - f = open("/dev/tty") - fd = f.fileno() - else: - fd = sys.stdin.fileno() - f = None - - try: - old_settings = termios.tcgetattr(fd) - - try: - tty.setraw(fd) - yield fd - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - sys.stdout.flush() - - if f is not None: - f.close() - except termios.error: - pass - - def getchar(echo: bool) -> str: - with raw_terminal() as fd: - ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") - - if echo and isatty(sys.stdout): - sys.stdout.write(ch) - - _translate_ch_to_exc(ch) - return ch diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/_textwrap.py b/venv_flaskchat/lib/python3.11/site-packages/click/_textwrap.py deleted file mode 100644 index b47dcbd..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/_textwrap.py +++ /dev/null @@ -1,49 +0,0 @@ -import textwrap -import typing as t -from contextlib import contextmanager - - -class TextWrapper(textwrap.TextWrapper): - def _handle_long_word( - self, - reversed_chunks: t.List[str], - cur_line: t.List[str], - cur_len: int, - width: int, - ) -> None: - space_left = max(width - cur_len, 1) - - if self.break_long_words: - last = reversed_chunks[-1] - cut = last[:space_left] - res = last[space_left:] - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) - - @contextmanager - def extra_indent(self, indent: str) -> t.Iterator[None]: - old_initial_indent = self.initial_indent - old_subsequent_indent = self.subsequent_indent - self.initial_indent += indent - self.subsequent_indent += indent - - try: - yield - finally: - self.initial_indent = old_initial_indent - self.subsequent_indent = old_subsequent_indent - - def indent_only(self, text: str) -> str: - rv = [] - - for idx, line in enumerate(text.splitlines()): - indent = self.initial_indent - - if idx > 0: - indent = self.subsequent_indent - - rv.append(f"{indent}{line}") - - return "\n".join(rv) diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/_winconsole.py b/venv_flaskchat/lib/python3.11/site-packages/click/_winconsole.py deleted file mode 100644 index 6b20df3..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/_winconsole.py +++ /dev/null @@ -1,279 +0,0 @@ -# This module is based on the excellent work by Adam Bartoš who -# provided a lot of what went into the implementation here in -# the discussion to issue1602 in the Python bug tracker. -# -# There are some general differences in regards to how this works -# compared to the original patches as we do not need to patch -# the entire interpreter but just work in our little world of -# echo and prompt. 
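# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# raw_terminal()/getchar() above surface as click.getchar(): it reads a single
# keypress without waiting for Enter, and _translate_ch_to_exc() turns Ctrl-C
# and Ctrl-D (Ctrl-Z on Windows) back into KeyboardInterrupt/EOFError.
import click

click.echo("Press any key to continue...", nl=False)
key = click.getchar()
click.echo(f"\nYou pressed {key!r}")
# --- end of editor's sketch ---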
-import io -import sys -import time -import typing as t -from ctypes import byref -from ctypes import c_char -from ctypes import c_char_p -from ctypes import c_int -from ctypes import c_ssize_t -from ctypes import c_ulong -from ctypes import c_void_p -from ctypes import POINTER -from ctypes import py_object -from ctypes import Structure -from ctypes.wintypes import DWORD -from ctypes.wintypes import HANDLE -from ctypes.wintypes import LPCWSTR -from ctypes.wintypes import LPWSTR - -from ._compat import _NonClosingTextIOWrapper - -assert sys.platform == "win32" -import msvcrt # noqa: E402 -from ctypes import windll # noqa: E402 -from ctypes import WINFUNCTYPE # noqa: E402 - -c_ssize_p = POINTER(c_ssize_t) - -kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW -GetConsoleMode = kernel32.GetConsoleMode -GetLastError = kernel32.GetLastError -GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ("CommandLineToArgvW", windll.shell32) -) -LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) - -STDIN_HANDLE = GetStdHandle(-10) -STDOUT_HANDLE = GetStdHandle(-11) -STDERR_HANDLE = GetStdHandle(-12) - -PyBUF_SIMPLE = 0 -PyBUF_WRITABLE = 1 - -ERROR_SUCCESS = 0 -ERROR_NOT_ENOUGH_MEMORY = 8 -ERROR_OPERATION_ABORTED = 995 - -STDIN_FILENO = 0 -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -EOF = b"\x1a" -MAX_BYTES_WRITTEN = 32767 - -try: - from ctypes import pythonapi -except ImportError: - # On PyPy we cannot get buffers so our ability to operate here is - # severely limited. - get_buffer = None -else: - - class Py_buffer(Structure): - _fields_ = [ - ("buf", c_void_p), - ("obj", py_object), - ("len", c_ssize_t), - ("itemsize", c_ssize_t), - ("readonly", c_int), - ("ndim", c_int), - ("format", c_char_p), - ("shape", c_ssize_p), - ("strides", c_ssize_p), - ("suboffsets", c_ssize_p), - ("internal", c_void_p), - ] - - PyObject_GetBuffer = pythonapi.PyObject_GetBuffer - PyBuffer_Release = pythonapi.PyBuffer_Release - - def get_buffer(obj, writable=False): - buf = Py_buffer() - flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE - PyObject_GetBuffer(py_object(obj), byref(buf), flags) - - try: - buffer_type = c_char * buf.len - return buffer_type.from_address(buf.buf) - finally: - PyBuffer_Release(byref(buf)) - - -class _WindowsConsoleRawIOBase(io.RawIOBase): - def __init__(self, handle): - self.handle = handle - - def isatty(self): - super().isatty() - return True - - -class _WindowsConsoleReader(_WindowsConsoleRawIOBase): - def readable(self): - return True - - def readinto(self, b): - bytes_to_be_read = len(b) - if not bytes_to_be_read: - return 0 - elif bytes_to_be_read % 2: - raise ValueError( - "cannot read odd number of bytes from UTF-16-LE encoded console" - ) - - buffer = get_buffer(b, writable=True) - code_units_to_be_read = bytes_to_be_read // 2 - code_units_read = c_ulong() - - rv = ReadConsoleW( - HANDLE(self.handle), - buffer, - code_units_to_be_read, - byref(code_units_read), - None, - ) - if GetLastError() == ERROR_OPERATION_ABORTED: - # wait for KeyboardInterrupt - time.sleep(0.1) - if not rv: - raise OSError(f"Windows error: {GetLastError()}") - - if buffer[0] == EOF: - return 0 - return 2 * code_units_read.value - - -class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): - def writable(self): - return True - - @staticmethod - def _get_error_message(errno): - if errno == ERROR_SUCCESS: - 
return "ERROR_SUCCESS" - elif errno == ERROR_NOT_ENOUGH_MEMORY: - return "ERROR_NOT_ENOUGH_MEMORY" - return f"Windows error {errno}" - - def write(self, b): - bytes_to_be_written = len(b) - buf = get_buffer(b) - code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 - code_units_written = c_ulong() - - WriteConsoleW( - HANDLE(self.handle), - buf, - code_units_to_be_written, - byref(code_units_written), - None, - ) - bytes_written = 2 * code_units_written.value - - if bytes_written == 0 and bytes_to_be_written > 0: - raise OSError(self._get_error_message(GetLastError())) - return bytes_written - - -class ConsoleStream: - def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: - self._text_stream = text_stream - self.buffer = byte_stream - - @property - def name(self) -> str: - return self.buffer.name - - def write(self, x: t.AnyStr) -> int: - if isinstance(x, str): - return self._text_stream.write(x) - try: - self.flush() - except Exception: - pass - return self.buffer.write(x) - - def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: - for line in lines: - self.write(line) - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._text_stream, name) - - def isatty(self) -> bool: - return self.buffer.isatty() - - def __repr__(self): - return f"" - - -def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { - 0: _get_text_stdin, - 1: _get_text_stdout, - 2: _get_text_stderr, -} - - -def _is_console(f: t.TextIO) -> bool: - if not hasattr(f, "fileno"): - return False - - try: - fileno = f.fileno() - except (OSError, io.UnsupportedOperation): - return False - - handle = msvcrt.get_osfhandle(fileno) - return bool(GetConsoleMode(handle, byref(DWORD()))) - - -def _get_windows_console_stream( - f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] -) -> t.Optional[t.TextIO]: - if ( - get_buffer is not None - and encoding in {"utf-16-le", None} - and errors in {"strict", None} - and _is_console(f) - ): - func = _stream_factories.get(f.fileno()) - if func is not None: - b = getattr(f, "buffer", None) - - if b is None: - return None - - return func(b) diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/core.py b/venv_flaskchat/lib/python3.11/site-packages/click/core.py deleted file mode 100644 index 5abfb0f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/core.py +++ /dev/null @@ -1,2998 +0,0 @@ -import enum -import errno -import inspect -import os -import sys -import typing as t -from collections import abc -from contextlib import contextmanager -from contextlib import ExitStack -from functools import partial -from functools import 
update_wrapper -from gettext import gettext as _ -from gettext import ngettext -from itertools import repeat - -from . import types -from .exceptions import Abort -from .exceptions import BadParameter -from .exceptions import ClickException -from .exceptions import Exit -from .exceptions import MissingParameter -from .exceptions import UsageError -from .formatting import HelpFormatter -from .formatting import join_options -from .globals import pop_context -from .globals import push_context -from .parser import _flag_needs_value -from .parser import OptionParser -from .parser import split_opt -from .termui import confirm -from .termui import prompt -from .termui import style -from .utils import _detect_program_name -from .utils import _expand_args -from .utils import echo -from .utils import make_default_short_help -from .utils import make_str -from .utils import PacifyFlushWrapper - -if t.TYPE_CHECKING: - import typing_extensions as te - from .shell_completion import CompletionItem - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -V = t.TypeVar("V") - - -def _complete_visible_commands( - ctx: "Context", incomplete: str -) -> t.Iterator[t.Tuple[str, "Command"]]: - """List all the subcommands of a group that start with the - incomplete value and aren't hidden. - - :param ctx: Invocation context for the group. - :param incomplete: Value being completed. May be empty. - """ - multi = t.cast(MultiCommand, ctx.command) - - for name in multi.list_commands(ctx): - if name.startswith(incomplete): - command = multi.get_command(ctx, name) - - if command is not None and not command.hidden: - yield name, command - - -def _check_multicommand( - base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False -) -> None: - if not base_command.chain or not isinstance(cmd, MultiCommand): - return - if register: - hint = ( - "It is not possible to add multi commands as children to" - " another multi command that is in chain mode." - ) - else: - hint = ( - "Found a multi command as subcommand to a multi command" - " that is in chain mode. This is not supported." - ) - raise RuntimeError( - f"{hint}. Command {base_command.name!r} is set to chain and" - f" {cmd_name!r} was added as a subcommand but it in itself is a" - f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" - f" within a chained {type(base_command).__name__} named" - f" {base_command.name!r})." - ) - - -def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: - return list(zip(*repeat(iter(iterable), batch_size))) - - -@contextmanager -def augment_usage_errors( - ctx: "Context", param: t.Optional["Parameter"] = None -) -> t.Iterator[None]: - """Context manager that attaches extra information to exceptions.""" - try: - yield - except BadParameter as e: - if e.ctx is None: - e.ctx = ctx - if param is not None and e.param is None: - e.param = param - raise - except UsageError as e: - if e.ctx is None: - e.ctx = ctx - raise - - -def iter_params_for_processing( - invocation_order: t.Sequence["Parameter"], - declaration_order: t.Sequence["Parameter"], -) -> t.List["Parameter"]: - """Given a sequence of parameters in the order as should be considered - for processing and an iterable of parameters that exist, this returns - a list in the correct order as they should be processed. 
- """ - - def sort_key(item: "Parameter") -> t.Tuple[bool, float]: - try: - idx: float = invocation_order.index(item) - except ValueError: - idx = float("inf") - - return not item.is_eager, idx - - return sorted(declaration_order, key=sort_key) - - -class ParameterSource(enum.Enum): - """This is an :class:`~enum.Enum` that indicates the source of a - parameter's value. - - Use :meth:`click.Context.get_parameter_source` to get the - source for a parameter by name. - - .. versionchanged:: 8.0 - Use :class:`~enum.Enum` and drop the ``validate`` method. - - .. versionchanged:: 8.0 - Added the ``PROMPT`` value. - """ - - COMMANDLINE = enum.auto() - """The value was provided by the command line args.""" - ENVIRONMENT = enum.auto() - """The value was provided with an environment variable.""" - DEFAULT = enum.auto() - """Used the default specified by the parameter.""" - DEFAULT_MAP = enum.auto() - """Used a default provided by :attr:`Context.default_map`.""" - PROMPT = enum.auto() - """Used a prompt to confirm a default or provide a value.""" - - -class Context: - """The context is a special internal object that holds state relevant - for the script execution at every single level. It's normally invisible - to commands unless they opt-in to getting access to it. - - The context is useful as it can pass internal objects around and can - control special execution features such as reading data from - environment variables. - - A context can be used as context manager in which case it will call - :meth:`close` on teardown. - - :param command: the command class for this context. - :param parent: the parent context. - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it is usually - the name of the script, for commands below it it's - the name of the script. - :param obj: an arbitrary object of user data. - :param auto_envvar_prefix: the prefix to use for automatic environment - variables. If this is `None` then reading - from environment variables is disabled. This - does not affect manually set environment - variables which are always read. - :param default_map: a dictionary (like object) with default values - for parameters. - :param terminal_width: the width of the terminal. The default is - inherit from parent context. If no context - defines the terminal width then auto - detection will be applied. - :param max_content_width: the maximum width for content rendered by - Click (this currently only affects help - pages). This defaults to 80 characters if - not overridden. In other words: even if the - terminal is larger than that, Click will not - format things wider than 80 characters by - default. In addition to that, formatters might - add some safety mapping on the right. - :param resilient_parsing: if this flag is enabled then Click will - parse without any interactivity or callback - invocation. Default values will also be - ignored. This is useful for implementing - things such as completion support. - :param allow_extra_args: if this is set to `True` then extra arguments - at the end will not raise an error and will be - kept on the context. The default is to inherit - from the command. - :param allow_interspersed_args: if this is set to `False` then options - and arguments cannot be mixed. The - default is to inherit from the command. - :param ignore_unknown_options: instructs click to ignore options it does - not know and keeps them for later - processing. 
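# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# ParameterSource above records where each value came from; a callback can ask
# the context (via get_parameter_source(), defined further down in this file)
# whether the user actually typed a value or the default was used.
import click
from click.core import ParameterSource

@click.command()
@click.option("--port", default=8000)
@click.pass_context
def serve(ctx: click.Context, port: int) -> None:
    source = ctx.get_parameter_source("port")
    if source is ParameterSource.DEFAULT:
        click.echo(f"Using the default port {port}")
    else:
        click.echo(f"Port {port} supplied via {source.name}")
# --- end of editor's sketch ---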
- :param help_option_names: optionally a list of strings that define how - the default help parameter is named. The - default is ``['--help']``. - :param token_normalize_func: an optional function that is used to - normalize tokens (options, choices, - etc.). This for instance can be used to - implement case insensitive behavior. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are used in texts that Click prints which is by - default not the case. This for instance would affect - help output. - :param show_default: Show the default value for commands. If this - value is not set, it defaults to the value from the parent - context. ``Command.show_default`` overrides this default for the - specific command. - - .. versionchanged:: 8.1 - The ``show_default`` parameter is overridden by - ``Command.show_default``, instead of the other way around. - - .. versionchanged:: 8.0 - The ``show_default`` parameter defaults to the value from the - parent context. - - .. versionchanged:: 7.1 - Added the ``show_default`` parameter. - - .. versionchanged:: 4.0 - Added the ``color``, ``ignore_unknown_options``, and - ``max_content_width`` parameters. - - .. versionchanged:: 3.0 - Added the ``allow_extra_args`` and ``allow_interspersed_args`` - parameters. - - .. versionchanged:: 2.0 - Added the ``resilient_parsing``, ``help_option_names``, and - ``token_normalize_func`` parameters. - """ - - #: The formatter class to create with :meth:`make_formatter`. - #: - #: .. versionadded:: 8.0 - formatter_class: t.Type["HelpFormatter"] = HelpFormatter - - def __init__( - self, - command: "Command", - parent: t.Optional["Context"] = None, - info_name: t.Optional[str] = None, - obj: t.Optional[t.Any] = None, - auto_envvar_prefix: t.Optional[str] = None, - default_map: t.Optional[t.Dict[str, t.Any]] = None, - terminal_width: t.Optional[int] = None, - max_content_width: t.Optional[int] = None, - resilient_parsing: bool = False, - allow_extra_args: t.Optional[bool] = None, - allow_interspersed_args: t.Optional[bool] = None, - ignore_unknown_options: t.Optional[bool] = None, - help_option_names: t.Optional[t.List[str]] = None, - token_normalize_func: t.Optional[t.Callable[[str], str]] = None, - color: t.Optional[bool] = None, - show_default: t.Optional[bool] = None, - ) -> None: - #: the parent context or `None` if none exists. - self.parent = parent - #: the :class:`Command` for this context. - self.command = command - #: the descriptive information name - self.info_name = info_name - #: Map of parameter names to their parsed values. Parameters - #: with ``expose_value=False`` are not stored. - self.params: t.Dict[str, t.Any] = {} - #: the leftover arguments. - self.args: t.List[str] = [] - #: protected arguments. These are arguments that are prepended - #: to `args` when certain parsing scenarios are encountered but - #: must be never propagated to another arguments. This is used - #: to implement nested parsing. - self.protected_args: t.List[str] = [] - #: the collected prefixes of the command's options. - self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set() - - if obj is None and parent is not None: - obj = parent.obj - - #: the user object stored. - self.obj: t.Any = obj - self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {}) - - #: A dictionary (-like object) with defaults for parameters. 
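# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# Most of the Context parameters documented above are normally supplied through
# a command's context_settings dict rather than by constructing Context by
# hand, e.g. extra help flags and case-insensitive command matching:
import click

CONTEXT_SETTINGS = dict(
    help_option_names=["-h", "--help"],
    token_normalize_func=lambda token: token.lower(),
)

@click.group(context_settings=CONTEXT_SETTINGS)
def cli() -> None:
    """Example group using the settings above."""

@cli.command()
def sync() -> None:
    click.echo("synced")   # `cli SYNC` also works thanks to the normalizer
# --- end of editor's sketch ---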
- if ( - default_map is None - and info_name is not None - and parent is not None - and parent.default_map is not None - ): - default_map = parent.default_map.get(info_name) - - self.default_map: t.Optional[t.Dict[str, t.Any]] = default_map - - #: This flag indicates if a subcommand is going to be executed. A - #: group callback can use this information to figure out if it's - #: being executed directly or because the execution flow passes - #: onwards to a subcommand. By default it's None, but it can be - #: the name of the subcommand to execute. - #: - #: If chaining is enabled this will be set to ``'*'`` in case - #: any commands are executed. It is however not possible to - #: figure out which ones. If you require this knowledge you - #: should use a :func:`result_callback`. - self.invoked_subcommand: t.Optional[str] = None - - if terminal_width is None and parent is not None: - terminal_width = parent.terminal_width - - #: The width of the terminal (None is autodetection). - self.terminal_width: t.Optional[int] = terminal_width - - if max_content_width is None and parent is not None: - max_content_width = parent.max_content_width - - #: The maximum width of formatted content (None implies a sensible - #: default which is 80 for most things). - self.max_content_width: t.Optional[int] = max_content_width - - if allow_extra_args is None: - allow_extra_args = command.allow_extra_args - - #: Indicates if the context allows extra args or if it should - #: fail on parsing. - #: - #: .. versionadded:: 3.0 - self.allow_extra_args = allow_extra_args - - if allow_interspersed_args is None: - allow_interspersed_args = command.allow_interspersed_args - - #: Indicates if the context allows mixing of arguments and - #: options or not. - #: - #: .. versionadded:: 3.0 - self.allow_interspersed_args: bool = allow_interspersed_args - - if ignore_unknown_options is None: - ignore_unknown_options = command.ignore_unknown_options - - #: Instructs click to ignore options that a command does not - #: understand and will store it on the context for later - #: processing. This is primarily useful for situations where you - #: want to call into external programs. Generally this pattern is - #: strongly discouraged because it's not possibly to losslessly - #: forward all arguments. - #: - #: .. versionadded:: 4.0 - self.ignore_unknown_options: bool = ignore_unknown_options - - if help_option_names is None: - if parent is not None: - help_option_names = parent.help_option_names - else: - help_option_names = ["--help"] - - #: The names for the help options. - self.help_option_names: t.List[str] = help_option_names - - if token_normalize_func is None and parent is not None: - token_normalize_func = parent.token_normalize_func - - #: An optional normalization function for tokens. This is - #: options, choices, commands etc. - self.token_normalize_func: t.Optional[ - t.Callable[[str], str] - ] = token_normalize_func - - #: Indicates if resilient parsing is enabled. In that case Click - #: will do its best to not cause any failures and default values - #: will be ignored. Useful for completion. - self.resilient_parsing: bool = resilient_parsing - - # If there is no envvar prefix yet, but the parent has one and - # the command on this level has a name, we can expand the envvar - # prefix automatically. 
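# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# default_map (resolved per sub-command name above) and auto_envvar_prefix can
# also be passed when the group is invoked, e.g. after loading a config file
# ("CLI" and the port values are placeholders):
import click

@click.group()
def cli() -> None:
    pass

@cli.command()
@click.option("--port", default=8000)
def runserver(port: int) -> None:
    click.echo(f"Serving on port {port}")

if __name__ == "__main__":
    # runserver now defaults to 5000, and CLI_RUNSERVER_PORT can override it.
    cli(default_map={"runserver": {"port": 5000}}, auto_envvar_prefix="CLI")
# --- end of editor's sketch ---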
- if auto_envvar_prefix is None: - if ( - parent is not None - and parent.auto_envvar_prefix is not None - and self.info_name is not None - ): - auto_envvar_prefix = ( - f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" - ) - else: - auto_envvar_prefix = auto_envvar_prefix.upper() - - if auto_envvar_prefix is not None: - auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") - - self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix - - if color is None and parent is not None: - color = parent.color - - #: Controls if styling output is wanted or not. - self.color: t.Optional[bool] = color - - if show_default is None and parent is not None: - show_default = parent.show_default - - #: Show option default values when formatting help text. - self.show_default: t.Optional[bool] = show_default - - self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] - self._depth = 0 - self._parameter_source: t.Dict[str, ParameterSource] = {} - self._exit_stack = ExitStack() - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire CLI - structure. - - .. code-block:: python - - with Context(cli) as ctx: - info = ctx.to_info_dict() - - .. versionadded:: 8.0 - """ - return { - "command": self.command.to_info_dict(self), - "info_name": self.info_name, - "allow_extra_args": self.allow_extra_args, - "allow_interspersed_args": self.allow_interspersed_args, - "ignore_unknown_options": self.ignore_unknown_options, - "auto_envvar_prefix": self.auto_envvar_prefix, - } - - def __enter__(self) -> "Context": - self._depth += 1 - push_context(self) - return self - - def __exit__(self, exc_type, exc_value, tb): # type: ignore - self._depth -= 1 - if self._depth == 0: - self.close() - pop_context() - - @contextmanager - def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: - """This helper method can be used with the context object to promote - it to the current thread local (see :func:`get_current_context`). - The default behavior of this is to invoke the cleanup functions which - can be disabled by setting `cleanup` to `False`. The cleanup - functions are typically used for things such as closing file handles. - - If the cleanup is intended the context object can also be directly - used as a context manager. - - Example usage:: - - with ctx.scope(): - assert get_current_context() is ctx - - This is equivalent:: - - with ctx: - assert get_current_context() is ctx - - .. versionadded:: 5.0 - - :param cleanup: controls if the cleanup functions should be run or - not. The default is to run these functions. In - some situations the context only wants to be - temporarily pushed in which case this can be disabled. - Nested pushes automatically defer the cleanup. - """ - if not cleanup: - self._depth += 1 - try: - with self as rv: - yield rv - finally: - if not cleanup: - self._depth -= 1 - - @property - def meta(self) -> t.Dict[str, t.Any]: - """This is a dictionary which is shared with all the contexts - that are nested. It exists so that click utilities can store some - state here if they need to. It is however the responsibility of - that code to manage this dictionary well. - - The keys are supposed to be unique dotted strings. For instance - module paths are a good choice for it. What is stored in there is - irrelevant for the operation of click. However what is important is - that code that places data here adheres to the general semantics of - the system. 
- - Example usage:: - - LANG_KEY = f'{__name__}.lang' - - def set_language(value): - ctx = get_current_context() - ctx.meta[LANG_KEY] = value - - def get_language(): - return get_current_context().meta.get(LANG_KEY, 'en_US') - - .. versionadded:: 5.0 - """ - return self._meta - - def make_formatter(self) -> HelpFormatter: - """Creates the :class:`~click.HelpFormatter` for the help and - usage output. - - To quickly customize the formatter class used without overriding - this method, set the :attr:`formatter_class` attribute. - - .. versionchanged:: 8.0 - Added the :attr:`formatter_class` attribute. - """ - return self.formatter_class( - width=self.terminal_width, max_width=self.max_content_width - ) - - def with_resource(self, context_manager: t.ContextManager[V]) -> V: - """Register a resource as if it were used in a ``with`` - statement. The resource will be cleaned up when the context is - popped. - - Uses :meth:`contextlib.ExitStack.enter_context`. It calls the - resource's ``__enter__()`` method and returns the result. When - the context is popped, it closes the stack, which calls the - resource's ``__exit__()`` method. - - To register a cleanup function for something that isn't a - context manager, use :meth:`call_on_close`. Or use something - from :mod:`contextlib` to turn it into a context manager first. - - .. code-block:: python - - @click.group() - @click.option("--name") - @click.pass_context - def cli(ctx): - ctx.obj = ctx.with_resource(connect_db(name)) - - :param context_manager: The context manager to enter. - :return: Whatever ``context_manager.__enter__()`` returns. - - .. versionadded:: 8.0 - """ - return self._exit_stack.enter_context(context_manager) - - def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Register a function to be called when the context tears down. - - This can be used to close resources opened during the script - execution. Resources that support Python's context manager - protocol which would be used in a ``with`` statement should be - registered with :meth:`with_resource` instead. - - :param f: The function to execute on teardown. - """ - return self._exit_stack.callback(f) - - def close(self) -> None: - """Invoke all close callbacks registered with - :meth:`call_on_close`, and exit all context managers entered - with :meth:`with_resource`. - """ - self._exit_stack.close() - # In case the context is reused, create a new exit stack. - self._exit_stack = ExitStack() - - @property - def command_path(self) -> str: - """The computed command path. This is used for the ``usage`` - information on the help page. It's automatically created by - combining the info names of the chain of contexts to the root. 
- """ - rv = "" - if self.info_name is not None: - rv = self.info_name - if self.parent is not None: - parent_command_path = [self.parent.command_path] - - if isinstance(self.parent.command, Command): - for param in self.parent.command.get_params(self): - parent_command_path.extend(param.get_usage_pieces(self)) - - rv = f"{' '.join(parent_command_path)} {rv}" - return rv.lstrip() - - def find_root(self) -> "Context": - """Finds the outermost context.""" - node = self - while node.parent is not None: - node = node.parent - return node - - def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: - """Finds the closest object of a given type.""" - node: t.Optional["Context"] = self - - while node is not None: - if isinstance(node.obj, object_type): - return node.obj - - node = node.parent - - return None - - def ensure_object(self, object_type: t.Type[V]) -> V: - """Like :meth:`find_object` but sets the innermost object to a - new instance of `object_type` if it does not exist. - """ - rv = self.find_object(object_type) - if rv is None: - self.obj = rv = object_type() - return rv - - @t.overload - def lookup_default( - self, name: str, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... - - @t.overload - def lookup_default( - self, name: str, call: "te.Literal[False]" = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: - """Get the default for a parameter from :attr:`default_map`. - - :param name: Name of the parameter. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. - """ - if self.default_map is not None: - value = self.default_map.get(name) - - if call and callable(value): - return value() - - return value - - return None - - def fail(self, message: str) -> "te.NoReturn": - """Aborts the execution of the program with a specific error - message. - - :param message: the error message to fail with. - """ - raise UsageError(message, self) - - def abort(self) -> "te.NoReturn": - """Aborts the script.""" - raise Abort() - - def exit(self, code: int = 0) -> "te.NoReturn": - """Exits the application with a given exit code.""" - raise Exit(code) - - def get_usage(self) -> str: - """Helper method to get formatted usage string for the current - context and command. - """ - return self.command.get_usage(self) - - def get_help(self) -> str: - """Helper method to get formatted help page for the current - context and command. - """ - return self.command.get_help(self) - - def _make_sub_context(self, command: "Command") -> "Context": - """Create a new context of the same type as this context, but - for a new command. - - :meta private: - """ - return type(self)(command, info_name=command.name, parent=self) - - def invoke( - __self, # noqa: B902 - __callback: t.Union["Command", t.Callable[..., t.Any]], - *args: t.Any, - **kwargs: t.Any, - ) -> t.Any: - """Invokes a command callback in exactly the way it expects. There - are two ways to invoke this method: - - 1. the first argument can be a callback and all other arguments and - keyword arguments are forwarded directly to the function. - 2. the first argument is a click command object. In that case all - arguments are forwarded as well but proper click parameters - (options and click arguments) must be keyword arguments and Click - will fill in defaults. 
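# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# find_object()/ensure_object() above are what make the usual ctx.obj pattern
# work: the group guarantees a shared object exists and nested commands read it.
import click

@click.group()
@click.option("--verbose", is_flag=True)
@click.pass_context
def cli(ctx: click.Context, verbose: bool) -> None:
    ctx.ensure_object(dict)           # create ctx.obj unless a caller provided one
    ctx.obj["verbose"] = verbose

@cli.command()
@click.pass_context
def sync(ctx: click.Context) -> None:
    if ctx.obj["verbose"]:
        click.echo("verbose sync enabled")
# --- end of editor's sketch ---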
- - Note that before Click 3.2 keyword arguments were not properly filled - in against the intention of this code and no context was created. For - more information about this change and why it was done in a bugfix - release see :ref:`upgrade-to-3.2`. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if :meth:`forward` is called at multiple levels. - """ - if isinstance(__callback, Command): - other_cmd = __callback - - if other_cmd.callback is None: - raise TypeError( - "The given command does not have a callback that can be invoked." - ) - else: - __callback = other_cmd.callback - - ctx = __self._make_sub_context(other_cmd) - - for param in other_cmd.params: - if param.name not in kwargs and param.expose_value: - kwargs[param.name] = param.type_cast_value( # type: ignore - ctx, param.get_default(ctx) - ) - - # Track all kwargs as params, so that forward() will pass - # them on in subsequent calls. - ctx.params.update(kwargs) - else: - ctx = __self - - with augment_usage_errors(__self): - with ctx: - return __callback(*args, **kwargs) - - def forward( - __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 - ) -> t.Any: - """Similar to :meth:`invoke` but fills in default keyword - arguments from the current context if the other command expects - it. This cannot invoke callbacks directly, only other commands. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if ``forward`` is called at multiple levels. - """ - # Can only forward to other commands, not direct callbacks. - if not isinstance(__cmd, Command): - raise TypeError("Callback is not a command.") - - for param in __self.params: - if param not in kwargs: - kwargs[param] = __self.params[param] - - return __self.invoke(__cmd, *args, **kwargs) - - def set_parameter_source(self, name: str, source: ParameterSource) -> None: - """Set the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - :param name: The name of the parameter. - :param source: A member of :class:`~click.core.ParameterSource`. - """ - self._parameter_source[name] = source - - def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: - """Get the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - This can be useful for determining when a user specified a value - on the command line that is the same as the default value. It - will be :attr:`~click.core.ParameterSource.DEFAULT` only if the - value was actually taken from the default. - - :param name: The name of the parameter. - :rtype: ParameterSource - - .. versionchanged:: 8.0 - Returns ``None`` if the parameter was not provided from any - source. - """ - return self._parameter_source.get(name) - - -class BaseCommand: - """The base command implements the minimal API contract of commands. - Most code will never use this as it does not implement a lot of useful - functionality but it can act as the direct subclass of alternative - parsing methods that do not depend on the Click parser. - - For instance, this can be used to bridge Click and other systems like - argparse or docopt. - - Because base commands do not implement a lot of the API that other - parts of Click take for granted, they are not supported for all - operations. For instance, they cannot be used with the decorators - usually and they have no built-in callback system. - - .. 
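# --- editor's sketch: illustrative only, not part of the deleted vendored file ---
# invoke() and forward() above are the supported ways to call one command from
# another: invoke() fills any unspecified parameters from their defaults, while
# forward() reuses the calling context's own parameter values.
import click

@click.command()
@click.option("--count", default=1)
def test(count: int) -> None:
    click.echo(f"count={count}")

@click.command()
@click.option("--count", default=1)
@click.pass_context
def dist(ctx: click.Context, count: int) -> None:
    ctx.forward(test)            # passes this command's own count along
    ctx.invoke(test, count=42)   # explicit keyword arguments take precedence

@click.group()
def cli() -> None:
    pass

cli.add_command(test)
cli.add_command(dist)
# --- end of editor's sketch ---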
versionchanged:: 2.0 - Added the `context_settings` parameter. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - """ - - #: The context class to create with :meth:`make_context`. - #: - #: .. versionadded:: 8.0 - context_class: t.Type[Context] = Context - #: the default for the :attr:`Context.allow_extra_args` flag. - allow_extra_args = False - #: the default for the :attr:`Context.allow_interspersed_args` flag. - allow_interspersed_args = True - #: the default for the :attr:`Context.ignore_unknown_options` flag. - ignore_unknown_options = False - - def __init__( - self, - name: t.Optional[str], - context_settings: t.Optional[t.Dict[str, t.Any]] = None, - ) -> None: - #: the name the command thinks it has. Upon registering a command - #: on a :class:`Group` the group will default the command name - #: with this information. You should instead use the - #: :class:`Context`\'s :attr:`~Context.info_name` attribute. - self.name = name - - if context_settings is None: - context_settings = {} - - #: an optional dictionary with defaults passed to the context. - self.context_settings: t.Dict[str, t.Any] = context_settings - - def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire structure - below this command. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - :param ctx: A :class:`Context` representing this command. - - .. versionadded:: 8.0 - """ - return {"name": self.name} - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def get_usage(self, ctx: Context) -> str: - raise NotImplementedError("Base commands cannot get usage") - - def get_help(self, ctx: Context) -> str: - raise NotImplementedError("Base commands cannot get help") - - def make_context( - self, - info_name: t.Optional[str], - args: t.List[str], - parent: t.Optional[Context] = None, - **extra: t.Any, - ) -> Context: - """This function when given an info name and arguments will kick - off the parsing and create a new :class:`Context`. It does not - invoke the actual command callback though. - - To quickly customize the context class used without overriding - this method, set the :attr:`context_class` attribute. - - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it's usually - the name of the script, for commands below it it's - the name of the command. - :param args: the arguments to parse as list of strings. - :param parent: the parent context if available. - :param extra: extra keyword arguments forwarded to the context - constructor. - - .. versionchanged:: 8.0 - Added the :attr:`context_class` attribute. - """ - for key, value in self.context_settings.items(): - if key not in extra: - extra[key] = value - - ctx = self.context_class( - self, info_name=info_name, parent=parent, **extra # type: ignore - ) - - with ctx.scope(cleanup=False): - self.parse_args(ctx, args) - return ctx - - def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: - """Given a context and a list of arguments this creates the parser - and parses the arguments, then modifies the context as necessary. - This is automatically invoked by :meth:`make_context`. 
- """ - raise NotImplementedError("Base commands do not know how to parse arguments.") - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the command. The default - implementation is raising a not implemented error. - """ - raise NotImplementedError("Base commands are not invokable by default") - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. Looks - at the names of chained multi-commands. - - Any command could be part of a chained multi-command, so sibling - commands are valid at any point during command completion. Other - command classes will return more completions. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: t.List["CompletionItem"] = [] - - while ctx.parent is not None: - ctx = ctx.parent - - if isinstance(ctx.command, MultiCommand) and ctx.command.chain: - results.extend( - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - if name not in ctx.protected_args - ) - - return results - - @t.overload - def main( - self, - args: t.Optional[t.Sequence[str]] = None, - prog_name: t.Optional[str] = None, - complete_var: t.Optional[str] = None, - standalone_mode: "te.Literal[True]" = True, - **extra: t.Any, - ) -> "te.NoReturn": - ... - - @t.overload - def main( - self, - args: t.Optional[t.Sequence[str]] = None, - prog_name: t.Optional[str] = None, - complete_var: t.Optional[str] = None, - standalone_mode: bool = ..., - **extra: t.Any, - ) -> t.Any: - ... - - def main( - self, - args: t.Optional[t.Sequence[str]] = None, - prog_name: t.Optional[str] = None, - complete_var: t.Optional[str] = None, - standalone_mode: bool = True, - windows_expand_args: bool = True, - **extra: t.Any, - ) -> t.Any: - """This is the way to invoke a script with all the bells and - whistles as a command line application. This will always terminate - the application after a call. If this is not wanted, ``SystemExit`` - needs to be caught. - - This method is also available by directly calling the instance of - a :class:`Command`. - - :param args: the arguments that should be used for parsing. If not - provided, ``sys.argv[1:]`` is used. - :param prog_name: the program name that should be used. By default - the program name is constructed by taking the file - name from ``sys.argv[0]``. - :param complete_var: the environment variable that controls the - bash completion support. The default is - ``"__COMPLETE"`` with prog_name in - uppercase. - :param standalone_mode: the default behavior is to invoke the script - in standalone mode. Click will then - handle exceptions and convert them into - error messages and the function will never - return but shut down the interpreter. If - this is set to `False` they will be - propagated to the caller and the return - value of this function is the return value - of :meth:`invoke`. - :param windows_expand_args: Expand glob patterns, user dir, and - env vars in command line args on Windows. - :param extra: extra keyword arguments are forwarded to the context - constructor. See :class:`Context` for more information. - - .. versionchanged:: 8.0.1 - Added the ``windows_expand_args`` parameter to allow - disabling command line arg expansion on Windows. - - .. 
versionchanged:: 8.0 - When taking arguments from ``sys.argv`` on Windows, glob - patterns, user dir, and env vars are expanded. - - .. versionchanged:: 3.0 - Added the ``standalone_mode`` parameter. - """ - if args is None: - args = sys.argv[1:] - - if os.name == "nt" and windows_expand_args: - args = _expand_args(args) - else: - args = list(args) - - if prog_name is None: - prog_name = _detect_program_name() - - # Process shell completion requests and exit early. - self._main_shell_completion(extra, prog_name, complete_var) - - try: - try: - with self.make_context(prog_name, args, **extra) as ctx: - rv = self.invoke(ctx) - if not standalone_mode: - return rv - # it's not safe to `ctx.exit(rv)` here! - # note that `rv` may actually contain data like "1" which - # has obvious effects - # more subtle case: `rv=[None, None]` can come out of - # chained commands which all returned `None` -- so it's not - # even always obvious that `rv` indicates success/failure - # by its truthiness/falsiness - ctx.exit() - except (EOFError, KeyboardInterrupt): - echo(file=sys.stderr) - raise Abort() from None - except ClickException as e: - if not standalone_mode: - raise - e.show() - sys.exit(e.exit_code) - except OSError as e: - if e.errno == errno.EPIPE: - sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) - sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) - sys.exit(1) - else: - raise - except Exit as e: - if standalone_mode: - sys.exit(e.exit_code) - else: - # in non-standalone mode, return the exit code - # note that this is only reached if `self.invoke` above raises - # an Exit explicitly -- thus bypassing the check there which - # would return its result - # the results of non-standalone execution may therefore be - # somewhat ambiguous: if there are codepaths which lead to - # `ctx.exit(1)` and to `return 1`, the caller won't be able to - # tell the difference between the two - return e.exit_code - except Abort: - if not standalone_mode: - raise - echo(_("Aborted!"), file=sys.stderr) - sys.exit(1) - - def _main_shell_completion( - self, - ctx_args: t.Dict[str, t.Any], - prog_name: str, - complete_var: t.Optional[str] = None, - ) -> None: - """Check if the shell is asking for tab completion, process - that, then exit early. Called from :meth:`main` before the - program is invoked. - - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. Defaults to - ``_{PROG_NAME}_COMPLETE``. - """ - if complete_var is None: - complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper() - - instruction = os.environ.get(complete_var) - - if not instruction: - return - - from .shell_completion import shell_complete - - rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) - sys.exit(rv) - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Alias for :meth:`main`.""" - return self.main(*args, **kwargs) - - -class Command(BaseCommand): - """Commands are the basic building block of command line interfaces in - Click. A basic command handles command line parsing and might dispatch - more parsing to commands nested below it. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - :param callback: the callback to invoke. This is optional. - :param params: the parameters to register with this command. 
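# A minimal sketch of main(standalone_mode=False) as documented above:
# exceptions propagate and the callback's return value is handed back
# instead of the interpreter exiting. The "square" command is hypothetical.
import click

@click.command()
@click.option("--n", type=int, default=2)
def square(n):
    return n * n

result = square.main(["--n", "7"], standalone_mode=False)
print(result)  # 49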
This can - be either :class:`Option` or :class:`Argument` objects. - :param help: the help string to use for this command. - :param epilog: like the help string but it's printed at the end of the - help page after everything else. - :param short_help: the short help to use for this command. This is - shown on the command listing of the parent command. - :param add_help_option: by default each command registers a ``--help`` - option. This can be disabled by this parameter. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is disabled by default. - If enabled this will add ``--help`` as argument - if no arguments are passed - :param hidden: hide this command from help outputs. - - :param deprecated: issues a message indicating that - the command is deprecated. - - .. versionchanged:: 8.1 - ``help``, ``epilog``, and ``short_help`` are stored unprocessed, - all formatting is done when outputting help text, not at init, - and is done even if not using the ``@command`` decorator. - - .. versionchanged:: 8.0 - Added a ``repr`` showing the command name. - - .. versionchanged:: 7.1 - Added the ``no_args_is_help`` parameter. - - .. versionchanged:: 2.0 - Added the ``context_settings`` parameter. - """ - - def __init__( - self, - name: t.Optional[str], - context_settings: t.Optional[t.Dict[str, t.Any]] = None, - callback: t.Optional[t.Callable[..., t.Any]] = None, - params: t.Optional[t.List["Parameter"]] = None, - help: t.Optional[str] = None, - epilog: t.Optional[str] = None, - short_help: t.Optional[str] = None, - options_metavar: t.Optional[str] = "[OPTIONS]", - add_help_option: bool = True, - no_args_is_help: bool = False, - hidden: bool = False, - deprecated: bool = False, - ) -> None: - super().__init__(name, context_settings) - #: the callback to execute when the command fires. This might be - #: `None` in which case nothing happens. - self.callback = callback - #: the list of parameters for this command in the order they - #: should show up in the help page and execute. Eager parameters - #: will automatically be handled before non eager ones. - self.params: t.List["Parameter"] = params or [] - self.help = help - self.epilog = epilog - self.options_metavar = options_metavar - self.short_help = short_help - self.add_help_option = add_help_option - self.no_args_is_help = no_args_is_help - self.hidden = hidden - self.deprecated = deprecated - - def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - info_dict.update( - params=[param.to_info_dict() for param in self.get_params(ctx)], - help=self.help, - epilog=self.epilog, - short_help=self.short_help, - hidden=self.hidden, - deprecated=self.deprecated, - ) - return info_dict - - def get_usage(self, ctx: Context) -> str: - """Formats the usage line into a string and returns it. - - Calls :meth:`format_usage` internally. - """ - formatter = ctx.make_formatter() - self.format_usage(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_params(self, ctx: Context) -> t.List["Parameter"]: - rv = self.params - help_option = self.get_help_option(ctx) - - if help_option is not None: - rv = [*rv, help_option] - - return rv - - def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the usage line into the formatter. - - This is a low-level method called by :meth:`get_usage`. 
- """ - pieces = self.collect_usage_pieces(ctx) - formatter.write_usage(ctx.command_path, " ".join(pieces)) - - def collect_usage_pieces(self, ctx: Context) -> t.List[str]: - """Returns all the pieces that go into the usage line and returns - it as a list of strings. - """ - rv = [self.options_metavar] if self.options_metavar else [] - - for param in self.get_params(ctx): - rv.extend(param.get_usage_pieces(ctx)) - - return rv - - def get_help_option_names(self, ctx: Context) -> t.List[str]: - """Returns the names for the help option.""" - all_names = set(ctx.help_option_names) - for param in self.params: - all_names.difference_update(param.opts) - all_names.difference_update(param.secondary_opts) - return list(all_names) - - def get_help_option(self, ctx: Context) -> t.Optional["Option"]: - """Returns the help option object.""" - help_options = self.get_help_option_names(ctx) - - if not help_options or not self.add_help_option: - return None - - def show_help(ctx: Context, param: "Parameter", value: str) -> None: - if value and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - return Option( - help_options, - is_flag=True, - is_eager=True, - expose_value=False, - callback=show_help, - help=_("Show this message and exit."), - ) - - def make_parser(self, ctx: Context) -> OptionParser: - """Creates the underlying option parser for this command.""" - parser = OptionParser(ctx) - for param in self.get_params(ctx): - param.add_to_parser(parser, ctx) - return parser - - def get_help(self, ctx: Context) -> str: - """Formats the help into a string and returns it. - - Calls :meth:`format_help` internally. - """ - formatter = ctx.make_formatter() - self.format_help(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_short_help_str(self, limit: int = 45) -> str: - """Gets short help for the command or makes it by shortening the - long help string. - """ - if self.short_help: - text = inspect.cleandoc(self.short_help) - elif self.help: - text = make_default_short_help(self.help, limit) - else: - text = "" - - if self.deprecated: - text = _("(Deprecated) {text}").format(text=text) - - return text.strip() - - def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help into the formatter if it exists. - - This is a low-level method called by :meth:`get_help`. 
- - This calls the following methods: - - - :meth:`format_usage` - - :meth:`format_help_text` - - :meth:`format_options` - - :meth:`format_epilog` - """ - self.format_usage(ctx, formatter) - self.format_help_text(ctx, formatter) - self.format_options(ctx, formatter) - self.format_epilog(ctx, formatter) - - def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help text to the formatter if it exists.""" - text = self.help if self.help is not None else "" - - if self.deprecated: - text = _("(Deprecated) {text}").format(text=text) - - if text: - text = inspect.cleandoc(text).partition("\f")[0] - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(text) - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes all the options into the formatter if they exist.""" - opts = [] - for param in self.get_params(ctx): - rv = param.get_help_record(ctx) - if rv is not None: - opts.append(rv) - - if opts: - with formatter.section(_("Options")): - formatter.write_dl(opts) - - def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the epilog into the formatter if it exists.""" - if self.epilog: - epilog = inspect.cleandoc(self.epilog) - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(epilog) - - def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - parser = self.make_parser(ctx) - opts, args, param_order = parser.parse_args(args=args) - - for param in iter_params_for_processing(param_order, self.get_params(ctx)): - value, args = param.handle_parse_result(ctx, opts, args) - - if args and not ctx.allow_extra_args and not ctx.resilient_parsing: - ctx.fail( - ngettext( - "Got unexpected extra argument ({args})", - "Got unexpected extra arguments ({args})", - len(args), - ).format(args=" ".join(map(str, args))) - ) - - ctx.args = args - ctx._opt_prefixes.update(parser._opt_prefixes) - return args - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the attached callback (if it exists) - in the right way. - """ - if self.deprecated: - message = _( - "DeprecationWarning: The command {name!r} is deprecated." - ).format(name=self.name) - echo(style(message, fg="red"), err=True) - - if self.callback is not None: - return ctx.invoke(self.callback, **ctx.params) - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. Looks - at the names of options and chained multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: t.List["CompletionItem"] = [] - - if incomplete and not incomplete[0].isalnum(): - for param in self.get_params(ctx): - if ( - not isinstance(param, Option) - or param.hidden - or ( - not param.multiple - and ctx.get_parameter_source(param.name) # type: ignore - is ParameterSource.COMMANDLINE - ) - ): - continue - - results.extend( - CompletionItem(name, help=param.help) - for name in [*param.opts, *param.secondary_opts] - if name.startswith(incomplete) - ) - - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class MultiCommand(Command): - """A multi command is the basic implementation of a command that - dispatches to subcommands. The most common version is the - :class:`Group`. - - :param invoke_without_command: this controls how the multi command itself - is invoked. By default it's only invoked - if a subcommand is provided. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is enabled by default if - `invoke_without_command` is disabled or disabled - if it's enabled. If enabled this will add - ``--help`` as argument if no arguments are - passed. - :param subcommand_metavar: the string that is used in the documentation - to indicate the subcommand place. - :param chain: if this is set to `True` chaining of multiple subcommands - is enabled. This restricts the form of commands in that - they cannot have optional arguments but it allows - multiple commands to be chained together. - :param result_callback: The result callback to attach to this multi - command. This can be set or changed later with the - :meth:`result_callback` decorator. - """ - - allow_extra_args = True - allow_interspersed_args = False - - def __init__( - self, - name: t.Optional[str] = None, - invoke_without_command: bool = False, - no_args_is_help: t.Optional[bool] = None, - subcommand_metavar: t.Optional[str] = None, - chain: bool = False, - result_callback: t.Optional[t.Callable[..., t.Any]] = None, - **attrs: t.Any, - ) -> None: - super().__init__(name, **attrs) - - if no_args_is_help is None: - no_args_is_help = not invoke_without_command - - self.no_args_is_help = no_args_is_help - self.invoke_without_command = invoke_without_command - - if subcommand_metavar is None: - if chain: - subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." - else: - subcommand_metavar = "COMMAND [ARGS]..." - - self.subcommand_metavar = subcommand_metavar - self.chain = chain - # The result callback that is stored. This can be set or - # overridden with the :func:`result_callback` decorator. - self._result_callback = result_callback - - if self.chain: - for param in self.params: - if isinstance(param, Argument) and not param.required: - raise RuntimeError( - "Multi commands in chain mode cannot have" - " optional arguments." 
- ) - - def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - commands = {} - - for name in self.list_commands(ctx): - command = self.get_command(ctx, name) - - if command is None: - continue - - sub_ctx = ctx._make_sub_context(command) - - with sub_ctx.scope(cleanup=False): - commands[name] = command.to_info_dict(sub_ctx) - - info_dict.update(commands=commands, chain=self.chain) - return info_dict - - def collect_usage_pieces(self, ctx: Context) -> t.List[str]: - rv = super().collect_usage_pieces(ctx) - rv.append(self.subcommand_metavar) - return rv - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - super().format_options(ctx, formatter) - self.format_commands(ctx, formatter) - - def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: - """Adds a result callback to the command. By default if a - result callback is already registered this will chain them but - this can be disabled with the `replace` parameter. The result - callback is invoked with the return value of the subcommand - (or the list of return values from all subcommands if chaining - is enabled) as well as the parameters as they would be passed - to the main callback. - - Example:: - - @click.group() - @click.option('-i', '--input', default=23) - def cli(input): - return 42 - - @cli.result_callback() - def process_result(result, input): - return result + input - - :param replace: if set to `True` an already existing result - callback will be removed. - - .. versionchanged:: 8.0 - Renamed from ``resultcallback``. - - .. versionadded:: 3.0 - """ - - def decorator(f: F) -> F: - old_callback = self._result_callback - - if old_callback is None or replace: - self._result_callback = f - return f - - def function(__value, *args, **kwargs): # type: ignore - inner = old_callback(__value, *args, **kwargs) # type: ignore - return f(inner, *args, **kwargs) - - self._result_callback = rv = update_wrapper(t.cast(F, function), f) - return rv - - return decorator - - def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: - """Extra format methods for multi methods that adds all the commands - after the options. - """ - commands = [] - for subcommand in self.list_commands(ctx): - cmd = self.get_command(ctx, subcommand) - # What is this, the tool lied about a command. 
Ignore it - if cmd is None: - continue - if cmd.hidden: - continue - - commands.append((subcommand, cmd)) - - # allow for 3 times the default spacing - if len(commands): - limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) - - rows = [] - for subcommand, cmd in commands: - help = cmd.get_short_help_str(limit) - rows.append((subcommand, help)) - - if rows: - with formatter.section(_("Commands")): - formatter.write_dl(rows) - - def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - rest = super().parse_args(ctx, args) - - if self.chain: - ctx.protected_args = rest - ctx.args = [] - elif rest: - ctx.protected_args, ctx.args = rest[:1], rest[1:] - - return ctx.args - - def invoke(self, ctx: Context) -> t.Any: - def _process_result(value: t.Any) -> t.Any: - if self._result_callback is not None: - value = ctx.invoke(self._result_callback, value, **ctx.params) - return value - - if not ctx.protected_args: - if self.invoke_without_command: - # No subcommand was invoked, so the result callback is - # invoked with the group return value for regular - # groups, or an empty list for chained groups. - with ctx: - rv = super().invoke(ctx) - return _process_result([] if self.chain else rv) - ctx.fail(_("Missing command.")) - - # Fetch args back out - args = [*ctx.protected_args, *ctx.args] - ctx.args = [] - ctx.protected_args = [] - - # If we're not in chain mode, we only allow the invocation of a - # single command but we also inform the current context about the - # name of the command to invoke. - if not self.chain: - # Make sure the context is entered so we do not clean up - # resources until the result processor has worked. - with ctx: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - ctx.invoked_subcommand = cmd_name - super().invoke(ctx) - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) - with sub_ctx: - return _process_result(sub_ctx.command.invoke(sub_ctx)) - - # In chain mode we create the contexts step by step, but after the - # base command has been invoked. Because at that point we do not - # know the subcommands yet, the invoked subcommand attribute is - # set to ``*`` to inform the command that subcommands are executed - # but nothing else. - with ctx: - ctx.invoked_subcommand = "*" if args else None - super().invoke(ctx) - - # Otherwise we make every single context and invoke them in a - # chain. In that case the return value to the result processor - # is the list of all invoked subcommand's results. - contexts = [] - while args: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - sub_ctx = cmd.make_context( - cmd_name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - ) - contexts.append(sub_ctx) - args, sub_ctx.args = sub_ctx.args, [] - - rv = [] - for sub_ctx in contexts: - with sub_ctx: - rv.append(sub_ctx.command.invoke(sub_ctx)) - return _process_result(rv) - - def resolve_command( - self, ctx: Context, args: t.List[str] - ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: - cmd_name = make_str(args[0]) - original_cmd_name = cmd_name - - # Get the command - cmd = self.get_command(ctx, cmd_name) - - # If we can't find the command but there is a normalization - # function available, we try with that one. 
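# A minimal sketch of chain=True combined with result_callback() as described
# above: with chaining the callback receives the list of every subcommand's
# return value. The "pipeline"/"lint"/"test" names are hypothetical.
import click

@click.group(chain=True)
def pipeline():
    pass

@pipeline.command()
def lint():
    return "lint-ok"

@pipeline.command()
def test():
    return "test-ok"

@pipeline.result_callback()
def summarize(results):
    click.echo(", ".join(results))
    # e.g. `pipeline lint test` prints "lint-ok, test-ok"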
- if cmd is None and ctx.token_normalize_func is not None: - cmd_name = ctx.token_normalize_func(cmd_name) - cmd = self.get_command(ctx, cmd_name) - - # If we don't find the command we want to show an error message - # to the user that it was not provided. However, there is - # something else we should do: if the first argument looks like - # an option we want to kick off parsing again for arguments to - # resolve things like --help which now should go to the main - # place. - if cmd is None and not ctx.resilient_parsing: - if split_opt(cmd_name)[0]: - self.parse_args(ctx, ctx.args) - ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) - return cmd_name if cmd else None, cmd, args[1:] - - def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: - """Given a context and a command name, this returns a - :class:`Command` object if it exists or returns `None`. - """ - raise NotImplementedError - - def list_commands(self, ctx: Context) -> t.List[str]: - """Returns a list of subcommand names in the order they should - appear. - """ - return [] - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. Looks - at the names of options, subcommands, and chained - multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results = [ - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - ] - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class Group(MultiCommand): - """A group allows a command to have subcommands attached. This is - the most common way to implement nesting in Click. - - :param name: The name of the group command. - :param commands: A dict mapping names to :class:`Command` objects. - Can also be a list of :class:`Command`, which will use - :attr:`Command.name` to create the dict. - :param attrs: Other command arguments described in - :class:`MultiCommand`, :class:`Command`, and - :class:`BaseCommand`. - - .. versionchanged:: 8.0 - The ``commmands`` argument can be a list of command objects. - """ - - #: If set, this is used by the group's :meth:`command` decorator - #: as the default :class:`Command` class. This is useful to make all - #: subcommands use a custom command class. - #: - #: .. versionadded:: 8.0 - command_class: t.Optional[t.Type[Command]] = None - - #: If set, this is used by the group's :meth:`group` decorator - #: as the default :class:`Group` class. This is useful to make all - #: subgroups use a custom group class. - #: - #: If set to the special value :class:`type` (literally - #: ``group_class = type``), this group's class will be used as the - #: default class. This makes a custom group class continue to make - #: custom groups. - #: - #: .. 
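# A minimal sketch of a Group overriding the get_command()/list_commands()
# hooks left abstract above; the plugin registry shown here is hypothetical.
import click

class PluginGroup(click.Group):
    plugins: dict = {}  # name -> click.Command, filled in elsewhere

    def list_commands(self, ctx):
        return sorted(self.plugins)

    def get_command(self, ctx, name):
        return self.plugins.get(name)

@click.group(cls=PluginGroup)
def cli():
    pass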
versionadded:: 8.0 - group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None - # Literal[type] isn't valid, so use Type[type] - - def __init__( - self, - name: t.Optional[str] = None, - commands: t.Optional[t.Union[t.Dict[str, Command], t.Sequence[Command]]] = None, - **attrs: t.Any, - ) -> None: - super().__init__(name, **attrs) - - if commands is None: - commands = {} - elif isinstance(commands, abc.Sequence): - commands = {c.name: c for c in commands if c.name is not None} - - #: The registered subcommands by their exported names. - self.commands: t.Dict[str, Command] = commands - - def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: - """Registers another :class:`Command` with this group. If the name - is not provided, the name of the command is used. - """ - name = name or cmd.name - if name is None: - raise TypeError("Command has no name.") - _check_multicommand(self, name, cmd, register=True) - self.commands[name] = cmd - - @t.overload - def command(self, __func: t.Callable[..., t.Any]) -> Command: - ... - - @t.overload - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command]: - ... - - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: - """A shortcut decorator for declaring and attaching a command to - the group. This takes the same arguments as :func:`command` and - immediately registers the created command with this group by - calling :meth:`add_command`. - - To customize the command class used, set the - :attr:`command_class` attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`command_class` attribute. - """ - from .decorators import command - - if self.command_class and kwargs.get("cls") is None: - kwargs["cls"] = self.command_class - - func: t.Optional[t.Callable] = None - - if args and callable(args[0]): - assert ( - len(args) == 1 and not kwargs - ), "Use 'command(**kwargs)(callable)' to provide arguments." - (func,) = args - args = () - - def decorator(f: t.Callable[..., t.Any]) -> Command: - cmd: Command = command(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - @t.overload - def group(self, __func: t.Callable[..., t.Any]) -> "Group": - ... - - @t.overload - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: - ... - - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: - """A shortcut decorator for declaring and attaching a group to - the group. This takes the same arguments as :func:`group` and - immediately registers the created group with this group by - calling :meth:`add_command`. - - To customize the group class used, set the :attr:`group_class` - attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`group_class` attribute. - """ - from .decorators import group - - func: t.Optional[t.Callable] = None - - if args and callable(args[0]): - assert ( - len(args) == 1 and not kwargs - ), "Use 'group(**kwargs)(callable)' to provide arguments." 
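# A minimal sketch of the command_class/group_class hooks documented above;
# the subclasses are empty placeholders.
import click

class MyCommand(click.Command):
    pass

class MyGroup(click.Group):
    command_class = MyCommand  # @cli.command() now builds MyCommand objects
    group_class = type         # @cli.group() reuses the parent group's class

@click.group(cls=MyGroup)
def cli():
    pass

@cli.command()
def status():
    click.echo("ok")

@cli.group()
def db():
    pass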
- (func,) = args - args = () - - if self.group_class is not None and kwargs.get("cls") is None: - if self.group_class is type: - kwargs["cls"] = type(self) - else: - kwargs["cls"] = self.group_class - - def decorator(f: t.Callable[..., t.Any]) -> "Group": - cmd: Group = group(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: - return self.commands.get(cmd_name) - - def list_commands(self, ctx: Context) -> t.List[str]: - return sorted(self.commands) - - -class CommandCollection(MultiCommand): - """A command collection is a multi command that merges multiple multi - commands together into one. This is a straightforward implementation - that accepts a list of different multi commands as sources and - provides all the commands for each of them. - """ - - def __init__( - self, - name: t.Optional[str] = None, - sources: t.Optional[t.List[MultiCommand]] = None, - **attrs: t.Any, - ) -> None: - super().__init__(name, **attrs) - #: The list of registered multi commands. - self.sources: t.List[MultiCommand] = sources or [] - - def add_source(self, multi_cmd: MultiCommand) -> None: - """Adds a new multi command to the chain dispatcher.""" - self.sources.append(multi_cmd) - - def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: - for source in self.sources: - rv = source.get_command(ctx, cmd_name) - - if rv is not None: - if self.chain: - _check_multicommand(self, cmd_name, rv) - - return rv - - return None - - def list_commands(self, ctx: Context) -> t.List[str]: - rv: t.Set[str] = set() - - for source in self.sources: - rv.update(source.list_commands(ctx)) - - return sorted(rv) - - -def _check_iter(value: t.Any) -> t.Iterator[t.Any]: - """Check if the value is iterable but not a string. Raises a type - error, or return an iterator over the value. - """ - if isinstance(value, str): - raise TypeError - - return iter(value) - - -class Parameter: - r"""A parameter to a command comes in two versions: they are either - :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently - not supported by design as some of the internals for parsing are - intentionally not finalized. - - Some settings are supported by both options and arguments. - - :param param_decls: the parameter declarations for this option or - argument. This is a list of flags or argument - names. - :param type: the type that should be used. Either a :class:`ParamType` - or a Python type. The later is converted into the former - automatically if supported. - :param required: controls if this is optional or not. - :param default: the default value if omitted. This can also be a callable, - in which case it's invoked when the default is needed - without any arguments. - :param callback: A function to further process or validate the value - after type conversion. It is called as ``f(ctx, param, value)`` - and must return the value. It is called for all sources, - including prompts. - :param nargs: the number of arguments to match. If not ``1`` the return - value is a tuple instead of single value. The default for - nargs is ``1`` (except if the type is a tuple, then it's - the arity of the tuple). If ``nargs=-1``, all remaining - parameters are collected. - :param metavar: how the value is represented in the help page. 
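# A minimal sketch of CommandCollection merging two groups into one CLI,
# as described above; the group and command names are hypothetical.
import click

@click.group()
def core():
    pass

@core.command()
def init():
    click.echo("init")

@click.group()
def extras():
    pass

@extras.command()
def deploy():
    click.echo("deploy")

cli = click.CommandCollection(sources=[core, extras])
# `cli init` and `cli deploy` are both available now.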
- :param expose_value: if this is `True` then the value is passed onwards - to the command callback and stored on the context, - otherwise it's skipped. - :param is_eager: eager values are processed before non eager ones. This - should not be set for arguments or it will inverse the - order of processing. - :param envvar: a string or list of strings that are environment variables - that should be checked. - :param shell_complete: A function that returns custom shell - completions. Used instead of the param's type completion if - given. Takes ``ctx, param, incomplete`` and must return a list - of :class:`~click.shell_completion.CompletionItem` or a list of - strings. - - .. versionchanged:: 8.0 - ``process_value`` validates required parameters and bounded - ``nargs``, and invokes the parameter callback before returning - the value. This allows the callback to validate prompts. - ``full_process_value`` is removed. - - .. versionchanged:: 8.0 - ``autocompletion`` is renamed to ``shell_complete`` and has new - semantics described above. The old name is deprecated and will - be removed in 8.1, until then it will be wrapped to match the - new requirements. - - .. versionchanged:: 8.0 - For ``multiple=True, nargs>1``, the default must be a list of - tuples. - - .. versionchanged:: 8.0 - Setting a default is no longer required for ``nargs>1``, it will - default to ``None``. ``multiple=True`` or ``nargs=-1`` will - default to ``()``. - - .. versionchanged:: 7.1 - Empty environment variables are ignored rather than taking the - empty string value. This makes it possible for scripts to clear - variables if they can't unset them. - - .. versionchanged:: 2.0 - Changed signature for parameter callback to also be passed the - parameter. The old callback format will still work, but it will - raise a warning to give you a chance to migrate the code easier. - """ - - param_type_name = "parameter" - - def __init__( - self, - param_decls: t.Optional[t.Sequence[str]] = None, - type: t.Optional[t.Union[types.ParamType, t.Any]] = None, - required: bool = False, - default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, - callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, - nargs: t.Optional[int] = None, - multiple: bool = False, - metavar: t.Optional[str] = None, - expose_value: bool = True, - is_eager: bool = False, - envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, - shell_complete: t.Optional[ - t.Callable[ - [Context, "Parameter", str], - t.Union[t.List["CompletionItem"], t.List[str]], - ] - ] = None, - ) -> None: - self.name, self.opts, self.secondary_opts = self._parse_decls( - param_decls or (), expose_value - ) - self.type = types.convert_type(type, default) - - # Default nargs to what the type tells us if we have that - # information available. - if nargs is None: - if self.type.is_composite: - nargs = self.type.arity - else: - nargs = 1 - - self.required = required - self.callback = callback - self.nargs = nargs - self.multiple = multiple - self.expose_value = expose_value - self.default = default - self.is_eager = is_eager - self.metavar = metavar - self.envvar = envvar - self._custom_shell_complete = shell_complete - - if __debug__: - if self.type.is_composite and nargs != self.type.arity: - raise ValueError( - f"'nargs' must be {self.type.arity} (or None) for" - f" type {self.type!r}, but it was {nargs}." - ) - - # Skip no default or callable default. 
- check_default = default if not callable(default) else None - - if check_default is not None: - if multiple: - try: - # Only check the first value against nargs. - check_default = next(_check_iter(check_default), None) - except TypeError: - raise ValueError( - "'default' must be a list when 'multiple' is true." - ) from None - - # Can be None for multiple with empty default. - if nargs != 1 and check_default is not None: - try: - _check_iter(check_default) - except TypeError: - if multiple: - message = ( - "'default' must be a list of lists when 'multiple' is" - " true and 'nargs' != 1." - ) - else: - message = "'default' must be a list when 'nargs' != 1." - - raise ValueError(message) from None - - if nargs > 1 and len(check_default) != nargs: - subject = "item length" if multiple else "length" - raise ValueError( - f"'default' {subject} must match nargs={nargs}." - ) - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - return { - "name": self.name, - "param_type_name": self.param_type_name, - "opts": self.opts, - "secondary_opts": self.secondary_opts, - "type": self.type.to_info_dict(), - "required": self.required, - "nargs": self.nargs, - "multiple": self.multiple, - "default": self.default, - "envvar": self.envvar, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - raise NotImplementedError() - - @property - def human_readable_name(self) -> str: - """Returns the human readable name of this parameter. This is the - same as the name for options, but the metavar for arguments. - """ - return self.name # type: ignore - - def make_metavar(self) -> str: - if self.metavar is not None: - return self.metavar - - metavar = self.type.get_metavar(self) - - if metavar is None: - metavar = self.type.name.upper() - - if self.nargs != 1: - metavar += "..." - - return metavar - - @t.overload - def get_default( - self, ctx: Context, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - """Get the default for the parameter. Tries - :meth:`Context.lookup_default` first, then the local default. - - :param ctx: Current context. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0.2 - Type casting is no longer performed when getting a default. - - .. versionchanged:: 8.0.1 - Type casting can fail in resilient parsing mode. Invalid - defaults will not prevent showing help text. - - .. versionchanged:: 8.0 - Looks at ``ctx.default_map`` first. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. 
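# A minimal sketch of the nargs/multiple default rules validated above:
# nargs=2 needs a length-2 default, multiple=True needs a sequence default.
# The option names are hypothetical.
import click

@click.command()
@click.option("--point", nargs=2, type=float, default=(0.0, 0.0))
@click.option("--tag", multiple=True, default=("web", "db"))
def show(point, tag):
    click.echo(f"point={point} tags={tag}")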
- """ - value = ctx.lookup_default(self.name, call=False) # type: ignore - - if value is None: - value = self.default - - if call and callable(value): - value = value() - - return value - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - raise NotImplementedError() - - def consume_value( - self, ctx: Context, opts: t.Mapping[str, t.Any] - ) -> t.Tuple[t.Any, ParameterSource]: - value = opts.get(self.name) # type: ignore - source = ParameterSource.COMMANDLINE - - if value is None: - value = self.value_from_envvar(ctx) - source = ParameterSource.ENVIRONMENT - - if value is None: - value = ctx.lookup_default(self.name) # type: ignore - source = ParameterSource.DEFAULT_MAP - - if value is None: - value = self.get_default(ctx) - source = ParameterSource.DEFAULT - - return value, source - - def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: - """Convert and validate a value against the option's - :attr:`type`, :attr:`multiple`, and :attr:`nargs`. - """ - if value is None: - return () if self.multiple or self.nargs == -1 else None - - def check_iter(value: t.Any) -> t.Iterator: - try: - return _check_iter(value) - except TypeError: - # This should only happen when passing in args manually, - # the parser should construct an iterable when parsing - # the command line. - raise BadParameter( - _("Value must be an iterable."), ctx=ctx, param=self - ) from None - - if self.nargs == 1 or self.type.is_composite: - convert: t.Callable[[t.Any], t.Any] = partial( - self.type, param=self, ctx=ctx - ) - elif self.nargs == -1: - - def convert(value: t.Any) -> t.Tuple: - return tuple(self.type(x, self, ctx) for x in check_iter(value)) - - else: # nargs > 1 - - def convert(value: t.Any) -> t.Tuple: - value = tuple(check_iter(value)) - - if len(value) != self.nargs: - raise BadParameter( - ngettext( - "Takes {nargs} values but 1 was given.", - "Takes {nargs} values but {len} were given.", - len(value), - ).format(nargs=self.nargs, len=len(value)), - ctx=ctx, - param=self, - ) - - return tuple(self.type(x, self, ctx) for x in value) - - if self.multiple: - return tuple(convert(x) for x in check_iter(value)) - - return convert(value) - - def value_is_missing(self, value: t.Any) -> bool: - if value is None: - return True - - if (self.nargs != 1 or self.multiple) and value == (): - return True - - return False - - def process_value(self, ctx: Context, value: t.Any) -> t.Any: - value = self.type_cast_value(ctx, value) - - if self.required and self.value_is_missing(value): - raise MissingParameter(ctx=ctx, param=self) - - if self.callback is not None: - value = self.callback(ctx, self, value) - - return value - - def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: - if self.envvar is None: - return None - - if isinstance(self.envvar, str): - rv = os.environ.get(self.envvar) - - if rv: - return rv - else: - for envvar in self.envvar: - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: - rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) - - if rv is not None and self.nargs != 1: - rv = self.type.split_envvar_value(rv) - - return rv - - def handle_parse_result( - self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] - ) -> t.Tuple[t.Any, t.List[str]]: - with augment_usage_errors(ctx, param=self): - value, source = self.consume_value(ctx, opts) - ctx.set_parameter_source(self.name, source) # type: ignore - - try: - value = self.process_value(ctx, value) - except Exception: 
- if not ctx.resilient_parsing: - raise - - value = None - - if self.expose_value: - ctx.params[self.name] = value # type: ignore - - return value, args - - def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: - pass - - def get_usage_pieces(self, ctx: Context) -> t.List[str]: - return [] - - def get_error_hint(self, ctx: Context) -> str: - """Get a stringified version of the param for use in error messages to - indicate which param caused the error. - """ - hint_list = self.opts or [self.human_readable_name] - return " / ".join(f"'{x}'" for x in hint_list) - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. If a - ``shell_complete`` function was given during init, it is used. - Otherwise, the :attr:`type` - :meth:`~click.types.ParamType.shell_complete` function is used. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - if self._custom_shell_complete is not None: - results = self._custom_shell_complete(ctx, self, incomplete) - - if results and isinstance(results[0], str): - from click.shell_completion import CompletionItem - - results = [CompletionItem(c) for c in results] - - return t.cast(t.List["CompletionItem"], results) - - return self.type.shell_complete(ctx, self, incomplete) - - -class Option(Parameter): - """Options are usually optional values on the command line and - have some extra features that arguments don't have. - - All other parameters are passed onwards to the parameter constructor. - - :param show_default: Show the default value for this option in its - help text. Values are not shown by default, unless - :attr:`Context.show_default` is ``True``. If this value is a - string, it shows that string in parentheses instead of the - actual value. This is particularly useful for dynamic options. - For single option boolean flags, the default remains hidden if - its value is ``False``. - :param show_envvar: Controls if an environment variable should be - shown on the help page. Normally, environment variables are not - shown. - :param prompt: If set to ``True`` or a non empty string then the - user will be prompted for input. If set to ``True`` the prompt - will be the option name capitalized. - :param confirmation_prompt: Prompt a second time to confirm the - value if it was prompted for. Can be set to a string instead of - ``True`` to customize the message. - :param prompt_required: If set to ``False``, the user will be - prompted for input only when the option was specified as a flag - without a value. - :param hide_input: If this is ``True`` then the input on the prompt - will be hidden from the user. This is useful for password input. - :param is_flag: forces this option to act as a flag. The default is - auto detection. - :param flag_value: which value should be used for this flag if it's - enabled. This is set to a boolean automatically if - the option string contains a slash to mark two options. - :param multiple: if this is set to `True` then the argument is accepted - multiple times and recorded. This is similar to ``nargs`` - in how it works but supports arbitrary number of - arguments. - :param count: this flag makes an option increment an integer. - :param allow_from_autoenv: if this is enabled then the value of this - parameter will be pulled from an environment - variable in case a prefix is defined on the - context. - :param help: the help string. 
- :param hidden: hide this option from help outputs. - - .. versionchanged:: 8.1.0 - Help text indentation is cleaned here instead of only in the - ``@option`` decorator. - - .. versionchanged:: 8.1.0 - The ``show_default`` parameter overrides - ``Context.show_default``. - - .. versionchanged:: 8.1.0 - The default of a single option boolean flag is not shown if the - default value is ``False``. - - .. versionchanged:: 8.0.1 - ``type`` is detected from ``flag_value`` if given. - """ - - param_type_name = "option" - - def __init__( - self, - param_decls: t.Optional[t.Sequence[str]] = None, - show_default: t.Union[bool, str, None] = None, - prompt: t.Union[bool, str] = False, - confirmation_prompt: t.Union[bool, str] = False, - prompt_required: bool = True, - hide_input: bool = False, - is_flag: t.Optional[bool] = None, - flag_value: t.Optional[t.Any] = None, - multiple: bool = False, - count: bool = False, - allow_from_autoenv: bool = True, - type: t.Optional[t.Union[types.ParamType, t.Any]] = None, - help: t.Optional[str] = None, - hidden: bool = False, - show_choices: bool = True, - show_envvar: bool = False, - **attrs: t.Any, - ) -> None: - if help: - help = inspect.cleandoc(help) - - default_is_missing = "default" not in attrs - super().__init__(param_decls, type=type, multiple=multiple, **attrs) - - if prompt is True: - if self.name is None: - raise TypeError("'name' is required with 'prompt=True'.") - - prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() - elif prompt is False: - prompt_text = None - else: - prompt_text = prompt - - self.prompt = prompt_text - self.confirmation_prompt = confirmation_prompt - self.prompt_required = prompt_required - self.hide_input = hide_input - self.hidden = hidden - - # If prompt is enabled but not required, then the option can be - # used as a flag to indicate using prompt or flag_value. - self._flag_needs_value = self.prompt is not None and not self.prompt_required - - if is_flag is None: - if flag_value is not None: - # Implicitly a flag because flag_value was set. - is_flag = True - elif self._flag_needs_value: - # Not a flag, but when used as a flag it shows a prompt. - is_flag = False - else: - # Implicitly a flag because flag options were given. - is_flag = bool(self.secondary_opts) - elif is_flag is False and not self._flag_needs_value: - # Not a flag, and prompt is not enabled, can be used as a - # flag if flag_value is set. - self._flag_needs_value = flag_value is not None - - if is_flag and default_is_missing and not self.required: - self.default: t.Union[t.Any, t.Callable[[], t.Any]] = False - - if flag_value is None: - flag_value = not self.default - - if is_flag and type is None: - # Re-guess the type from the flag value instead of the - # default. 
- self.type = types.convert_type(None, flag_value) - - self.is_flag: bool = is_flag - self.is_bool_flag = is_flag and isinstance(self.type, types.BoolParamType) - self.flag_value: t.Any = flag_value - - # Counting - self.count = count - if count: - if type is None: - self.type = types.IntRange(min=0) - if default_is_missing: - self.default = 0 - - self.allow_from_autoenv = allow_from_autoenv - self.help = help - self.show_default = show_default - self.show_choices = show_choices - self.show_envvar = show_envvar - - if __debug__: - if self.nargs == -1: - raise TypeError("nargs=-1 is not supported for options.") - - if self.prompt and self.is_flag and not self.is_bool_flag: - raise TypeError("'prompt' is not valid for non-boolean flag.") - - if not self.is_bool_flag and self.secondary_opts: - raise TypeError("Secondary flag is not valid for non-boolean flag.") - - if self.is_bool_flag and self.hide_input and self.prompt is not None: - raise TypeError( - "'prompt' with 'hide_input' is not valid for boolean flag." - ) - - if self.count: - if self.multiple: - raise TypeError("'count' is not valid with 'multiple'.") - - if self.is_flag: - raise TypeError("'count' is not valid with 'is_flag'.") - - if self.multiple and self.is_flag: - raise TypeError("'multiple' is not valid with 'is_flag', use 'count'.") - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - help=self.help, - prompt=self.prompt, - is_flag=self.is_flag, - flag_value=self.flag_value, - count=self.count, - hidden=self.hidden, - ) - return info_dict - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - opts = [] - secondary_opts = [] - name = None - possible_names = [] - - for decl in decls: - if decl.isidentifier(): - if name is not None: - raise TypeError(f"Name '{name}' defined twice") - name = decl - else: - split_char = ";" if decl[:1] == "/" else "/" - if split_char in decl: - first, second = decl.split(split_char, 1) - first = first.rstrip() - if first: - possible_names.append(split_opt(first)) - opts.append(first) - second = second.lstrip() - if second: - secondary_opts.append(second.lstrip()) - if first == second: - raise ValueError( - f"Boolean option {decl!r} cannot use the" - " same flag for true/false." - ) - else: - possible_names.append(split_opt(decl)) - opts.append(decl) - - if name is None and possible_names: - possible_names.sort(key=lambda x: -len(x[0])) # group long options first - name = possible_names[0][1].replace("-", "_").lower() - if not name.isidentifier(): - name = None - - if name is None: - if not expose_value: - return None, opts, secondary_opts - raise TypeError("Could not determine name for option") - - if not opts and not secondary_opts: - raise TypeError( - f"No options defined but a name was passed ({name})." - " Did you mean to declare an argument instead? Did" - f" you mean to pass '--{name}'?" 
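# A minimal sketch of the flag/count/prompt behaviours configured above;
# the "login" command and its options are hypothetical.
import click

@click.command()
@click.option("--shout/--no-shout", default=False)  # boolean flag pair
@click.option("-v", "--verbose", count=True)        # counting option
@click.option("--password", prompt=True, hide_input=True,
              confirmation_prompt=True)
def login(shout, verbose, password):
    msg = f"verbosity={verbose}"
    click.echo(msg.upper() if shout else msg)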
- ) - - return name, opts, secondary_opts - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - if self.multiple: - action = "append" - elif self.count: - action = "count" - else: - action = "store" - - if self.is_flag: - action = f"{action}_const" - - if self.is_bool_flag and self.secondary_opts: - parser.add_option( - obj=self, opts=self.opts, dest=self.name, action=action, const=True - ) - parser.add_option( - obj=self, - opts=self.secondary_opts, - dest=self.name, - action=action, - const=False, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - const=self.flag_value, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - nargs=self.nargs, - ) - - def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: - if self.hidden: - return None - - any_prefix_is_slash = False - - def _write_opts(opts: t.Sequence[str]) -> str: - nonlocal any_prefix_is_slash - - rv, any_slashes = join_options(opts) - - if any_slashes: - any_prefix_is_slash = True - - if not self.is_flag and not self.count: - rv += f" {self.make_metavar()}" - - return rv - - rv = [_write_opts(self.opts)] - - if self.secondary_opts: - rv.append(_write_opts(self.secondary_opts)) - - help = self.help or "" - extra = [] - - if self.show_envvar: - envvar = self.envvar - - if envvar is None: - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - - if envvar is not None: - var_str = ( - envvar - if isinstance(envvar, str) - else ", ".join(str(d) for d in envvar) - ) - extra.append(_("env var: {var}").format(var=var_str)) - - # Temporarily enable resilient parsing to avoid type casting - # failing for the default. Might be possible to extend this to - # help formatting in general. - resilient = ctx.resilient_parsing - ctx.resilient_parsing = True - - try: - default_value = self.get_default(ctx, call=False) - finally: - ctx.resilient_parsing = resilient - - show_default = False - show_default_is_str = False - - if self.show_default is not None: - if isinstance(self.show_default, str): - show_default_is_str = show_default = True - else: - show_default = self.show_default - elif ctx.show_default is not None: - show_default = ctx.show_default - - if show_default_is_str or (show_default and (default_value is not None)): - if show_default_is_str: - default_string = f"({self.show_default})" - elif isinstance(default_value, (list, tuple)): - default_string = ", ".join(str(d) for d in default_value) - elif inspect.isfunction(default_value): - default_string = _("(dynamic)") - elif self.is_bool_flag and self.secondary_opts: - # For boolean flags that have distinct True/False opts, - # use the opt without prefix instead of the value. 
- default_string = split_opt( - (self.opts if self.default else self.secondary_opts)[0] - )[1] - elif self.is_bool_flag and not self.secondary_opts and not default_value: - default_string = "" - else: - default_string = str(default_value) - - if default_string: - extra.append(_("default: {default}").format(default=default_string)) - - if ( - isinstance(self.type, types._NumberRangeBase) - # skip count with default range type - and not (self.count and self.type.min == 0 and self.type.max is None) - ): - range_str = self.type._describe_range() - - if range_str: - extra.append(range_str) - - if self.required: - extra.append(_("required")) - - if extra: - extra_str = "; ".join(extra) - help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" - - return ("; " if any_prefix_is_slash else " / ").join(rv), help - - @t.overload - def get_default( - self, ctx: Context, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - # If we're a non boolean flag our default is more complex because - # we need to look at all flags in the same group to figure out - # if we're the default one in which case we return the flag - # value as default. - if self.is_flag and not self.is_bool_flag: - for param in ctx.command.params: - if param.name == self.name and param.default: - return param.flag_value # type: ignore - - return None - - return super().get_default(ctx, call=call) - - def prompt_for_value(self, ctx: Context) -> t.Any: - """This is an alternative flow that can be activated in the full - value processing if a value does not exist. It will prompt the - user until a valid value exists and then returns the processed - value as result. - """ - assert self.prompt is not None - - # Calculate the default before prompting anything to be stable. - default = self.get_default(ctx) - - # If this is a prompt for a flag we need to handle this - # differently. - if self.is_bool_flag: - return confirm(self.prompt, default) - - return prompt( - self.prompt, - default=default, - type=self.type, - hide_input=self.hide_input, - show_choices=self.show_choices, - confirmation_prompt=self.confirmation_prompt, - value_proc=lambda x: self.process_value(ctx, x), - ) - - def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: - rv = super().resolve_envvar_value(ctx) - - if rv is not None: - return rv - - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: - rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) - - if rv is None: - return None - - value_depth = (self.nargs != 1) + bool(self.multiple) - - if value_depth > 0: - rv = self.type.split_envvar_value(rv) - - if self.multiple and self.nargs != 1: - rv = batch(rv, self.nargs) - - return rv - - def consume_value( - self, ctx: Context, opts: t.Mapping[str, "Parameter"] - ) -> t.Tuple[t.Any, ParameterSource]: - value, source = super().consume_value(ctx, opts) - - # The parser will emit a sentinel value if the option can be - # given as a flag without a value. This is different from None - # to distinguish from the flag not being given at all. 
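# A minimal sketch of how show_default/show_envvar surface in the help record
# built above; the "fetch" command is hypothetical and the rendered help text
# shown in the comment is approximate.
import click

@click.command(context_settings={"auto_envvar_prefix": "APP"})
@click.option("--retries", default=3, show_default=True, show_envvar=True,
              type=click.IntRange(0, 10), help="How many times to retry.")
def fetch(retries):
    click.echo(retries)

# --help renders roughly:
#   --retries INTEGER RANGE  How many times to retry.
#                            [env var: APP_RETRIES; default: 3; 0<=x<=10]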
- if value is _flag_needs_value: - if self.prompt is not None and not ctx.resilient_parsing: - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - else: - value = self.flag_value - source = ParameterSource.COMMANDLINE - - elif ( - self.multiple - and value is not None - and any(v is _flag_needs_value for v in value) - ): - value = [self.flag_value if v is _flag_needs_value else v for v in value] - source = ParameterSource.COMMANDLINE - - # The value wasn't set, or used the param's default, prompt if - # prompting is enabled. - elif ( - source in {None, ParameterSource.DEFAULT} - and self.prompt is not None - and (self.required or self.prompt_required) - and not ctx.resilient_parsing - ): - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - - return value, source - - -class Argument(Parameter): - """Arguments are positional parameters to a command. They generally - provide fewer features than options but can have infinite ``nargs`` - and are required by default. - - All parameters are passed onwards to the parameter constructor. - """ - - param_type_name = "argument" - - def __init__( - self, - param_decls: t.Sequence[str], - required: t.Optional[bool] = None, - **attrs: t.Any, - ) -> None: - if required is None: - if attrs.get("default") is not None: - required = False - else: - required = attrs.get("nargs", 1) > 0 - - if "multiple" in attrs: - raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") - - super().__init__(param_decls, required=required, **attrs) - - if __debug__: - if self.default is not None and self.nargs == -1: - raise TypeError("'default' is not supported for nargs=-1.") - - @property - def human_readable_name(self) -> str: - if self.metavar is not None: - return self.metavar - return self.name.upper() # type: ignore - - def make_metavar(self) -> str: - if self.metavar is not None: - return self.metavar - var = self.type.get_metavar(self) - if not var: - var = self.name.upper() # type: ignore - if not self.required: - var = f"[{var}]" - if self.nargs != 1: - var += "..." - return var - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - if not decls: - if not expose_value: - return None, [], [] - raise TypeError("Could not determine name for argument") - if len(decls) == 1: - name = arg = decls[0] - name = name.replace("-", "_").lower() - else: - raise TypeError( - "Arguments take exactly one parameter declaration, got" - f" {len(decls)}." 
- ) - return name, [arg], [] - - def get_usage_pieces(self, ctx: Context) -> t.List[str]: - return [self.make_metavar()] - - def get_error_hint(self, ctx: Context) -> str: - return f"'{self.make_metavar()}'" - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/decorators.py b/venv_flaskchat/lib/python3.11/site-packages/click/decorators.py deleted file mode 100644 index 28618dc..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/decorators.py +++ /dev/null @@ -1,497 +0,0 @@ -import inspect -import types -import typing as t -from functools import update_wrapper -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .globals import get_current_context -from .utils import echo - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -FC = t.TypeVar("FC", bound=t.Union[t.Callable[..., t.Any], Command]) - - -def pass_context(f: F) -> F: - """Marks a callback as wanting to receive the current context - object as first argument. - """ - - def new_func(*args, **kwargs): # type: ignore - return f(get_current_context(), *args, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - -def pass_obj(f: F) -> F: - """Similar to :func:`pass_context`, but only pass the object on the - context onwards (:attr:`Context.obj`). This is useful if that object - represents the state of a nested system. - """ - - def new_func(*args, **kwargs): # type: ignore - return f(get_current_context().obj, *args, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - -def make_pass_decorator( - object_type: t.Type, ensure: bool = False -) -> "t.Callable[[F], F]": - """Given an object type this creates a decorator that will work - similar to :func:`pass_obj` but instead of passing the object of the - current context, it will find the innermost context of type - :func:`object_type`. - - This generates a decorator that works roughly like this:: - - from functools import update_wrapper - - def decorator(f): - @pass_context - def new_func(ctx, *args, **kwargs): - obj = ctx.find_object(object_type) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - :param object_type: the type of the object to pass. - :param ensure: if set to `True`, a new object will be created and - remembered on the context if it's not there yet. - """ - - def decorator(f: F) -> F: - def new_func(*args, **kwargs): # type: ignore - ctx = get_current_context() - - if ensure: - obj = ctx.ensure_object(object_type) - else: - obj = ctx.find_object(object_type) - - if obj is None: - raise RuntimeError( - "Managed to invoke callback without a context" - f" object of type {object_type.__name__!r}" - " existing." - ) - - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - return decorator - - -def pass_meta_key( - key: str, *, doc_description: t.Optional[str] = None -) -> "t.Callable[[F], F]": - """Create a decorator that passes a key from - :attr:`click.Context.meta` as the first argument to the decorated - function. - - :param key: Key in ``Context.meta`` to pass. - :param doc_description: Description of the object being passed, - inserted into the decorator's docstring. Defaults to "the 'key' - key from Context.meta". - - .. 
versionadded:: 8.0 - """ - - def decorator(f: F) -> F: - def new_func(*args, **kwargs): # type: ignore - ctx = get_current_context() - obj = ctx.meta[key] - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - if doc_description is None: - doc_description = f"the {key!r} key from :attr:`click.Context.meta`" - - decorator.__doc__ = ( - f"Decorator that passes {doc_description} as the first argument" - " to the decorated function." - ) - return decorator - - -CmdType = t.TypeVar("CmdType", bound=Command) - - -@t.overload -def command( - __func: t.Callable[..., t.Any], -) -> Command: - ... - - -@t.overload -def command( - name: t.Optional[str] = None, - **attrs: t.Any, -) -> t.Callable[..., Command]: - ... - - -@t.overload -def command( - name: t.Optional[str] = None, - cls: t.Type[CmdType] = ..., - **attrs: t.Any, -) -> t.Callable[..., CmdType]: - ... - - -def command( - name: t.Union[str, t.Callable[..., t.Any], None] = None, - cls: t.Optional[t.Type[Command]] = None, - **attrs: t.Any, -) -> t.Union[Command, t.Callable[..., Command]]: - r"""Creates a new :class:`Command` and uses the decorated function as - callback. This will also automatically attach all decorated - :func:`option`\s and :func:`argument`\s as parameters to the command. - - The name of the command defaults to the name of the function with - underscores replaced by dashes. If you want to change that, you can - pass the intended name as the first argument. - - All keyword arguments are forwarded to the underlying command class. - For the ``params`` argument, any decorated params are appended to - the end of the list. - - Once decorated the function turns into a :class:`Command` instance - that can be invoked as a command line utility or be attached to a - command :class:`Group`. - - :param name: the name of the command. This defaults to the function - name with underscores replaced by dashes. - :param cls: the command class to instantiate. This defaults to - :class:`Command`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.1 - The ``params`` argument can be used. Decorated params are - appended to the end of the list. - """ - - func: t.Optional[t.Callable[..., t.Any]] = None - - if callable(name): - func = name - name = None - assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." - assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." - - if cls is None: - cls = Command - - def decorator(f: t.Callable[..., t.Any]) -> Command: - if isinstance(f, Command): - raise TypeError("Attempted to convert a callback into a command twice.") - - attr_params = attrs.pop("params", None) - params = attr_params if attr_params is not None else [] - - try: - decorator_params = f.__click_params__ # type: ignore - except AttributeError: - pass - else: - del f.__click_params__ # type: ignore - params.extend(reversed(decorator_params)) - - if attrs.get("help") is None: - attrs["help"] = f.__doc__ - - cmd = cls( # type: ignore[misc] - name=name or f.__name__.lower().replace("_", "-"), # type: ignore[arg-type] - callback=f, - params=params, - **attrs, - ) - cmd.__doc__ = f.__doc__ - return cmd - - if func is not None: - return decorator(func) - - return decorator - - -@t.overload -def group( - __func: t.Callable[..., t.Any], -) -> Group: - ... - - -@t.overload -def group( - name: t.Optional[str] = None, - **attrs: t.Any, -) -> t.Callable[[F], Group]: - ... 
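# A minimal usage sketch of the decorators defined in this module. The names
# Repo, cli, greet and the --home option are illustrative, not part of click:
# make_pass_decorator shares a state object between a group and its commands.
import click

class Repo:
    def __init__(self, home: str = ".") -> None:
        self.home = home

pass_repo = click.make_pass_decorator(Repo, ensure=True)

@click.group()
@click.option("--home", default=".", help="Repository location.")
@click.pass_context
def cli(ctx: click.Context, home: str) -> None:
    # The group stores the shared object on the context for its subcommands.
    ctx.obj = Repo(home)

@cli.command()
@click.argument("name")
@pass_repo
def greet(repo: Repo, name: str) -> None:
    # pass_repo injects the nearest Repo instance as the first argument.
    click.echo(f"Hello {name} from {repo.home}")

if __name__ == "__main__":
    cli()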
- - -def group( - name: t.Union[str, t.Callable[..., t.Any], None] = None, **attrs: t.Any -) -> t.Union[Group, t.Callable[[F], Group]]: - """Creates a new :class:`Group` with a function as callback. This - works otherwise the same as :func:`command` just that the `cls` - parameter is set to :class:`Group`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - """ - if attrs.get("cls") is None: - attrs["cls"] = Group - - if callable(name): - grp: t.Callable[[F], Group] = t.cast(Group, command(**attrs)) - return grp(name) - - return t.cast(Group, command(name, **attrs)) - - -def _param_memo(f: FC, param: Parameter) -> None: - if isinstance(f, Command): - f.params.append(param) - else: - if not hasattr(f, "__click_params__"): - f.__click_params__ = [] # type: ignore - - f.__click_params__.append(param) # type: ignore - - -def argument(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: - """Attaches an argument to the command. All positional arguments are - passed as parameter declarations to :class:`Argument`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Argument` instance manually - and attaching it to the :attr:`Command.params` list. - - :param cls: the argument class to instantiate. This defaults to - :class:`Argument`. - """ - - def decorator(f: FC) -> FC: - ArgumentClass = attrs.pop("cls", None) or Argument - _param_memo(f, ArgumentClass(param_decls, **attrs)) - return f - - return decorator - - -def option(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: - """Attaches an option to the command. All positional arguments are - passed as parameter declarations to :class:`Option`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Option` instance manually - and attaching it to the :attr:`Command.params` list. - - :param cls: the option class to instantiate. This defaults to - :class:`Option`. - """ - - def decorator(f: FC) -> FC: - # Issue 926, copy attrs, so pre-defined options can re-use the same cls= - option_attrs = attrs.copy() - OptionClass = option_attrs.pop("cls", None) or Option - _param_memo(f, OptionClass(param_decls, **option_attrs)) - return f - - return decorator - - -def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--yes`` option which shows a prompt before continuing if - not passed. If the prompt is declined, the program will exit. - - :param param_decls: One or more option names. Defaults to the single - value ``"--yes"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value: - ctx.abort() - - if not param_decls: - param_decls = ("--yes",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("callback", callback) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("prompt", "Do you want to continue?") - kwargs.setdefault("help", "Confirm the action without prompting.") - return option(*param_decls, **kwargs) - - -def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--password`` option which prompts for a password, hiding - input and asking to enter the value again for confirmation. - - :param param_decls: One or more option names. Defaults to the single - value ``"--password"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
- """ - if not param_decls: - param_decls = ("--password",) - - kwargs.setdefault("prompt", True) - kwargs.setdefault("confirmation_prompt", True) - kwargs.setdefault("hide_input", True) - return option(*param_decls, **kwargs) - - -def version_option( - version: t.Optional[str] = None, - *param_decls: str, - package_name: t.Optional[str] = None, - prog_name: t.Optional[str] = None, - message: t.Optional[str] = None, - **kwargs: t.Any, -) -> t.Callable[[FC], FC]: - """Add a ``--version`` option which immediately prints the version - number and exits the program. - - If ``version`` is not provided, Click will try to detect it using - :func:`importlib.metadata.version` to get the version for the - ``package_name``. On Python < 3.8, the ``importlib_metadata`` - backport must be installed. - - If ``package_name`` is not provided, Click will try to detect it by - inspecting the stack frames. This will be used to detect the - version, so it must match the name of the installed package. - - :param version: The version number to show. If not provided, Click - will try to detect it. - :param param_decls: One or more option names. Defaults to the single - value ``"--version"``. - :param package_name: The package name to detect the version from. If - not provided, Click will try to detect it. - :param prog_name: The name of the CLI to show in the message. If not - provided, it will be detected from the command. - :param message: The message to show. The values ``%(prog)s``, - ``%(package)s``, and ``%(version)s`` are available. Defaults to - ``"%(prog)s, version %(version)s"``. - :param kwargs: Extra arguments are passed to :func:`option`. - :raise RuntimeError: ``version`` could not be detected. - - .. versionchanged:: 8.0 - Add the ``package_name`` parameter, and the ``%(package)s`` - value for messages. - - .. versionchanged:: 8.0 - Use :mod:`importlib.metadata` instead of ``pkg_resources``. The - version is detected based on the package name, not the entry - point name. The Python package name must match the installed - package name, or be passed with ``package_name=``. - """ - if message is None: - message = _("%(prog)s, version %(version)s") - - if version is None and package_name is None: - frame = inspect.currentframe() - f_back = frame.f_back if frame is not None else None - f_globals = f_back.f_globals if f_back is not None else None - # break reference cycle - # https://docs.python.org/3/library/inspect.html#the-interpreter-stack - del frame - - if f_globals is not None: - package_name = f_globals.get("__name__") - - if package_name == "__main__": - package_name = f_globals.get("__package__") - - if package_name: - package_name = package_name.partition(".")[0] - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - nonlocal prog_name - nonlocal version - - if prog_name is None: - prog_name = ctx.find_root().info_name - - if version is None and package_name is not None: - metadata: t.Optional[types.ModuleType] - - try: - from importlib import metadata # type: ignore - except ImportError: - # Python < 3.8 - import importlib_metadata as metadata # type: ignore - - try: - version = metadata.version(package_name) # type: ignore - except metadata.PackageNotFoundError: # type: ignore - raise RuntimeError( - f"{package_name!r} is not installed. Try passing" - " 'package_name' instead." - ) from None - - if version is None: - raise RuntimeError( - f"Could not determine the version for {package_name!r} automatically." 
- ) - - echo( - t.cast(str, message) - % {"prog": prog_name, "package": package_name, "version": version}, - color=ctx.color, - ) - ctx.exit() - - if not param_decls: - param_decls = ("--version",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show the version and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) - - -def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--help`` option which immediately prints the help page - and exits the program. - - This is usually unnecessary, as the ``--help`` option is added to - each command automatically unless ``add_help_option=False`` is - passed. - - :param param_decls: One or more option names. Defaults to the single - value ``"--help"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - if not param_decls: - param_decls = ("--help",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show this message and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/exceptions.py b/venv_flaskchat/lib/python3.11/site-packages/click/exceptions.py deleted file mode 100644 index 9e20b3e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/exceptions.py +++ /dev/null @@ -1,287 +0,0 @@ -import os -import typing as t -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import get_text_stderr -from .utils import echo - -if t.TYPE_CHECKING: - from .core import Context - from .core import Parameter - - -def _join_param_hints( - param_hint: t.Optional[t.Union[t.Sequence[str], str]] -) -> t.Optional[str]: - if param_hint is not None and not isinstance(param_hint, str): - return " / ".join(repr(x) for x in param_hint) - - return param_hint - - -class ClickException(Exception): - """An exception that Click can handle and show to the user.""" - - #: The exit code for this exception. - exit_code = 1 - - def __init__(self, message: str) -> None: - super().__init__(message) - self.message = message - - def format_message(self) -> str: - return self.message - - def __str__(self) -> str: - return self.message - - def show(self, file: t.Optional[t.IO] = None) -> None: - if file is None: - file = get_text_stderr() - - echo(_("Error: {message}").format(message=self.format_message()), file=file) - - -class UsageError(ClickException): - """An internal exception that signals a usage error. This typically - aborts any further handling. - - :param message: the error message to display. - :param ctx: optionally the context that caused this error. Click will - fill in the context automatically in some situations. 
- """ - - exit_code = 2 - - def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: - super().__init__(message) - self.ctx = ctx - self.cmd = self.ctx.command if self.ctx else None - - def show(self, file: t.Optional[t.IO] = None) -> None: - if file is None: - file = get_text_stderr() - color = None - hint = "" - if ( - self.ctx is not None - and self.ctx.command.get_help_option(self.ctx) is not None - ): - hint = _("Try '{command} {option}' for help.").format( - command=self.ctx.command_path, option=self.ctx.help_option_names[0] - ) - hint = f"{hint}\n" - if self.ctx is not None: - color = self.ctx.color - echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=color, - ) - - -class BadParameter(UsageError): - """An exception that formats out a standardized error message for a - bad parameter. This is useful when thrown from a callback or type as - Click will attach contextual information to it (for instance, which - parameter it is). - - .. versionadded:: 2.0 - - :param param: the parameter object that caused this error. This can - be left out, and Click will attach this info itself - if possible. - :param param_hint: a string that shows up as parameter name. This - can be used as alternative to `param` in cases - where custom validation should happen. If it is - a string it's used as such, if it's a list then - each item is quoted and separated. - """ - - def __init__( - self, - message: str, - ctx: t.Optional["Context"] = None, - param: t.Optional["Parameter"] = None, - param_hint: t.Optional[str] = None, - ) -> None: - super().__init__(message, ctx) - self.param = param - self.param_hint = param_hint - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - return _("Invalid value: {message}").format(message=self.message) - - return _("Invalid value for {param_hint}: {message}").format( - param_hint=_join_param_hints(param_hint), message=self.message - ) - - -class MissingParameter(BadParameter): - """Raised if click required an option or argument but it was not - provided when invoking the script. - - .. versionadded:: 4.0 - - :param param_type: a string that indicates the type of the parameter. - The default is to inherit the parameter type from - the given `param`. Valid values are ``'parameter'``, - ``'option'`` or ``'argument'``. - """ - - def __init__( - self, - message: t.Optional[str] = None, - ctx: t.Optional["Context"] = None, - param: t.Optional["Parameter"] = None, - param_hint: t.Optional[str] = None, - param_type: t.Optional[str] = None, - ) -> None: - super().__init__(message or "", ctx, param, param_hint) - self.param_type = param_type - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint: t.Optional[str] = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - param_hint = None - - param_hint = _join_param_hints(param_hint) - param_hint = f" {param_hint}" if param_hint else "" - - param_type = self.param_type - if param_type is None and self.param is not None: - param_type = self.param.param_type_name - - msg = self.message - if self.param is not None: - msg_extra = self.param.type.get_missing_message(self.param) - if msg_extra: - if msg: - msg += f". 
{msg_extra}" - else: - msg = msg_extra - - msg = f" {msg}" if msg else "" - - # Translate param_type for known types. - if param_type == "argument": - missing = _("Missing argument") - elif param_type == "option": - missing = _("Missing option") - elif param_type == "parameter": - missing = _("Missing parameter") - else: - missing = _("Missing {param_type}").format(param_type=param_type) - - return f"{missing}{param_hint}.{msg}" - - def __str__(self) -> str: - if not self.message: - param_name = self.param.name if self.param else None - return _("Missing parameter: {param_name}").format(param_name=param_name) - else: - return self.message - - -class NoSuchOption(UsageError): - """Raised if click attempted to handle an option that does not - exist. - - .. versionadded:: 4.0 - """ - - def __init__( - self, - option_name: str, - message: t.Optional[str] = None, - possibilities: t.Optional[t.Sequence[str]] = None, - ctx: t.Optional["Context"] = None, - ) -> None: - if message is None: - message = _("No such option: {name}").format(name=option_name) - - super().__init__(message, ctx) - self.option_name = option_name - self.possibilities = possibilities - - def format_message(self) -> str: - if not self.possibilities: - return self.message - - possibility_str = ", ".join(sorted(self.possibilities)) - suggest = ngettext( - "Did you mean {possibility}?", - "(Possible options: {possibilities})", - len(self.possibilities), - ).format(possibility=possibility_str, possibilities=possibility_str) - return f"{self.message} {suggest}" - - -class BadOptionUsage(UsageError): - """Raised if an option is generally supplied but the use of the option - was incorrect. This is for instance raised if the number of arguments - for an option is not correct. - - .. versionadded:: 4.0 - - :param option_name: the name of the option being used incorrectly. - """ - - def __init__( - self, option_name: str, message: str, ctx: t.Optional["Context"] = None - ) -> None: - super().__init__(message, ctx) - self.option_name = option_name - - -class BadArgumentUsage(UsageError): - """Raised if an argument is generally supplied but the use of the argument - was incorrect. This is for instance raised if the number of values - for an argument is not correct. - - .. versionadded:: 6.0 - """ - - -class FileError(ClickException): - """Raised if a file cannot be opened.""" - - def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: - if hint is None: - hint = _("unknown error") - - super().__init__(hint) - self.ui_filename = os.fsdecode(filename) - self.filename = filename - - def format_message(self) -> str: - return _("Could not open file {filename!r}: {message}").format( - filename=self.ui_filename, message=self.message - ) - - -class Abort(RuntimeError): - """An internal signalling exception that signals Click to abort.""" - - -class Exit(RuntimeError): - """An exception that indicates that the application should exit with some - status code. - - :param code: the status code to exit with. 
- """ - - __slots__ = ("exit_code",) - - def __init__(self, code: int = 0) -> None: - self.exit_code = code diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/formatting.py b/venv_flaskchat/lib/python3.11/site-packages/click/formatting.py deleted file mode 100644 index ddd2a2f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/formatting.py +++ /dev/null @@ -1,301 +0,0 @@ -import typing as t -from contextlib import contextmanager -from gettext import gettext as _ - -from ._compat import term_len -from .parser import split_opt - -# Can force a width. This is used by the test system -FORCED_WIDTH: t.Optional[int] = None - - -def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: - widths: t.Dict[int, int] = {} - - for row in rows: - for idx, col in enumerate(row): - widths[idx] = max(widths.get(idx, 0), term_len(col)) - - return tuple(y for x, y in sorted(widths.items())) - - -def iter_rows( - rows: t.Iterable[t.Tuple[str, str]], col_count: int -) -> t.Iterator[t.Tuple[str, ...]]: - for row in rows: - yield row + ("",) * (col_count - len(row)) - - -def wrap_text( - text: str, - width: int = 78, - initial_indent: str = "", - subsequent_indent: str = "", - preserve_paragraphs: bool = False, -) -> str: - """A helper function that intelligently wraps text. By default, it - assumes that it operates on a single paragraph of text but if the - `preserve_paragraphs` parameter is provided it will intelligently - handle paragraphs (defined by two empty lines). - - If paragraphs are handled, a paragraph can be prefixed with an empty - line containing the ``\\b`` character (``\\x08``) to indicate that - no rewrapping should happen in that block. - - :param text: the text that should be rewrapped. - :param width: the maximum width for the text. - :param initial_indent: the initial indent that should be placed on the - first line as a string. - :param subsequent_indent: the indent string that should be placed on - each consecutive line. - :param preserve_paragraphs: if this flag is set then the wrapping will - intelligently handle paragraphs. - """ - from ._textwrap import TextWrapper - - text = text.expandtabs() - wrapper = TextWrapper( - width, - initial_indent=initial_indent, - subsequent_indent=subsequent_indent, - replace_whitespace=False, - ) - if not preserve_paragraphs: - return wrapper.fill(text) - - p: t.List[t.Tuple[int, bool, str]] = [] - buf: t.List[str] = [] - indent = None - - def _flush_par() -> None: - if not buf: - return - if buf[0].strip() == "\b": - p.append((indent or 0, True, "\n".join(buf[1:]))) - else: - p.append((indent or 0, False, " ".join(buf))) - del buf[:] - - for line in text.splitlines(): - if not line: - _flush_par() - indent = None - else: - if indent is None: - orig_len = term_len(line) - line = line.lstrip() - indent = orig_len - term_len(line) - buf.append(line) - _flush_par() - - rv = [] - for indent, raw, text in p: - with wrapper.extra_indent(" " * indent): - if raw: - rv.append(wrapper.indent_only(text)) - else: - rv.append(wrapper.fill(text)) - - return "\n\n".join(rv) - - -class HelpFormatter: - """This class helps with formatting text-based help pages. It's - usually just needed for very special internal cases, but it's also - exposed so that developers can write their own fancy outputs. - - At present, it always writes into memory. - - :param indent_increment: the additional increment for each level. - :param width: the width for the text. This defaults to the terminal - width clamped to a maximum of 78. 
- """ - - def __init__( - self, - indent_increment: int = 2, - width: t.Optional[int] = None, - max_width: t.Optional[int] = None, - ) -> None: - import shutil - - self.indent_increment = indent_increment - if max_width is None: - max_width = 80 - if width is None: - width = FORCED_WIDTH - if width is None: - width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) - self.width = width - self.current_indent = 0 - self.buffer: t.List[str] = [] - - def write(self, string: str) -> None: - """Writes a unicode string into the internal buffer.""" - self.buffer.append(string) - - def indent(self) -> None: - """Increases the indentation.""" - self.current_indent += self.indent_increment - - def dedent(self) -> None: - """Decreases the indentation.""" - self.current_indent -= self.indent_increment - - def write_usage( - self, prog: str, args: str = "", prefix: t.Optional[str] = None - ) -> None: - """Writes a usage line into the buffer. - - :param prog: the program name. - :param args: whitespace separated list of arguments. - :param prefix: The prefix for the first line. Defaults to - ``"Usage: "``. - """ - if prefix is None: - prefix = f"{_('Usage:')} " - - usage_prefix = f"{prefix:>{self.current_indent}}{prog} " - text_width = self.width - self.current_indent - - if text_width >= (term_len(usage_prefix) + 20): - # The arguments will fit to the right of the prefix. - indent = " " * term_len(usage_prefix) - self.write( - wrap_text( - args, - text_width, - initial_indent=usage_prefix, - subsequent_indent=indent, - ) - ) - else: - # The prefix is too long, put the arguments on the next line. - self.write(usage_prefix) - self.write("\n") - indent = " " * (max(self.current_indent, term_len(prefix)) + 4) - self.write( - wrap_text( - args, text_width, initial_indent=indent, subsequent_indent=indent - ) - ) - - self.write("\n") - - def write_heading(self, heading: str) -> None: - """Writes a heading into the buffer.""" - self.write(f"{'':>{self.current_indent}}{heading}:\n") - - def write_paragraph(self) -> None: - """Writes a paragraph into the buffer.""" - if self.buffer: - self.write("\n") - - def write_text(self, text: str) -> None: - """Writes re-indented text into the buffer. This rewraps and - preserves paragraphs. - """ - indent = " " * self.current_indent - self.write( - wrap_text( - text, - self.width, - initial_indent=indent, - subsequent_indent=indent, - preserve_paragraphs=True, - ) - ) - self.write("\n") - - def write_dl( - self, - rows: t.Sequence[t.Tuple[str, str]], - col_max: int = 30, - col_spacing: int = 2, - ) -> None: - """Writes a definition list into the buffer. This is how options - and commands are usually formatted. - - :param rows: a list of two item tuples for the terms and values. - :param col_max: the maximum width of the first column. - :param col_spacing: the number of spaces between the first and - second column. 
- """ - rows = list(rows) - widths = measure_table(rows) - if len(widths) != 2: - raise TypeError("Expected two columns for definition list") - - first_col = min(widths[0], col_max) + col_spacing - - for first, second in iter_rows(rows, len(widths)): - self.write(f"{'':>{self.current_indent}}{first}") - if not second: - self.write("\n") - continue - if term_len(first) <= first_col - col_spacing: - self.write(" " * (first_col - term_len(first))) - else: - self.write("\n") - self.write(" " * (first_col + self.current_indent)) - - text_width = max(self.width - first_col - 2, 10) - wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) - lines = wrapped_text.splitlines() - - if lines: - self.write(f"{lines[0]}\n") - - for line in lines[1:]: - self.write(f"{'':>{first_col + self.current_indent}}{line}\n") - else: - self.write("\n") - - @contextmanager - def section(self, name: str) -> t.Iterator[None]: - """Helpful context manager that writes a paragraph, a heading, - and the indents. - - :param name: the section name that is written as heading. - """ - self.write_paragraph() - self.write_heading(name) - self.indent() - try: - yield - finally: - self.dedent() - - @contextmanager - def indentation(self) -> t.Iterator[None]: - """A context manager that increases the indentation.""" - self.indent() - try: - yield - finally: - self.dedent() - - def getvalue(self) -> str: - """Returns the buffer contents.""" - return "".join(self.buffer) - - -def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: - """Given a list of option strings this joins them in the most appropriate - way and returns them in the form ``(formatted_string, - any_prefix_is_slash)`` where the second item in the tuple is a flag that - indicates if any of the option prefixes was a slash. - """ - rv = [] - any_prefix_is_slash = False - - for opt in options: - prefix = split_opt(opt)[0] - - if prefix == "/": - any_prefix_is_slash = True - - rv.append((len(prefix), opt)) - - rv.sort(key=lambda x: x[0]) - return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/globals.py b/venv_flaskchat/lib/python3.11/site-packages/click/globals.py deleted file mode 100644 index 480058f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/globals.py +++ /dev/null @@ -1,68 +0,0 @@ -import typing as t -from threading import local - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Context - -_local = local() - - -@t.overload -def get_current_context(silent: "te.Literal[False]" = False) -> "Context": - ... - - -@t.overload -def get_current_context(silent: bool = ...) -> t.Optional["Context"]: - ... - - -def get_current_context(silent: bool = False) -> t.Optional["Context"]: - """Returns the current click context. This can be used as a way to - access the current context object from anywhere. This is a more implicit - alternative to the :func:`pass_context` decorator. This function is - primarily useful for helpers such as :func:`echo` which might be - interested in changing its behavior based on the current context. - - To push the current context, :meth:`Context.scope` can be used. - - .. versionadded:: 5.0 - - :param silent: if set to `True` the return value is `None` if no context - is available. The default behavior is to raise a - :exc:`RuntimeError`. 
- """ - try: - return t.cast("Context", _local.stack[-1]) - except (AttributeError, IndexError) as e: - if not silent: - raise RuntimeError("There is no active click context.") from e - - return None - - -def push_context(ctx: "Context") -> None: - """Pushes a new context to the current stack.""" - _local.__dict__.setdefault("stack", []).append(ctx) - - -def pop_context() -> None: - """Removes the top level from the stack.""" - _local.stack.pop() - - -def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: - """Internal helper to get the default value of the color flag. If a - value is passed it's returned unchanged, otherwise it's looked up from - the current context. - """ - if color is not None: - return color - - ctx = get_current_context(silent=True) - - if ctx is not None: - return ctx.color - - return None diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/parser.py b/venv_flaskchat/lib/python3.11/site-packages/click/parser.py deleted file mode 100644 index 2d5a2ed..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/parser.py +++ /dev/null @@ -1,529 +0,0 @@ -""" -This module started out as largely a copy paste from the stdlib's -optparse module with the features removed that we do not need from -optparse because we implement them in Click on a higher level (for -instance type handling, help formatting and a lot more). - -The plan is to remove more and more from here over time. - -The reason this is a different module and not optparse from the stdlib -is that there are differences in 2.x and 3.x about the error messages -generated and optparse in the stdlib uses gettext for no good reason -and might cause us issues. - -Click uses parts of optparse written by Gregory P. Ward and maintained -by the Python Software Foundation. This is limited to code in parser.py. - -Copyright 2001-2006 Gregory P. Ward. All rights reserved. -Copyright 2002-2006 Python Software Foundation. All rights reserved. -""" -# This code uses parts of optparse written by Gregory P. Ward and -# maintained by the Python Software Foundation. -# Copyright 2001-2006 Gregory P. Ward -# Copyright 2002-2006 Python Software Foundation -import typing as t -from collections import deque -from gettext import gettext as _ -from gettext import ngettext - -from .exceptions import BadArgumentUsage -from .exceptions import BadOptionUsage -from .exceptions import NoSuchOption -from .exceptions import UsageError - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Argument as CoreArgument - from .core import Context - from .core import Option as CoreOption - from .core import Parameter as CoreParameter - -V = t.TypeVar("V") - -# Sentinel value that indicates an option was passed as a flag without a -# value but is not a flag option. Option.consume_value uses this to -# prompt or use the flag_value. -_flag_needs_value = object() - - -def _unpack_args( - args: t.Sequence[str], nargs_spec: t.Sequence[int] -) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: - """Given an iterable of arguments and an iterable of nargs specifications, - it returns a tuple with all the unpacked arguments at the first index - and all remaining arguments as the second. - - The nargs specification is the number of arguments that should be consumed - or `-1` to indicate that this position should eat up all the remainders. - - Missing items are filled with `None`. 
- """ - args = deque(args) - nargs_spec = deque(nargs_spec) - rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] - spos: t.Optional[int] = None - - def _fetch(c: "te.Deque[V]") -> t.Optional[V]: - try: - if spos is None: - return c.popleft() - else: - return c.pop() - except IndexError: - return None - - while nargs_spec: - nargs = _fetch(nargs_spec) - - if nargs is None: - continue - - if nargs == 1: - rv.append(_fetch(args)) - elif nargs > 1: - x = [_fetch(args) for _ in range(nargs)] - - # If we're reversed, we're pulling in the arguments in reverse, - # so we need to turn them around. - if spos is not None: - x.reverse() - - rv.append(tuple(x)) - elif nargs < 0: - if spos is not None: - raise TypeError("Cannot have two nargs < 0") - - spos = len(rv) - rv.append(None) - - # spos is the position of the wildcard (star). If it's not `None`, - # we fill it with the remainder. - if spos is not None: - rv[spos] = tuple(args) - args = [] - rv[spos + 1 :] = reversed(rv[spos + 1 :]) - - return tuple(rv), list(args) - - -def split_opt(opt: str) -> t.Tuple[str, str]: - first = opt[:1] - if first.isalnum(): - return "", opt - if opt[1:2] == first: - return opt[:2], opt[2:] - return first, opt[1:] - - -def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: - if ctx is None or ctx.token_normalize_func is None: - return opt - prefix, opt = split_opt(opt) - return f"{prefix}{ctx.token_normalize_func(opt)}" - - -def split_arg_string(string: str) -> t.List[str]: - """Split an argument string as with :func:`shlex.split`, but don't - fail if the string is incomplete. Ignores a missing closing quote or - incomplete escape sequence and uses the partial token as-is. - - .. code-block:: python - - split_arg_string("example 'my file") - ["example", "my file"] - - split_arg_string("example my\\") - ["example", "my"] - - :param string: String to split. - """ - import shlex - - lex = shlex.shlex(string, posix=True) - lex.whitespace_split = True - lex.commenters = "" - out = [] - - try: - for token in lex: - out.append(token) - except ValueError: - # Raised when end-of-string is reached in an invalid state. Use - # the partial token as-is. The quote or escape character is in - # lex.state, not lex.token. 
- out.append(lex.token) - - return out - - -class Option: - def __init__( - self, - obj: "CoreOption", - opts: t.Sequence[str], - dest: t.Optional[str], - action: t.Optional[str] = None, - nargs: int = 1, - const: t.Optional[t.Any] = None, - ): - self._short_opts = [] - self._long_opts = [] - self.prefixes = set() - - for opt in opts: - prefix, value = split_opt(opt) - if not prefix: - raise ValueError(f"Invalid start character for option ({opt})") - self.prefixes.add(prefix[0]) - if len(prefix) == 1 and len(value) == 1: - self._short_opts.append(opt) - else: - self._long_opts.append(opt) - self.prefixes.add(prefix) - - if action is None: - action = "store" - - self.dest = dest - self.action = action - self.nargs = nargs - self.const = const - self.obj = obj - - @property - def takes_value(self) -> bool: - return self.action in ("store", "append") - - def process(self, value: str, state: "ParsingState") -> None: - if self.action == "store": - state.opts[self.dest] = value # type: ignore - elif self.action == "store_const": - state.opts[self.dest] = self.const # type: ignore - elif self.action == "append": - state.opts.setdefault(self.dest, []).append(value) # type: ignore - elif self.action == "append_const": - state.opts.setdefault(self.dest, []).append(self.const) # type: ignore - elif self.action == "count": - state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore - else: - raise ValueError(f"unknown action '{self.action}'") - state.order.append(self.obj) - - -class Argument: - def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): - self.dest = dest - self.nargs = nargs - self.obj = obj - - def process( - self, - value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], - state: "ParsingState", - ) -> None: - if self.nargs > 1: - assert value is not None - holes = sum(1 for x in value if x is None) - if holes == len(value): - value = None - elif holes != 0: - raise BadArgumentUsage( - _("Argument {name!r} takes {nargs} values.").format( - name=self.dest, nargs=self.nargs - ) - ) - - if self.nargs == -1 and self.obj.envvar is not None and value == (): - # Replace empty tuple with None so that a value from the - # environment may be tried. - value = None - - state.opts[self.dest] = value # type: ignore - state.order.append(self.obj) - - -class ParsingState: - def __init__(self, rargs: t.List[str]) -> None: - self.opts: t.Dict[str, t.Any] = {} - self.largs: t.List[str] = [] - self.rargs = rargs - self.order: t.List["CoreParameter"] = [] - - -class OptionParser: - """The option parser is an internal class that is ultimately used to - parse options and arguments. It's modelled after optparse and brings - a similar but vastly simplified API. It should generally not be used - directly as the high level Click classes wrap it for you. - - It's not nearly as extensible as optparse or argparse as it does not - implement features that are implemented on a higher level (such as - types or defaults). - - :param ctx: optionally the :class:`~click.Context` where this parser - should go with. - """ - - def __init__(self, ctx: t.Optional["Context"] = None) -> None: - #: The :class:`~click.Context` for this parser. This might be - #: `None` for some advanced use cases. - self.ctx = ctx - #: This controls how the parser deals with interspersed arguments. - #: If this is set to `False`, the parser will stop on the first - #: non-option. Click uses this to implement nested subcommands - #: safely. 
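#: (e.g. with interspersed args allowed, ``prog ARG --verbose`` still treats
#: --verbose as an option rather than as another positional argument.)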
- self.allow_interspersed_args = True - #: This tells the parser how to deal with unknown options. By - #: default it will error out (which is sensible), but there is a - #: second mode where it will ignore it and continue processing - #: after shifting all the unknown options into the resulting args. - self.ignore_unknown_options = False - - if ctx is not None: - self.allow_interspersed_args = ctx.allow_interspersed_args - self.ignore_unknown_options = ctx.ignore_unknown_options - - self._short_opt: t.Dict[str, Option] = {} - self._long_opt: t.Dict[str, Option] = {} - self._opt_prefixes = {"-", "--"} - self._args: t.List[Argument] = [] - - def add_option( - self, - obj: "CoreOption", - opts: t.Sequence[str], - dest: t.Optional[str], - action: t.Optional[str] = None, - nargs: int = 1, - const: t.Optional[t.Any] = None, - ) -> None: - """Adds a new option named `dest` to the parser. The destination - is not inferred (unlike with optparse) and needs to be explicitly - provided. Action can be any of ``store``, ``store_const``, - ``append``, ``append_const`` or ``count``. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - opts = [normalize_opt(opt, self.ctx) for opt in opts] - option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) - self._opt_prefixes.update(option.prefixes) - for opt in option._short_opts: - self._short_opt[opt] = option - for opt in option._long_opts: - self._long_opt[opt] = option - - def add_argument( - self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 - ) -> None: - """Adds a positional argument named `dest` to the parser. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - self._args.append(Argument(obj, dest=dest, nargs=nargs)) - - def parse_args( - self, args: t.List[str] - ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: - """Parses positional arguments and returns ``(values, args, order)`` - for the parsed options and arguments as well as the leftover - arguments if there are any. The order is a list of objects as they - appear on the command line. If arguments appear multiple times they - will be memorized multiple times as well. - """ - state = ParsingState(args) - try: - self._process_args_for_options(state) - self._process_args_for_args(state) - except UsageError: - if self.ctx is None or not self.ctx.resilient_parsing: - raise - return state.opts, state.largs, state.order - - def _process_args_for_args(self, state: ParsingState) -> None: - pargs, args = _unpack_args( - state.largs + state.rargs, [x.nargs for x in self._args] - ) - - for idx, arg in enumerate(self._args): - arg.process(pargs[idx], state) - - state.largs = args - state.rargs = [] - - def _process_args_for_options(self, state: ParsingState) -> None: - while state.rargs: - arg = state.rargs.pop(0) - arglen = len(arg) - # Double dashes always handled explicitly regardless of what - # prefixes are valid. - if arg == "--": - return - elif arg[:1] in self._opt_prefixes and arglen > 1: - self._process_opts(arg, state) - elif self.allow_interspersed_args: - state.largs.append(arg) - else: - state.rargs.insert(0, arg) - return - - # Say this is the original argument list: - # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] - # ^ - # (we are about to process arg(i)). 
- # - # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of - # [arg0, ..., arg(i-1)] (any options and their arguments will have - # been removed from largs). - # - # The while loop will usually consume 1 or more arguments per pass. - # If it consumes 1 (eg. arg is an option that takes no arguments), - # then after _process_arg() is done the situation is: - # - # largs = subset of [arg0, ..., arg(i)] - # rargs = [arg(i+1), ..., arg(N-1)] - # - # If allow_interspersed_args is false, largs will always be - # *empty* -- still a subset of [arg0, ..., arg(i-1)], but - # not a very interesting subset! - - def _match_long_opt( - self, opt: str, explicit_value: t.Optional[str], state: ParsingState - ) -> None: - if opt not in self._long_opt: - from difflib import get_close_matches - - possibilities = get_close_matches(opt, self._long_opt) - raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) - - option = self._long_opt[opt] - if option.takes_value: - # At this point it's safe to modify rargs by injecting the - # explicit value, because no exception is raised in this - # branch. This means that the inserted value will be fully - # consumed. - if explicit_value is not None: - state.rargs.insert(0, explicit_value) - - value = self._get_value_from_state(opt, option, state) - - elif explicit_value is not None: - raise BadOptionUsage( - opt, _("Option {name!r} does not take a value.").format(name=opt) - ) - - else: - value = None - - option.process(value, state) - - def _match_short_opt(self, arg: str, state: ParsingState) -> None: - stop = False - i = 1 - prefix = arg[0] - unknown_options = [] - - for ch in arg[1:]: - opt = normalize_opt(f"{prefix}{ch}", self.ctx) - option = self._short_opt.get(opt) - i += 1 - - if not option: - if self.ignore_unknown_options: - unknown_options.append(ch) - continue - raise NoSuchOption(opt, ctx=self.ctx) - if option.takes_value: - # Any characters left in arg? Pretend they're the - # next arg, and stop consuming characters of arg. - if i < len(arg): - state.rargs.insert(0, arg[i:]) - stop = True - - value = self._get_value_from_state(opt, option, state) - - else: - value = None - - option.process(value, state) - - if stop: - break - - # If we got any unknown options we re-combinate the string of the - # remaining options and re-attach the prefix, then report that - # to the state as new larg. This way there is basic combinatorics - # that can be achieved while still ignoring unknown arguments. - if self.ignore_unknown_options and unknown_options: - state.largs.append(f"{prefix}{''.join(unknown_options)}") - - def _get_value_from_state( - self, option_name: str, option: Option, state: ParsingState - ) -> t.Any: - nargs = option.nargs - - if len(state.rargs) < nargs: - if option.obj._flag_needs_value: - # Option allows omitting the value. - value = _flag_needs_value - else: - raise BadOptionUsage( - option_name, - ngettext( - "Option {name!r} requires an argument.", - "Option {name!r} requires {nargs} arguments.", - nargs, - ).format(name=option_name, nargs=nargs), - ) - elif nargs == 1: - next_rarg = state.rargs[0] - - if ( - option.obj._flag_needs_value - and isinstance(next_rarg, str) - and next_rarg[:1] in self._opt_prefixes - and len(next_rarg) > 1 - ): - # The next arg looks like the start of an option, don't - # use it as the value if omitting the value is allowed. 
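# Illustrative case: ``--opt --other`` where --opt may omit its value; --other
# is left in rargs for normal option parsing instead of being consumed here.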
- value = _flag_needs_value - else: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - return value - - def _process_opts(self, arg: str, state: ParsingState) -> None: - explicit_value = None - # Long option handling happens in two parts. The first part is - # supporting explicitly attached values. In any case, we will try - # to long match the option first. - if "=" in arg: - long_opt, explicit_value = arg.split("=", 1) - else: - long_opt = arg - norm_long_opt = normalize_opt(long_opt, self.ctx) - - # At this point we will match the (assumed) long option through - # the long option matching code. Note that this allows options - # like "-foo" to be matched as long options. - try: - self._match_long_opt(norm_long_opt, explicit_value, state) - except NoSuchOption: - # At this point the long option matching failed, and we need - # to try with short options. However there is a special rule - # which says, that if we have a two character options prefix - # (applies to "--foo" for instance), we do not dispatch to the - # short option code and will instead raise the no option - # error. - if arg[:2] not in self._opt_prefixes: - self._match_short_opt(arg, state) - return - - if not self.ignore_unknown_options: - raise - - state.largs.append(arg) diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/py.typed b/venv_flaskchat/lib/python3.11/site-packages/click/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/shell_completion.py b/venv_flaskchat/lib/python3.11/site-packages/click/shell_completion.py deleted file mode 100644 index c17a8e6..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/shell_completion.py +++ /dev/null @@ -1,580 +0,0 @@ -import os -import re -import typing as t -from gettext import gettext as _ - -from .core import Argument -from .core import BaseCommand -from .core import Context -from .core import MultiCommand -from .core import Option -from .core import Parameter -from .core import ParameterSource -from .parser import split_arg_string -from .utils import echo - - -def shell_complete( - cli: BaseCommand, - ctx_args: t.Dict[str, t.Any], - prog_name: str, - complete_var: str, - instruction: str, -) -> int: - """Perform shell completion for the given CLI program. - - :param cli: Command being called. - :param ctx_args: Extra arguments to pass to - ``cli.make_context``. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - :param instruction: Value of ``complete_var`` with the completion - instruction and shell, in the form ``instruction_shell``. - :return: Status code to exit with. - """ - shell, _, instruction = instruction.partition("_") - comp_cls = get_completion_class(shell) - - if comp_cls is None: - return 1 - - comp = comp_cls(cli, ctx_args, prog_name, complete_var) - - if instruction == "source": - echo(comp.source()) - return 0 - - if instruction == "complete": - echo(comp.complete()) - return 0 - - return 1 - - -class CompletionItem: - """Represents a completion value and metadata about the value. The - default metadata is ``type`` to indicate special shell handling, - and ``help`` if a shell supports showing a help string next to the - value. - - Arbitrary parameters can be passed when creating the object, and - accessed using ``item.attr``. If an attribute wasn't passed, - accessing it returns ``None``. 
- - :param value: The completion suggestion. - :param type: Tells the shell script to provide special completion - support for the type. Click uses ``"dir"`` and ``"file"``. - :param help: String shown next to the value if supported. - :param kwargs: Arbitrary metadata. The built-in implementations - don't use this, but custom type completions paired with custom - shell support could use it. - """ - - __slots__ = ("value", "type", "help", "_info") - - def __init__( - self, - value: t.Any, - type: str = "plain", - help: t.Optional[str] = None, - **kwargs: t.Any, - ) -> None: - self.value = value - self.type = type - self.help = help - self._info = kwargs - - def __getattr__(self, name: str) -> t.Any: - return self._info.get(name) - - -# Only Bash >= 4.4 has the nosort option. -_SOURCE_BASH = """\ -%(complete_func)s() { - local IFS=$'\\n' - local response - - response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ -%(complete_var)s=bash_complete $1) - - for completion in $response; do - IFS=',' read type value <<< "$completion" - - if [[ $type == 'dir' ]]; then - COMPREPLY=() - compopt -o dirnames - elif [[ $type == 'file' ]]; then - COMPREPLY=() - compopt -o default - elif [[ $type == 'plain' ]]; then - COMPREPLY+=($value) - fi - done - - return 0 -} - -%(complete_func)s_setup() { - complete -o nosort -F %(complete_func)s %(prog_name)s -} - -%(complete_func)s_setup; -""" - -_SOURCE_ZSH = """\ -#compdef %(prog_name)s - -%(complete_func)s() { - local -a completions - local -a completions_with_descriptions - local -a response - (( ! $+commands[%(prog_name)s] )) && return 1 - - response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ -%(complete_var)s=zsh_complete %(prog_name)s)}") - - for type key descr in ${response}; do - if [[ "$type" == "plain" ]]; then - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - elif [[ "$type" == "dir" ]]; then - _path_files -/ - elif [[ "$type" == "file" ]]; then - _path_files -f - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -a completions - fi -} - -compdef %(complete_func)s %(prog_name)s; -""" - -_SOURCE_FISH = """\ -function %(complete_func)s; - set -l response; - - for value in (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ -COMP_CWORD=(commandline -t) %(prog_name)s); - set response $response $value; - end; - - for completion in $response; - set -l metadata (string split "," $completion); - - if test $metadata[1] = "dir"; - __fish_complete_directories $metadata[2]; - else if test $metadata[1] = "file"; - __fish_complete_path $metadata[2]; - else if test $metadata[1] = "plain"; - echo $metadata[2]; - end; - end; -end; - -complete --no-files --command %(prog_name)s --arguments \ -"(%(complete_func)s)"; -""" - - -class ShellComplete: - """Base class for providing shell completion support. A subclass for - a given shell will override attributes and methods to implement the - completion instructions (``source`` and ``complete``). - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - - .. versionadded:: 8.0 - """ - - name: t.ClassVar[str] - """Name to register the shell as with :func:`add_completion_class`. 
- This is used in completion instructions (``{name}_source`` and - ``{name}_complete``). - """ - - source_template: t.ClassVar[str] - """Completion script template formatted by :meth:`source`. This must - be provided by subclasses. - """ - - def __init__( - self, - cli: BaseCommand, - ctx_args: t.Dict[str, t.Any], - prog_name: str, - complete_var: str, - ) -> None: - self.cli = cli - self.ctx_args = ctx_args - self.prog_name = prog_name - self.complete_var = complete_var - - @property - def func_name(self) -> str: - """The name of the shell function defined by the completion - script. - """ - safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), re.ASCII) - return f"_{safe_name}_completion" - - def source_vars(self) -> t.Dict[str, t.Any]: - """Vars for formatting :attr:`source_template`. - - By default this provides ``complete_func``, ``complete_var``, - and ``prog_name``. - """ - return { - "complete_func": self.func_name, - "complete_var": self.complete_var, - "prog_name": self.prog_name, - } - - def source(self) -> str: - """Produce the shell script that defines the completion - function. By default this ``%``-style formats - :attr:`source_template` with the dict returned by - :meth:`source_vars`. - """ - return self.source_template % self.source_vars() - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - """Use the env vars defined by the shell script to return a - tuple of ``args, incomplete``. This must be implemented by - subclasses. - """ - raise NotImplementedError - - def get_completions( - self, args: t.List[str], incomplete: str - ) -> t.List[CompletionItem]: - """Determine the context and last complete command or parameter - from the complete args. Call that object's ``shell_complete`` - method to get the completions for the incomplete value. - - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) - obj, incomplete = _resolve_incomplete(ctx, args, incomplete) - return obj.shell_complete(ctx, incomplete) - - def format_completion(self, item: CompletionItem) -> str: - """Format a completion item into the form recognized by the - shell script. This must be implemented by subclasses. - - :param item: Completion item to format. - """ - raise NotImplementedError - - def complete(self) -> str: - """Produce the completion data to send back to the shell. - - By default this calls :meth:`get_completion_args`, gets the - completions, then calls :meth:`format_completion` for each - completion. - """ - args, incomplete = self.get_completion_args() - completions = self.get_completions(args, incomplete) - out = [self.format_completion(item) for item in completions] - return "\n".join(out) - - -class BashComplete(ShellComplete): - """Shell completion for Bash.""" - - name = "bash" - source_template = _SOURCE_BASH - - def _check_version(self) -> None: - import subprocess - - output = subprocess.run( - ["bash", "-c", "echo ${BASH_VERSION}"], stdout=subprocess.PIPE - ) - match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) - - if match is not None: - major, minor = match.groups() - - if major < "4" or major == "4" and minor < "4": - raise RuntimeError( - _( - "Shell completion is not supported for Bash" - " versions older than 4.4." 
- ) - ) - else: - raise RuntimeError( - _("Couldn't detect Bash version, shell completion is not supported.") - ) - - def source(self) -> str: - self._check_version() - return super().source() - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type},{item.value}" - - -class ZshComplete(ShellComplete): - """Shell completion for Zsh.""" - - name = "zsh" - source_template = _SOURCE_ZSH - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" - - -class FishComplete(ShellComplete): - """Shell completion for Fish.""" - - name = "fish" - source_template = _SOURCE_FISH - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - incomplete = os.environ["COMP_CWORD"] - args = cwords[1:] - - # Fish stores the partial word in both COMP_WORDS and - # COMP_CWORD, remove it from complete args. - if incomplete and args and args[-1] == incomplete: - args.pop() - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - if item.help: - return f"{item.type},{item.value}\t{item.help}" - - return f"{item.type},{item.value}" - - -_available_shells: t.Dict[str, t.Type[ShellComplete]] = { - "bash": BashComplete, - "fish": FishComplete, - "zsh": ZshComplete, -} - - -def add_completion_class( - cls: t.Type[ShellComplete], name: t.Optional[str] = None -) -> None: - """Register a :class:`ShellComplete` subclass under the given name. - The name will be provided by the completion instruction environment - variable during completion. - - :param cls: The completion class that will handle completion for the - shell. - :param name: Name to register the class under. Defaults to the - class's ``name`` attribute. - """ - if name is None: - name = cls.name - - _available_shells[name] = cls - - -def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: - """Look up a registered :class:`ShellComplete` subclass by the name - provided by the completion instruction environment variable. If the - name isn't registered, returns ``None``. - - :param shell: Name the class is registered under. - """ - return _available_shells.get(shell) - - -def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: - """Determine if the given parameter is an argument that can still - accept values. - - :param ctx: Invocation context for the command represented by the - parsed complete args. - :param param: Argument object being checked. 
- """ - if not isinstance(param, Argument): - return False - - assert param.name is not None - value = ctx.params[param.name] - return ( - param.nargs == -1 - or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE - or ( - param.nargs > 1 - and isinstance(value, (tuple, list)) - and len(value) < param.nargs - ) - ) - - -def _start_of_option(ctx: Context, value: str) -> bool: - """Check if the value looks like the start of an option.""" - if not value: - return False - - c = value[0] - return c in ctx._opt_prefixes - - -def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: - """Determine if the given parameter is an option that needs a value. - - :param args: List of complete args before the incomplete value. - :param param: Option object being checked. - """ - if not isinstance(param, Option): - return False - - if param.is_flag or param.count: - return False - - last_option = None - - for index, arg in enumerate(reversed(args)): - if index + 1 > param.nargs: - break - - if _start_of_option(ctx, arg): - last_option = arg - - return last_option is not None and last_option in param.opts - - -def _resolve_context( - cli: BaseCommand, ctx_args: t.Dict[str, t.Any], prog_name: str, args: t.List[str] -) -> Context: - """Produce the context hierarchy starting with the command and - traversing the complete arguments. This only follows the commands, - it doesn't trigger input prompts or callbacks. - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param args: List of complete args before the incomplete value. - """ - ctx_args["resilient_parsing"] = True - ctx = cli.make_context(prog_name, args.copy(), **ctx_args) - args = ctx.protected_args + ctx.args - - while args: - command = ctx.command - - if isinstance(command, MultiCommand): - if not command.chain: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) - args = ctx.protected_args + ctx.args - else: - while args: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - sub_ctx = cmd.make_context( - name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - resilient_parsing=True, - ) - args = sub_ctx.args - - ctx = sub_ctx - args = [*sub_ctx.protected_args, *sub_ctx.args] - else: - break - - return ctx - - -def _resolve_incomplete( - ctx: Context, args: t.List[str], incomplete: str -) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: - """Find the Click object that will handle the completion of the - incomplete value. Return the object and the incomplete value. - - :param ctx: Invocation context for the command represented by - the parsed complete args. - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - # Different shells treat an "=" between a long option name and - # value differently. Might keep the value joined, return the "=" - # as a separate item, or return the split name and value. Always - # split and discard the "=" to make completion easier. - if incomplete == "=": - incomplete = "" - elif "=" in incomplete and _start_of_option(ctx, incomplete): - name, _, incomplete = incomplete.partition("=") - args.append(name) - - # The "--" marker tells Click to stop treating values as options - # even if they start with the option character. 
If it hasn't been - # given and the incomplete arg looks like an option, the current - # command will provide option name completions. - if "--" not in args and _start_of_option(ctx, incomplete): - return ctx.command, incomplete - - params = ctx.command.get_params(ctx) - - # If the last complete arg is an option name with an incomplete - # value, the option will provide value completions. - for param in params: - if _is_incomplete_option(ctx, args, param): - return param, incomplete - - # It's not an option name or value. The first argument without a - # parsed value will provide value completions. - for param in params: - if _is_incomplete_argument(ctx, param): - return param, incomplete - - # There were no unparsed arguments, the command may be a group that - # will provide command name completions. - return ctx.command, incomplete diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/termui.py b/venv_flaskchat/lib/python3.11/site-packages/click/termui.py deleted file mode 100644 index bfb2f5a..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/termui.py +++ /dev/null @@ -1,787 +0,0 @@ -import inspect -import io -import itertools -import os -import sys -import typing as t -from gettext import gettext as _ - -from ._compat import isatty -from ._compat import strip_ansi -from ._compat import WIN -from .exceptions import Abort -from .exceptions import UsageError -from .globals import resolve_color_default -from .types import Choice -from .types import convert_type -from .types import ParamType -from .utils import echo -from .utils import LazyFile - -if t.TYPE_CHECKING: - from ._termui_impl import ProgressBar - -V = t.TypeVar("V") - -# The prompt functions to use. The doc tools currently override these -# functions to customize how they work. -visible_prompt_func: t.Callable[[str], str] = input - -_ansi_colors = { - "black": 30, - "red": 31, - "green": 32, - "yellow": 33, - "blue": 34, - "magenta": 35, - "cyan": 36, - "white": 37, - "reset": 39, - "bright_black": 90, - "bright_red": 91, - "bright_green": 92, - "bright_yellow": 93, - "bright_blue": 94, - "bright_magenta": 95, - "bright_cyan": 96, - "bright_white": 97, -} -_ansi_reset_all = "\033[0m" - - -def hidden_prompt_func(prompt: str) -> str: - import getpass - - return getpass.getpass(prompt) - - -def _build_prompt( - text: str, - suffix: str, - show_default: bool = False, - default: t.Optional[t.Any] = None, - show_choices: bool = True, - type: t.Optional[ParamType] = None, -) -> str: - prompt = text - if type is not None and show_choices and isinstance(type, Choice): - prompt += f" ({', '.join(map(str, type.choices))})" - if default is not None and show_default: - prompt = f"{prompt} [{_format_default(default)}]" - return f"{prompt}{suffix}" - - -def _format_default(default: t.Any) -> t.Any: - if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): - return default.name # type: ignore - - return default - - -def prompt( - text: str, - default: t.Optional[t.Any] = None, - hide_input: bool = False, - confirmation_prompt: t.Union[bool, str] = False, - type: t.Optional[t.Union[ParamType, t.Any]] = None, - value_proc: t.Optional[t.Callable[[str], t.Any]] = None, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, - show_choices: bool = True, -) -> t.Any: - """Prompts a user for input. This is a convenience function that can - be used to prompt a user for input later. 
- - If the user aborts the input by sending an interrupt signal, this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the text to show for the prompt. - :param default: the default value to use if no input happens. If this - is not given it will prompt until it's aborted. - :param hide_input: if this is set to true then the input value will - be hidden. - :param confirmation_prompt: Prompt a second time to confirm the - value. Can be set to a string instead of ``True`` to customize - the message. - :param type: the type to use to check the value against. - :param value_proc: if this parameter is provided it's a function that - is invoked instead of the type conversion to - convert a value. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - :param show_choices: Show or hide choices if the passed type is a Choice. - For example if type is a Choice of either day or week, - show_choices is true and text is "Group by" then the - prompt will be "Group by (day, week): ". - - .. versionadded:: 8.0 - ``confirmation_prompt`` can be a custom string. - - .. versionadded:: 7.0 - Added the ``show_choices`` parameter. - - .. versionadded:: 6.0 - Added unicode support for cmd.exe on Windows. - - .. versionadded:: 4.0 - Added the `err` parameter. - - """ - - def prompt_func(text: str) -> str: - f = hidden_prompt_func if hide_input else visible_prompt_func - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(text.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - return f(" ") - except (KeyboardInterrupt, EOFError): - # getpass doesn't print a newline if the user aborts input with ^C. - # Allegedly this behavior is inherited from getpass(3). - # A doc bug has been filed at https://bugs.python.org/issue24711 - if hide_input: - echo(None, err=err) - raise Abort() from None - - if value_proc is None: - value_proc = convert_type(type, default) - - prompt = _build_prompt( - text, prompt_suffix, show_default, default, show_choices, type - ) - - if confirmation_prompt: - if confirmation_prompt is True: - confirmation_prompt = _("Repeat for confirmation") - - confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) - - while True: - while True: - value = prompt_func(prompt) - if value: - break - elif default is not None: - value = default - break - try: - result = value_proc(value) - except UsageError as e: - if hide_input: - echo(_("Error: The value you entered was invalid."), err=err) - else: - echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 - continue - if not confirmation_prompt: - return result - while True: - value2 = prompt_func(confirmation_prompt) - is_empty = not value and not value2 - if value2 or is_empty: - break - if value == value2: - return result - echo(_("Error: The two entered values do not match."), err=err) - - -def confirm( - text: str, - default: t.Optional[bool] = False, - abort: bool = False, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, -) -> bool: - """Prompts for confirmation (yes/no question). - - If the user aborts the input by sending a interrupt signal this - function will catch it and raise a :exc:`Abort` exception. 
- - :param text: the question to ask. - :param default: The default value to use when no input is given. If - ``None``, repeat until input is given. - :param abort: if this is set to `True` a negative answer aborts the - exception by raising :exc:`Abort`. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - - .. versionchanged:: 8.0 - Repeat until input is given if ``default`` is ``None``. - - .. versionadded:: 4.0 - Added the ``err`` parameter. - """ - prompt = _build_prompt( - text, - prompt_suffix, - show_default, - "y/n" if default is None else ("Y/n" if default else "y/N"), - ) - - while True: - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(prompt.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - value = visible_prompt_func(" ").lower().strip() - except (KeyboardInterrupt, EOFError): - raise Abort() from None - if value in ("y", "yes"): - rv = True - elif value in ("n", "no"): - rv = False - elif default is not None and value == "": - rv = default - else: - echo(_("Error: invalid input"), err=err) - continue - break - if abort and not rv: - raise Abort() - return rv - - -def echo_via_pager( - text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], - color: t.Optional[bool] = None, -) -> None: - """This function takes a text and shows it via an environment specific - pager on stdout. - - .. versionchanged:: 3.0 - Added the `color` flag. - - :param text_or_generator: the text to page, or alternatively, a - generator emitting the text to page. - :param color: controls if the pager supports ANSI colors or not. The - default is autodetection. - """ - color = resolve_color_default(color) - - if inspect.isgeneratorfunction(text_or_generator): - i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() - elif isinstance(text_or_generator, str): - i = [text_or_generator] - else: - i = iter(t.cast(t.Iterable[str], text_or_generator)) - - # convert every element of i to a text type if necessary - text_generator = (el if isinstance(el, str) else str(el) for el in i) - - from ._termui_impl import pager - - return pager(itertools.chain(text_generator, "\n"), color) - - -def progressbar( - iterable: t.Optional[t.Iterable[V]] = None, - length: t.Optional[int] = None, - label: t.Optional[str] = None, - show_eta: bool = True, - show_percent: t.Optional[bool] = None, - show_pos: bool = False, - item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.Optional[t.TextIO] = None, - color: t.Optional[bool] = None, - update_min_steps: int = 1, -) -> "ProgressBar[V]": - """This function creates an iterable context manager that can be used - to iterate over something while showing a progress bar. It will - either iterate over the `iterable` or `length` items (that are counted - up). While iteration happens, this function will print a rendered - progress bar to the given `file` (defaults to stdout) and will attempt - to calculate remaining time and more. By default, this progress bar - will not be rendered if the file is not a terminal. 
- - The context manager creates the progress bar. When the context - manager is entered the progress bar is already created. With every - iteration over the progress bar, the iterable passed to the bar is - advanced and the bar is updated. When the context manager exits, - a newline is printed and the progress bar is finalized on screen. - - Note: The progress bar is currently designed for use cases where the - total progress can be expected to take at least several seconds. - Because of this, the ProgressBar class object won't display - progress that is considered too fast, and progress where the time - between steps is less than a second. - - No printing must happen or the progress bar will be unintentionally - destroyed. - - Example usage:: - - with progressbar(items) as bar: - for item in bar: - do_something_with(item) - - Alternatively, if no iterable is specified, one can manually update the - progress bar through the `update()` method instead of directly - iterating over the progress bar. The update method accepts the number - of steps to increment the bar with:: - - with progressbar(length=chunks.total_bytes) as bar: - for chunk in chunks: - process_chunk(chunk) - bar.update(chunks.bytes) - - The ``update()`` method also takes an optional value specifying the - ``current_item`` at the new position. This is useful when used - together with ``item_show_func`` to customize the output for each - manual step:: - - with click.progressbar( - length=total_size, - label='Unzipping archive', - item_show_func=lambda a: a.filename - ) as bar: - for archive in zip_file: - archive.extract() - bar.update(archive.size, archive) - - :param iterable: an iterable to iterate over. If not provided the length - is required. - :param length: the number of items to iterate over. By default the - progressbar will attempt to ask the iterator about its - length, which might or might not work. If an iterable is - also provided this parameter can be used to override the - length. If an iterable is not provided the progress bar - will iterate over a range of that length. - :param label: the label to show next to the progress bar. - :param show_eta: enables or disables the estimated time display. This is - automatically disabled if the length cannot be - determined. - :param show_percent: enables or disables the percentage display. The - default is `True` if the iterable has a length or - `False` if not. - :param show_pos: enables or disables the absolute position display. The - default is `False`. - :param item_show_func: A function called with the current item which - can return a string to show next to the progress bar. If the - function returns ``None`` nothing is shown. The current item can - be ``None``, such as when entering and exiting the bar. - :param fill_char: the character to use to show the filled part of the - progress bar. - :param empty_char: the character to use to show the non-filled part of - the progress bar. - :param bar_template: the format string to use as template for the bar. - The parameters in it are ``label`` for the label, - ``bar`` for the progress bar and ``info`` for the - info section. - :param info_sep: the separator between multiple info items (eta etc.) - :param width: the width of the progress bar in characters, 0 means full - terminal width - :param file: The file to write to. If this is not a terminal then - only the label is printed. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. 
This is only needed if ANSI - codes are included anywhere in the progress bar output - which is not the case by default. - :param update_min_steps: Render only when this many updates have - completed. This allows tuning for very fast iterators. - - .. versionchanged:: 8.0 - Output is shown even if execution time is less than 0.5 seconds. - - .. versionchanged:: 8.0 - ``item_show_func`` shows the current item, not the previous one. - - .. versionchanged:: 8.0 - Labels are echoed if the output is not a TTY. Reverts a change - in 7.0 that removed all output. - - .. versionadded:: 8.0 - Added the ``update_min_steps`` parameter. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. Added the ``update`` method to - the object. - - .. versionadded:: 2.0 - """ - from ._termui_impl import ProgressBar - - color = resolve_color_default(color) - return ProgressBar( - iterable=iterable, - length=length, - show_eta=show_eta, - show_percent=show_percent, - show_pos=show_pos, - item_show_func=item_show_func, - fill_char=fill_char, - empty_char=empty_char, - bar_template=bar_template, - info_sep=info_sep, - file=file, - label=label, - width=width, - color=color, - update_min_steps=update_min_steps, - ) - - -def clear() -> None: - """Clears the terminal screen. This will have the effect of clearing - the whole visible space of the terminal and moving the cursor to the - top left. This does not do anything if not connected to a terminal. - - .. versionadded:: 2.0 - """ - if not isatty(sys.stdout): - return - if WIN: - os.system("cls") - else: - sys.stdout.write("\033[2J\033[1;1H") - - -def _interpret_color( - color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 -) -> str: - if isinstance(color, int): - return f"{38 + offset};5;{color:d}" - - if isinstance(color, (tuple, list)): - r, g, b = color - return f"{38 + offset};2;{r:d};{g:d};{b:d}" - - return str(_ansi_colors[color] + offset) - - -def style( - text: t.Any, - fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, - bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, - bold: t.Optional[bool] = None, - dim: t.Optional[bool] = None, - underline: t.Optional[bool] = None, - overline: t.Optional[bool] = None, - italic: t.Optional[bool] = None, - blink: t.Optional[bool] = None, - reverse: t.Optional[bool] = None, - strikethrough: t.Optional[bool] = None, - reset: bool = True, -) -> str: - """Styles a text with ANSI styles and returns the new string. By - default the styling is self contained which means that at the end - of the string a reset code is issued. This can be prevented by - passing ``reset=False``. - - Examples:: - - click.echo(click.style('Hello World!', fg='green')) - click.echo(click.style('ATTENTION!', blink=True)) - click.echo(click.style('Some things', reverse=True, fg='cyan')) - click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) - - Supported color names: - - * ``black`` (might be a gray) - * ``red`` - * ``green`` - * ``yellow`` (might be an orange) - * ``blue`` - * ``magenta`` - * ``cyan`` - * ``white`` (might be light gray) - * ``bright_black`` - * ``bright_red`` - * ``bright_green`` - * ``bright_yellow`` - * ``bright_blue`` - * ``bright_magenta`` - * ``bright_cyan`` - * ``bright_white`` - * ``reset`` (reset the color code only) - - If the terminal supports it, color may also be specified as: - - - An integer in the interval [0, 255]. The terminal must support - 8-bit/256-color mode. - - An RGB tuple of three integers in [0, 255]. 
The terminal must - support 24-bit/true-color mode. - - See https://en.wikipedia.org/wiki/ANSI_color and - https://gist.github.com/XVilka/8346728 for more information. - - :param text: the string to style with ansi codes. - :param fg: if provided this will become the foreground color. - :param bg: if provided this will become the background color. - :param bold: if provided this will enable or disable bold mode. - :param dim: if provided this will enable or disable dim mode. This is - badly supported. - :param underline: if provided this will enable or disable underline. - :param overline: if provided this will enable or disable overline. - :param italic: if provided this will enable or disable italic. - :param blink: if provided this will enable or disable blinking. - :param reverse: if provided this will enable or disable inverse - rendering (foreground becomes background and the - other way round). - :param strikethrough: if provided this will enable or disable - striking through text. - :param reset: by default a reset-all code is added at the end of the - string which means that styles do not carry over. This - can be disabled to compose styles. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. - - .. versionchanged:: 8.0 - Added support for 256 and RGB color codes. - - .. versionchanged:: 8.0 - Added the ``strikethrough``, ``italic``, and ``overline`` - parameters. - - .. versionchanged:: 7.0 - Added support for bright colors. - - .. versionadded:: 2.0 - """ - if not isinstance(text, str): - text = str(text) - - bits = [] - - if fg: - try: - bits.append(f"\033[{_interpret_color(fg)}m") - except KeyError: - raise TypeError(f"Unknown color {fg!r}") from None - - if bg: - try: - bits.append(f"\033[{_interpret_color(bg, 10)}m") - except KeyError: - raise TypeError(f"Unknown color {bg!r}") from None - - if bold is not None: - bits.append(f"\033[{1 if bold else 22}m") - if dim is not None: - bits.append(f"\033[{2 if dim else 22}m") - if underline is not None: - bits.append(f"\033[{4 if underline else 24}m") - if overline is not None: - bits.append(f"\033[{53 if overline else 55}m") - if italic is not None: - bits.append(f"\033[{3 if italic else 23}m") - if blink is not None: - bits.append(f"\033[{5 if blink else 25}m") - if reverse is not None: - bits.append(f"\033[{7 if reverse else 27}m") - if strikethrough is not None: - bits.append(f"\033[{9 if strikethrough else 29}m") - bits.append(text) - if reset: - bits.append(_ansi_reset_all) - return "".join(bits) - - -def unstyle(text: str) -> str: - """Removes ANSI styling information from a string. Usually it's not - necessary to use this function as Click's echo function will - automatically remove styling if necessary. - - .. versionadded:: 2.0 - - :param text: the text to remove style information from. - """ - return strip_ansi(text) - - -def secho( - message: t.Optional[t.Any] = None, - file: t.Optional[t.IO[t.AnyStr]] = None, - nl: bool = True, - err: bool = False, - color: t.Optional[bool] = None, - **styles: t.Any, -) -> None: - """This function combines :func:`echo` and :func:`style` into one - call. As such the following two calls are the same:: - - click.secho('Hello World!', fg='green') - click.echo(click.style('Hello World!', fg='green')) - - All keyword arguments are forwarded to the underlying functions - depending on which one they go with. - - Non-string types will be converted to :class:`str`. However, - :class:`bytes` are passed directly to :meth:`echo` without applying - style. 
If you want to style bytes that represent text, call - :meth:`bytes.decode` first. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. Bytes are - passed through without style applied. - - .. versionadded:: 2.0 - """ - if message is not None and not isinstance(message, (bytes, bytearray)): - message = style(message, **styles) - - return echo(message, file=file, nl=nl, err=err, color=color) - - -def edit( - text: t.Optional[t.AnyStr] = None, - editor: t.Optional[str] = None, - env: t.Optional[t.Mapping[str, str]] = None, - require_save: bool = True, - extension: str = ".txt", - filename: t.Optional[str] = None, -) -> t.Optional[t.AnyStr]: - r"""Edits the given text in the defined editor. If an editor is given - (should be the full path to the executable but the regular operating - system search path is used for finding the executable) it overrides - the detected editor. Optionally, some environment variables can be - used. If the editor is closed without changes, `None` is returned. In - case a file is edited directly the return value is always `None` and - `require_save` and `extension` are ignored. - - If the editor cannot be opened a :exc:`UsageError` is raised. - - Note for Windows: to simplify cross-platform usage, the newlines are - automatically converted from POSIX to Windows and vice versa. As such, - the message here will have ``\n`` as newline markers. - - :param text: the text to edit. - :param editor: optionally the editor to use. Defaults to automatic - detection. - :param env: environment variables to forward to the editor. - :param require_save: if this is true, then not saving in the editor - will make the return value become `None`. - :param extension: the extension to tell the editor about. This defaults - to `.txt` but changing this might change syntax - highlighting. - :param filename: if provided it will edit this file instead of the - provided text contents. It will not use a temporary - file as an indirection in that case. - """ - from ._termui_impl import Editor - - ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) - - if filename is None: - return ed.edit(text) - - ed.edit_file(filename) - return None - - -def launch(url: str, wait: bool = False, locate: bool = False) -> int: - """This function launches the given URL (or filename) in the default - viewer application for this file type. If this is an executable, it - might launch the executable in a new session. The return value is - the exit code of the launched application. Usually, ``0`` indicates - success. - - Examples:: - - click.launch('https://click.palletsprojects.com/') - click.launch('/my/downloaded/file', locate=True) - - .. versionadded:: 2.0 - - :param url: URL or filename of the thing to launch. - :param wait: Wait for the program to exit before returning. This - only works if the launched program blocks. In particular, - ``xdg-open`` on Linux does not block. - :param locate: if this is set to `True` then instead of launching the - application associated with the URL it will attempt to - launch a file manager with the file located. This - might have weird effects if the URL does not point to - the filesystem. - """ - from ._termui_impl import open_url - - return open_url(url, wait=wait, locate=locate) - - -# If this is provided, getchar() calls into this instead. This is used -# for unittesting purposes. 
-_getchar: t.Optional[t.Callable[[bool], str]] = None - - -def getchar(echo: bool = False) -> str: - """Fetches a single character from the terminal and returns it. This - will always return a unicode character and under certain rare - circumstances this might return more than one character. The - situations which more than one character is returned is when for - whatever reason multiple characters end up in the terminal buffer or - standard input was not actually a terminal. - - Note that this will always read from the terminal, even if something - is piped into the standard input. - - Note for Windows: in rare cases when typing non-ASCII characters, this - function might wait for a second character and then return both at once. - This is because certain Unicode characters look like special-key markers. - - .. versionadded:: 2.0 - - :param echo: if set to `True`, the character read will also show up on - the terminal. The default is to not show it. - """ - global _getchar - - if _getchar is None: - from ._termui_impl import getchar as f - - _getchar = f - - return _getchar(echo) - - -def raw_terminal() -> t.ContextManager[int]: - from ._termui_impl import raw_terminal as f - - return f() - - -def pause(info: t.Optional[str] = None, err: bool = False) -> None: - """This command stops execution and waits for the user to press any - key to continue. This is similar to the Windows batch "pause" - command. If the program is not run through a terminal, this command - will instead do nothing. - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param info: The message to print before pausing. Defaults to - ``"Press any key to continue..."``. - :param err: if set to message goes to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - if not isatty(sys.stdin) or not isatty(sys.stdout): - return - - if info is None: - info = _("Press any key to continue...") - - try: - if info: - echo(info, nl=False, err=err) - try: - getchar() - except (KeyboardInterrupt, EOFError): - pass - finally: - if info: - echo(err=err) diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/testing.py b/venv_flaskchat/lib/python3.11/site-packages/click/testing.py deleted file mode 100644 index e395c2e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/testing.py +++ /dev/null @@ -1,479 +0,0 @@ -import contextlib -import io -import os -import shlex -import shutil -import sys -import tempfile -import typing as t -from types import TracebackType - -from . import formatting -from . import termui -from . 
import utils -from ._compat import _find_binary_reader - -if t.TYPE_CHECKING: - from .core import BaseCommand - - -class EchoingStdin: - def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: - self._input = input - self._output = output - self._paused = False - - def __getattr__(self, x: str) -> t.Any: - return getattr(self._input, x) - - def _echo(self, rv: bytes) -> bytes: - if not self._paused: - self._output.write(rv) - - return rv - - def read(self, n: int = -1) -> bytes: - return self._echo(self._input.read(n)) - - def read1(self, n: int = -1) -> bytes: - return self._echo(self._input.read1(n)) # type: ignore - - def readline(self, n: int = -1) -> bytes: - return self._echo(self._input.readline(n)) - - def readlines(self) -> t.List[bytes]: - return [self._echo(x) for x in self._input.readlines()] - - def __iter__(self) -> t.Iterator[bytes]: - return iter(self._echo(x) for x in self._input) - - def __repr__(self) -> str: - return repr(self._input) - - -@contextlib.contextmanager -def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: - if stream is None: - yield - else: - stream._paused = True - yield - stream._paused = False - - -class _NamedTextIOWrapper(io.TextIOWrapper): - def __init__( - self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any - ) -> None: - super().__init__(buffer, **kwargs) - self._name = name - self._mode = mode - - @property - def name(self) -> str: - return self._name - - @property - def mode(self) -> str: - return self._mode - - -def make_input_stream( - input: t.Optional[t.Union[str, bytes, t.IO]], charset: str -) -> t.BinaryIO: - # Is already an input stream. - if hasattr(input, "read"): - rv = _find_binary_reader(t.cast(t.IO, input)) - - if rv is not None: - return rv - - raise TypeError("Could not find binary reader for input stream.") - - if input is None: - input = b"" - elif isinstance(input, str): - input = input.encode(charset) - - return io.BytesIO(t.cast(bytes, input)) - - -class Result: - """Holds the captured result of an invoked CLI script.""" - - def __init__( - self, - runner: "CliRunner", - stdout_bytes: bytes, - stderr_bytes: t.Optional[bytes], - return_value: t.Any, - exit_code: int, - exception: t.Optional[BaseException], - exc_info: t.Optional[ - t.Tuple[t.Type[BaseException], BaseException, TracebackType] - ] = None, - ): - #: The runner that created the result - self.runner = runner - #: The standard output as bytes. - self.stdout_bytes = stdout_bytes - #: The standard error as bytes, or None if not available - self.stderr_bytes = stderr_bytes - #: The value returned from the invoked command. - #: - #: .. versionadded:: 8.0 - self.return_value = return_value - #: The exit code as integer. - self.exit_code = exit_code - #: The exception that happened if one did. 
- self.exception = exception - #: The traceback - self.exc_info = exc_info - - @property - def output(self) -> str: - """The (standard) output as unicode string.""" - return self.stdout - - @property - def stdout(self) -> str: - """The standard output as unicode string.""" - return self.stdout_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stderr(self) -> str: - """The standard error as unicode string.""" - if self.stderr_bytes is None: - raise ValueError("stderr not separately captured") - return self.stderr_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - def __repr__(self) -> str: - exc_str = repr(self.exception) if self.exception else "okay" - return f"<{type(self).__name__} {exc_str}>" - - -class CliRunner: - """The CLI runner provides functionality to invoke a Click command line - script for unittesting purposes in a isolated environment. This only - works in single-threaded systems without any concurrency as it changes the - global interpreter state. - - :param charset: the character set for the input and output data. - :param env: a dictionary with environment variables for overriding. - :param echo_stdin: if this is set to `True`, then reading from stdin writes - to stdout. This is useful for showing examples in - some circumstances. Note that regular prompts - will automatically echo the input. - :param mix_stderr: if this is set to `False`, then stdout and stderr are - preserved as independent streams. This is useful for - Unix-philosophy apps that have predictable stdout and - noisy stderr, such that each may be measured - independently - """ - - def __init__( - self, - charset: str = "utf-8", - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - echo_stdin: bool = False, - mix_stderr: bool = True, - ) -> None: - self.charset = charset - self.env = env or {} - self.echo_stdin = echo_stdin - self.mix_stderr = mix_stderr - - def get_default_prog_name(self, cli: "BaseCommand") -> str: - """Given a command object it will return the default program name - for it. The default is the `name` attribute or ``"root"`` if not - set. - """ - return cli.name or "root" - - def make_env( - self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None - ) -> t.Mapping[str, t.Optional[str]]: - """Returns the environment overrides for invoking a script.""" - rv = dict(self.env) - if overrides: - rv.update(overrides) - return rv - - @contextlib.contextmanager - def isolation( - self, - input: t.Optional[t.Union[str, bytes, t.IO]] = None, - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - color: bool = False, - ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: - """A context manager that sets up the isolation for invoking of a - command line tool. This sets up stdin with the given input data - and `os.environ` with the overrides from the given dictionary. - This also rebinds some internals in Click to be mocked (like the - prompt functionality). - - This is automatically done in the :meth:`invoke` method. - - :param input: the input stream to put into sys.stdin. - :param env: the environment overrides as dictionary. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionchanged:: 8.0 - ``stderr`` is opened with ``errors="backslashreplace"`` - instead of the default ``"strict"``. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. 
- """ - bytes_input = make_input_stream(input, self.charset) - echo_input = None - - old_stdin = sys.stdin - old_stdout = sys.stdout - old_stderr = sys.stderr - old_forced_width = formatting.FORCED_WIDTH - formatting.FORCED_WIDTH = 80 - - env = self.make_env(env) - - bytes_output = io.BytesIO() - - if self.echo_stdin: - bytes_input = echo_input = t.cast( - t.BinaryIO, EchoingStdin(bytes_input, bytes_output) - ) - - sys.stdin = text_input = _NamedTextIOWrapper( - bytes_input, encoding=self.charset, name="", mode="r" - ) - - if self.echo_stdin: - # Force unbuffered reads, otherwise TextIOWrapper reads a - # large chunk which is echoed early. - text_input._CHUNK_SIZE = 1 # type: ignore - - sys.stdout = _NamedTextIOWrapper( - bytes_output, encoding=self.charset, name="", mode="w" - ) - - bytes_error = None - if self.mix_stderr: - sys.stderr = sys.stdout - else: - bytes_error = io.BytesIO() - sys.stderr = _NamedTextIOWrapper( - bytes_error, - encoding=self.charset, - name="", - mode="w", - errors="backslashreplace", - ) - - @_pause_echo(echo_input) # type: ignore - def visible_input(prompt: t.Optional[str] = None) -> str: - sys.stdout.write(prompt or "") - val = text_input.readline().rstrip("\r\n") - sys.stdout.write(f"{val}\n") - sys.stdout.flush() - return val - - @_pause_echo(echo_input) # type: ignore - def hidden_input(prompt: t.Optional[str] = None) -> str: - sys.stdout.write(f"{prompt or ''}\n") - sys.stdout.flush() - return text_input.readline().rstrip("\r\n") - - @_pause_echo(echo_input) # type: ignore - def _getchar(echo: bool) -> str: - char = sys.stdin.read(1) - - if echo: - sys.stdout.write(char) - - sys.stdout.flush() - return char - - default_color = color - - def should_strip_ansi( - stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None - ) -> bool: - if color is None: - return not default_color - return not color - - old_visible_prompt_func = termui.visible_prompt_func - old_hidden_prompt_func = termui.hidden_prompt_func - old__getchar_func = termui._getchar - old_should_strip_ansi = utils.should_strip_ansi # type: ignore - termui.visible_prompt_func = visible_input - termui.hidden_prompt_func = hidden_input - termui._getchar = _getchar - utils.should_strip_ansi = should_strip_ansi # type: ignore - - old_env = {} - try: - for key, value in env.items(): - old_env[key] = os.environ.get(key) - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - yield (bytes_output, bytes_error) - finally: - for key, value in old_env.items(): - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - sys.stdout = old_stdout - sys.stderr = old_stderr - sys.stdin = old_stdin - termui.visible_prompt_func = old_visible_prompt_func - termui.hidden_prompt_func = old_hidden_prompt_func - termui._getchar = old__getchar_func - utils.should_strip_ansi = old_should_strip_ansi # type: ignore - formatting.FORCED_WIDTH = old_forced_width - - def invoke( - self, - cli: "BaseCommand", - args: t.Optional[t.Union[str, t.Sequence[str]]] = None, - input: t.Optional[t.Union[str, bytes, t.IO]] = None, - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - catch_exceptions: bool = True, - color: bool = False, - **extra: t.Any, - ) -> Result: - """Invokes a command in an isolated environment. The arguments are - forwarded directly to the command line script, the `extra` keyword - arguments are passed to the :meth:`~clickpkg.Command.main` function of - the command. 
- - This returns a :class:`Result` object. - - :param cli: the command to invoke - :param args: the arguments to invoke. It may be given as an iterable - or a string. When given as string it will be interpreted - as a Unix shell command. More details at - :func:`shlex.split`. - :param input: the input data for `sys.stdin`. - :param env: the environment overrides. - :param catch_exceptions: Whether to catch any other exceptions than - ``SystemExit``. - :param extra: the keyword arguments to pass to :meth:`main`. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionchanged:: 8.0 - The result object has the ``return_value`` attribute with - the value returned from the invoked command. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionchanged:: 3.0 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 3.0 - The result object has the ``exc_info`` attribute with the - traceback if available. - """ - exc_info = None - with self.isolation(input=input, env=env, color=color) as outstreams: - return_value = None - exception: t.Optional[BaseException] = None - exit_code = 0 - - if isinstance(args, str): - args = shlex.split(args) - - try: - prog_name = extra.pop("prog_name") - except KeyError: - prog_name = self.get_default_prog_name(cli) - - try: - return_value = cli.main(args=args or (), prog_name=prog_name, **extra) - except SystemExit as e: - exc_info = sys.exc_info() - e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) - - if e_code is None: - e_code = 0 - - if e_code != 0: - exception = e - - if not isinstance(e_code, int): - sys.stdout.write(str(e_code)) - sys.stdout.write("\n") - e_code = 1 - - exit_code = e_code - - except Exception as e: - if not catch_exceptions: - raise - exception = e - exit_code = 1 - exc_info = sys.exc_info() - finally: - sys.stdout.flush() - stdout = outstreams[0].getvalue() - if self.mix_stderr: - stderr = None - else: - stderr = outstreams[1].getvalue() # type: ignore - - return Result( - runner=self, - stdout_bytes=stdout, - stderr_bytes=stderr, - return_value=return_value, - exit_code=exit_code, - exception=exception, - exc_info=exc_info, # type: ignore - ) - - @contextlib.contextmanager - def isolated_filesystem( - self, temp_dir: t.Optional[t.Union[str, os.PathLike]] = None - ) -> t.Iterator[str]: - """A context manager that creates a temporary directory and - changes the current working directory to it. This isolates tests - that affect the contents of the CWD to prevent them from - interfering with each other. - - :param temp_dir: Create the temporary directory under this - directory. If given, the created directory is not removed - when exiting. - - .. versionchanged:: 8.0 - Added the ``temp_dir`` parameter. 
- """ - cwd = os.getcwd() - dt = tempfile.mkdtemp(dir=temp_dir) # type: ignore[type-var] - os.chdir(dt) - - try: - yield t.cast(str, dt) - finally: - os.chdir(cwd) - - if temp_dir is None: - try: - shutil.rmtree(dt) - except OSError: # noqa: B014 - pass diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/types.py b/venv_flaskchat/lib/python3.11/site-packages/click/types.py deleted file mode 100644 index b45ee53..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/types.py +++ /dev/null @@ -1,1073 +0,0 @@ -import os -import stat -import typing as t -from datetime import datetime -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import _get_argv_encoding -from ._compat import get_filesystem_encoding -from ._compat import open_stream -from .exceptions import BadParameter -from .utils import LazyFile -from .utils import safecall - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Context - from .core import Parameter - from .shell_completion import CompletionItem - - -class ParamType: - """Represents the type of a parameter. Validates and converts values - from the command line or Python into the correct type. - - To implement a custom type, subclass and implement at least the - following: - - - The :attr:`name` class attribute must be set. - - Calling an instance of the type with ``None`` must return - ``None``. This is already implemented by default. - - :meth:`convert` must convert string values to the correct type. - - :meth:`convert` must accept values that are already the correct - type. - - It must be able to convert a value if the ``ctx`` and ``param`` - arguments are ``None``. This can occur when converting prompt - input. - """ - - is_composite: t.ClassVar[bool] = False - arity: t.ClassVar[int] = 1 - - #: the descriptive name of this type - name: str - - #: if a list of this type is expected and the value is pulled from a - #: string environment variable, this is what splits it up. `None` - #: means any whitespace. For all parameters the general rule is that - #: whitespace splits them up. The exception are paths and files which - #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on - #: Windows). - envvar_list_splitter: t.ClassVar[t.Optional[str]] = None - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - # The class name without the "ParamType" suffix. - param_type = type(self).__name__.partition("ParamType")[0] - param_type = param_type.partition("ParameterType")[0] - - # Custom subclasses might not remember to set a name. - if hasattr(self, "name"): - name = self.name - else: - name = param_type - - return {"param_type": param_type, "name": name} - - def __call__( - self, - value: t.Any, - param: t.Optional["Parameter"] = None, - ctx: t.Optional["Context"] = None, - ) -> t.Any: - if value is not None: - return self.convert(value, param, ctx) - - def get_metavar(self, param: "Parameter") -> t.Optional[str]: - """Returns the metavar default for this param if it provides one.""" - - def get_missing_message(self, param: "Parameter") -> t.Optional[str]: - """Optionally might return extra information about a missing - parameter. - - .. 
versionadded:: 2.0 - """ - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - """Convert the value to the correct type. This is not called if - the value is ``None`` (the missing value). - - This must accept string values from the command line, as well as - values that are already the correct type. It may also convert - other compatible types. - - The ``param`` and ``ctx`` arguments may be ``None`` in certain - situations, such as when converting prompt input. - - If the value cannot be converted, call :meth:`fail` with a - descriptive message. - - :param value: The value to convert. - :param param: The parameter that is using this type to convert - its value. May be ``None``. - :param ctx: The current context that arrived at this value. May - be ``None``. - """ - return value - - def split_envvar_value(self, rv: str) -> t.Sequence[str]: - """Given a value from an environment variable this splits it up - into small chunks depending on the defined envvar list splitter. - - If the splitter is set to `None`, which means that whitespace splits, - then leading and trailing whitespace is ignored. Otherwise, leading - and trailing splitters usually lead to empty items being included. - """ - return (rv or "").split(self.envvar_list_splitter) - - def fail( - self, - message: str, - param: t.Optional["Parameter"] = None, - ctx: t.Optional["Context"] = None, - ) -> "t.NoReturn": - """Helper method to fail with an invalid value message.""" - raise BadParameter(message, ctx=ctx, param=param) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a list of - :class:`~click.shell_completion.CompletionItem` objects for the - incomplete value. Most types do not provide completions, but - some do, and this allows custom types to provide custom - completions as well. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - return [] - - -class CompositeParamType(ParamType): - is_composite = True - - @property - def arity(self) -> int: # type: ignore - raise NotImplementedError() - - -class FuncParamType(ParamType): - def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: - self.name = func.__name__ - self.func = func - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["func"] = self.func - return info_dict - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - return self.func(value) - except ValueError: - try: - value = str(value) - except UnicodeError: - value = value.decode("utf-8", "replace") - - self.fail(value, param, ctx) - - -class UnprocessedParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - return value - - def __repr__(self) -> str: - return "UNPROCESSED" - - -class StringParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if isinstance(value, bytes): - enc = _get_argv_encoding() - try: - value = value.decode(enc) - except UnicodeError: - fs_enc = get_filesystem_encoding() - if fs_enc != enc: - try: - value = value.decode(fs_enc) - except UnicodeError: - value = value.decode("utf-8", "replace") - else: - value = value.decode("utf-8", "replace") - return value - return str(value) - - def __repr__(self) -> str: - return "STRING" - - -class Choice(ParamType): - """The choice type allows a value to be checked against a fixed set - of supported values. All of these values have to be strings. - - You should only pass a list or tuple of choices. Other iterables - (like generators) may lead to surprising results. - - The resulting value will always be one of the originally passed choices - regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` - being specified. - - See :ref:`choice-opts` for an example. - - :param case_sensitive: Set to false to make choices case - insensitive. Defaults to true. - """ - - name = "choice" - - def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: - self.choices = choices - self.case_sensitive = case_sensitive - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["choices"] = self.choices - info_dict["case_sensitive"] = self.case_sensitive - return info_dict - - def get_metavar(self, param: "Parameter") -> str: - choices_str = "|".join(self.choices) - - # Use curly braces to indicate a required argument. - if param.required and param.param_type_name == "argument": - return f"{{{choices_str}}}" - - # Use square braces to indicate an option or optional argument. 
- return f"[{choices_str}]" - - def get_missing_message(self, param: "Parameter") -> str: - return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - # Match through normalization and case sensitivity - # first do token_normalize_func, then lowercase - # preserve original `value` to produce an accurate message in - # `self.fail` - normed_value = value - normed_choices = {choice: choice for choice in self.choices} - - if ctx is not None and ctx.token_normalize_func is not None: - normed_value = ctx.token_normalize_func(value) - normed_choices = { - ctx.token_normalize_func(normed_choice): original - for normed_choice, original in normed_choices.items() - } - - if not self.case_sensitive: - normed_value = normed_value.casefold() - normed_choices = { - normed_choice.casefold(): original - for normed_choice, original in normed_choices.items() - } - - if normed_value in normed_choices: - return normed_choices[normed_value] - - choices_str = ", ".join(map(repr, self.choices)) - self.fail( - ngettext( - "{value!r} is not {choice}.", - "{value!r} is not one of {choices}.", - len(self.choices), - ).format(value=value, choice=choices_str, choices=choices_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return f"Choice({list(self.choices)})" - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Complete choices that start with the incomplete value. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - str_choices = map(str, self.choices) - - if self.case_sensitive: - matched = (c for c in str_choices if c.startswith(incomplete)) - else: - incomplete = incomplete.lower() - matched = (c for c in str_choices if c.lower().startswith(incomplete)) - - return [CompletionItem(c) for c in matched] - - -class DateTime(ParamType): - """The DateTime type converts date strings into `datetime` objects. - - The format strings which are checked are configurable, but default to some - common (non-timezone aware) ISO 8601 formats. - - When specifying *DateTime* formats, you should only pass a list or a tuple. - Other iterables, like generators, may lead to surprising results. - - The format strings are processed using ``datetime.strptime``, and this - consequently defines the format strings which are allowed. - - Parsing is tried using each format, in order, and the first format which - parses successfully is used. - - :param formats: A list or tuple of date format strings, in the order in - which they should be tried. Defaults to - ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, - ``'%Y-%m-%d %H:%M:%S'``. 
- """ - - name = "datetime" - - def __init__(self, formats: t.Optional[t.Sequence[str]] = None): - self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"] - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["formats"] = self.formats - return info_dict - - def get_metavar(self, param: "Parameter") -> str: - return f"[{'|'.join(self.formats)}]" - - def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: - try: - return datetime.strptime(value, format) - except ValueError: - return None - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if isinstance(value, datetime): - return value - - for format in self.formats: - converted = self._try_to_convert_date(value, format) - - if converted is not None: - return converted - - formats_str = ", ".join(map(repr, self.formats)) - self.fail( - ngettext( - "{value!r} does not match the format {format}.", - "{value!r} does not match the formats {formats}.", - len(self.formats), - ).format(value=value, format=formats_str, formats=formats_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return "DateTime" - - -class _NumberParamTypeBase(ParamType): - _number_class: t.ClassVar[t.Type] - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - return self._number_class(value) - except ValueError: - self.fail( - _("{value!r} is not a valid {number_type}.").format( - value=value, number_type=self.name - ), - param, - ctx, - ) - - -class _NumberRangeBase(_NumberParamTypeBase): - def __init__( - self, - min: t.Optional[float] = None, - max: t.Optional[float] = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - self.min = min - self.max = max - self.min_open = min_open - self.max_open = max_open - self.clamp = clamp - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - min=self.min, - max=self.max, - min_open=self.min_open, - max_open=self.max_open, - clamp=self.clamp, - ) - return info_dict - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - import operator - - rv = super().convert(value, param, ctx) - lt_min: bool = self.min is not None and ( - operator.le if self.min_open else operator.lt - )(rv, self.min) - gt_max: bool = self.max is not None and ( - operator.ge if self.max_open else operator.gt - )(rv, self.max) - - if self.clamp: - if lt_min: - return self._clamp(self.min, 1, self.min_open) # type: ignore - - if gt_max: - return self._clamp(self.max, -1, self.max_open) # type: ignore - - if lt_min or gt_max: - self.fail( - _("{value} is not in the range {range}.").format( - value=rv, range=self._describe_range() - ), - param, - ctx, - ) - - return rv - - def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: - """Find the valid value to clamp to bound in the given - direction. - - :param bound: The boundary value. - :param dir: 1 or -1 indicating the direction to move. - :param open: If true, the range does not include the bound. 
- """ - raise NotImplementedError - - def _describe_range(self) -> str: - """Describe the range for use in help text.""" - if self.min is None: - op = "<" if self.max_open else "<=" - return f"x{op}{self.max}" - - if self.max is None: - op = ">" if self.min_open else ">=" - return f"x{op}{self.min}" - - lop = "<" if self.min_open else "<=" - rop = "<" if self.max_open else "<=" - return f"{self.min}{lop}x{rop}{self.max}" - - def __repr__(self) -> str: - clamp = " clamped" if self.clamp else "" - return f"<{type(self).__name__} {self._describe_range()}{clamp}>" - - -class IntParamType(_NumberParamTypeBase): - name = "integer" - _number_class = int - - def __repr__(self) -> str: - return "INT" - - -class IntRange(_NumberRangeBase, IntParamType): - """Restrict an :data:`click.INT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "integer range" - - def _clamp( # type: ignore - self, bound: int, dir: "te.Literal[1, -1]", open: bool - ) -> int: - if not open: - return bound - - return bound + dir - - -class FloatParamType(_NumberParamTypeBase): - name = "float" - _number_class = float - - def __repr__(self) -> str: - return "FLOAT" - - -class FloatRange(_NumberRangeBase, FloatParamType): - """Restrict a :data:`click.FLOAT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. This is not supported if either - boundary is marked ``open``. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "float range" - - def __init__( - self, - min: t.Optional[float] = None, - max: t.Optional[float] = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - super().__init__( - min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp - ) - - if (min_open or max_open) and clamp: - raise TypeError("Clamping is not supported for open bounds.") - - def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: - if not open: - return bound - - # Could use Python 3.9's math.nextafter here, but clamping an - # open float range doesn't seem to be particularly useful. It's - # left up to the user to write a callback to do it if needed. 
- raise RuntimeError("Clamping is not supported for open bounds.") - - -class BoolParamType(ParamType): - name = "boolean" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if value in {False, True}: - return bool(value) - - norm = value.strip().lower() - - if norm in {"1", "true", "t", "yes", "y", "on"}: - return True - - if norm in {"0", "false", "f", "no", "n", "off"}: - return False - - self.fail( - _("{value!r} is not a valid boolean.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "BOOL" - - -class UUIDParameterType(ParamType): - name = "uuid" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - import uuid - - if isinstance(value, uuid.UUID): - return value - - value = value.strip() - - try: - return uuid.UUID(value) - except ValueError: - self.fail( - _("{value!r} is not a valid UUID.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "UUID" - - -class File(ParamType): - """Declares a parameter to be a file for reading or writing. The file - is automatically closed once the context tears down (after the command - finished working). - - Files can be opened for reading or writing. The special value ``-`` - indicates stdin or stdout depending on the mode. - - By default, the file is opened for reading text data, but it can also be - opened in binary mode or for writing. The encoding parameter can be used - to force a specific encoding. - - The `lazy` flag controls if the file should be opened immediately or upon - first IO. The default is to be non-lazy for standard input and output - streams as well as files opened for reading, `lazy` otherwise. When opening a - file lazily for reading, it is still opened temporarily for validation, but - will not be held open until first IO. lazy is mainly useful when opening - for writing to avoid creating the file until it is needed. - - Starting with Click 2.0, files can also be opened atomically in which - case all writes go into a separate file in the same folder and upon - completion the file will be moved over to the original location. This - is useful if a file regularly read by other users is modified. - - See :ref:`file-args` for more information. 
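
The ``File`` behaviour described in the docstring above (``-`` for standard streams, lazy opening, atomic writes) looks roughly like this in use; a sketch against the public ``click`` API, with the ``copy`` command invented for illustration::

    import click


    @click.command()
    @click.argument("src", type=click.File("r"))
    @click.argument("dst", type=click.File("w", lazy=True, atomic=True))
    def copy(src, dst):
        # "-" may be passed for SRC or DST to use stdin/stdout. DST is only
        # created on first write; with atomic=True the data goes to a temp
        # file that replaces DST once the command finishes.
        dst.write(src.read())
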
- """ - - name = "filename" - envvar_list_splitter = os.path.pathsep - - def __init__( - self, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - lazy: t.Optional[bool] = None, - atomic: bool = False, - ) -> None: - self.mode = mode - self.encoding = encoding - self.errors = errors - self.lazy = lazy - self.atomic = atomic - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update(mode=self.mode, encoding=self.encoding) - return info_dict - - def resolve_lazy_flag(self, value: t.Any) -> bool: - if self.lazy is not None: - return self.lazy - if value == "-": - return False - elif "w" in self.mode: - return True - return False - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - if hasattr(value, "read") or hasattr(value, "write"): - return value - - lazy = self.resolve_lazy_flag(value) - - if lazy: - f: t.IO = t.cast( - t.IO, - LazyFile( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ), - ) - - if ctx is not None: - ctx.call_on_close(f.close_intelligently) # type: ignore - - return f - - f, should_close = open_stream( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - # If a context is provided, we automatically close the file - # at the end of the context execution (or flush out). If a - # context does not exist, it's the caller's responsibility to - # properly close the file. This for instance happens when the - # type is used with prompts. - if ctx is not None: - if should_close: - ctx.call_on_close(safecall(f.close)) - else: - ctx.call_on_close(safecall(f.flush)) - - return f - except OSError as e: # noqa: B014 - self.fail(f"'{os.fsdecode(value)}': {e.strerror}", param, ctx) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a special completion marker that tells the completion - system to use the shell to provide file path completions. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - return [CompletionItem(incomplete, type="file")] - - -class Path(ParamType): - """The ``Path`` type is similar to the :class:`File` type, but - returns the filename instead of an open file. Various checks can be - enabled to validate the type of file and permissions. - - :param exists: The file or directory needs to exist for the value to - be valid. If this is not set to ``True``, and the file does not - exist, then all further checks are silently skipped. - :param file_okay: Allow a file as a value. - :param dir_okay: Allow a directory as a value. - :param readable: if true, a readable check is performed. - :param writable: if true, a writable check is performed. - :param executable: if true, an executable check is performed. - :param resolve_path: Make the value absolute and resolve any - symlinks. A ``~`` is not expanded, as this is supposed to be - done by the shell only. - :param allow_dash: Allow a single dash as a value, which indicates - a standard stream (but does not open it). Use - :func:`~click.open_file` to handle opening this value. - :param path_type: Convert the incoming path value to this type. If - ``None``, keep Python's default, which is ``str``. Useful to - convert to :class:`pathlib.Path`. - - .. 
versionchanged:: 8.1 - Added the ``executable`` parameter. - - .. versionchanged:: 8.0 - Allow passing ``type=pathlib.Path``. - - .. versionchanged:: 6.0 - Added the ``allow_dash`` parameter. - """ - - envvar_list_splitter = os.path.pathsep - - def __init__( - self, - exists: bool = False, - file_okay: bool = True, - dir_okay: bool = True, - writable: bool = False, - readable: bool = True, - resolve_path: bool = False, - allow_dash: bool = False, - path_type: t.Optional[t.Type] = None, - executable: bool = False, - ): - self.exists = exists - self.file_okay = file_okay - self.dir_okay = dir_okay - self.readable = readable - self.writable = writable - self.executable = executable - self.resolve_path = resolve_path - self.allow_dash = allow_dash - self.type = path_type - - if self.file_okay and not self.dir_okay: - self.name = _("file") - elif self.dir_okay and not self.file_okay: - self.name = _("directory") - else: - self.name = _("path") - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - exists=self.exists, - file_okay=self.file_okay, - dir_okay=self.dir_okay, - writable=self.writable, - readable=self.readable, - allow_dash=self.allow_dash, - ) - return info_dict - - def coerce_path_result(self, rv: t.Any) -> t.Any: - if self.type is not None and not isinstance(rv, self.type): - if self.type is str: - rv = os.fsdecode(rv) - elif self.type is bytes: - rv = os.fsencode(rv) - else: - rv = self.type(rv) - - return rv - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - rv = value - - is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") - - if not is_dash: - if self.resolve_path: - # os.path.realpath doesn't resolve symlinks on Windows - # until Python 3.8. Use pathlib for now. - import pathlib - - rv = os.fsdecode(pathlib.Path(rv).resolve()) - - try: - st = os.stat(rv) - except OSError: - if not self.exists: - return self.coerce_path_result(rv) - self.fail( - _("{name} {filename!r} does not exist.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - - if not self.file_okay and stat.S_ISREG(st.st_mode): - self.fail( - _("{name} {filename!r} is a file.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - if not self.dir_okay and stat.S_ISDIR(st.st_mode): - self.fail( - _("{name} '{filename}' is a directory.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - - if self.readable and not os.access(rv, os.R_OK): - self.fail( - _("{name} {filename!r} is not readable.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - - if self.writable and not os.access(rv, os.W_OK): - self.fail( - _("{name} {filename!r} is not writable.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - - if self.executable and not os.access(value, os.X_OK): - self.fail( - _("{name} {filename!r} is not executable.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - - return self.coerce_path_result(rv) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a special completion marker that tells the completion - system to use the shell to provide path completions for only - directories or any paths. - - :param ctx: Invocation context for this command. 
- :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - type = "dir" if self.dir_okay and not self.file_okay else "file" - return [CompletionItem(incomplete, type=type)] - - -class Tuple(CompositeParamType): - """The default behavior of Click is to apply a type on a value directly. - This works well in most cases, except for when `nargs` is set to a fixed - count and different types should be used for different items. In this - case the :class:`Tuple` type can be used. This type can only be used - if `nargs` is set to a fixed number. - - For more information see :ref:`tuple-type`. - - This can be selected by using a Python tuple literal as a type. - - :param types: a list of types that should be used for the tuple items. - """ - - def __init__(self, types: t.Sequence[t.Union[t.Type, ParamType]]) -> None: - self.types = [convert_type(ty) for ty in types] - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["types"] = [t.to_info_dict() for t in self.types] - return info_dict - - @property - def name(self) -> str: # type: ignore - return f"<{' '.join(ty.name for ty in self.types)}>" - - @property - def arity(self) -> int: # type: ignore - return len(self.types) - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - len_type = len(self.types) - len_value = len(value) - - if len_value != len_type: - self.fail( - ngettext( - "{len_type} values are required, but {len_value} was given.", - "{len_type} values are required, but {len_value} were given.", - len_value, - ).format(len_type=len_type, len_value=len_value), - param=param, - ctx=ctx, - ) - - return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) - - -def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: - """Find the most appropriate :class:`ParamType` for the given Python - type. If the type isn't provided, it can be inferred from a default - value. - """ - guessed_type = False - - if ty is None and default is not None: - if isinstance(default, (tuple, list)): - # If the default is empty, ty will remain None and will - # return STRING. - if default: - item = default[0] - - # A tuple of tuples needs to detect the inner types. - # Can't call convert recursively because that would - # incorrectly unwind the tuple to a single type. - if isinstance(item, (tuple, list)): - ty = tuple(map(type, item)) - else: - ty = type(item) - else: - ty = type(default) - - guessed_type = True - - if isinstance(ty, tuple): - return Tuple(ty) - - if isinstance(ty, ParamType): - return ty - - if ty is str or ty is None: - return STRING - - if ty is int: - return INT - - if ty is float: - return FLOAT - - if ty is bool: - return BOOL - - if guessed_type: - return STRING - - if __debug__: - try: - if issubclass(ty, ParamType): - raise AssertionError( - f"Attempted to use an uninstantiated parameter type ({ty})." - ) - except TypeError: - # ty is an instance (correct), so issubclass fails. - pass - - return FuncParamType(ty) - - -#: A dummy parameter type that just does nothing. From a user's -#: perspective this appears to just be the same as `STRING` but -#: internally no string conversion takes place if the input was bytes. -#: This is usually useful when working with file paths as they can -#: appear in bytes and unicode. 
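
``Path`` and ``Tuple`` (via ``convert_type``, which wraps a literal Python tuple as a ``Tuple`` type) are commonly combined as below; a sketch using the public ``click`` API, with the command and option names invented::

    import pathlib

    import click


    @click.command()
    @click.argument(
        "config",
        type=click.Path(exists=True, dir_okay=False, path_type=pathlib.Path),
    )
    @click.option("--point", type=(str, int))  # convert_type() turns this into Tuple
    def show(config, point):
        # `config` arrives as a pathlib.Path; `point` as e.g. ("x", 3).
        click.echo(f"{config.resolve()} {point}")
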
-#: -#: For path related uses the :class:`Path` type is a better choice but -#: there are situations where an unprocessed type is useful which is why -#: it is is provided. -#: -#: .. versionadded:: 4.0 -UNPROCESSED = UnprocessedParamType() - -#: A unicode string parameter type which is the implicit default. This -#: can also be selected by using ``str`` as type. -STRING = StringParamType() - -#: An integer parameter. This can also be selected by using ``int`` as -#: type. -INT = IntParamType() - -#: A floating point value parameter. This can also be selected by using -#: ``float`` as type. -FLOAT = FloatParamType() - -#: A boolean parameter. This is the default for boolean flags. This can -#: also be selected by using ``bool`` as a type. -BOOL = BoolParamType() - -#: A UUID parameter. -UUID = UUIDParameterType() diff --git a/venv_flaskchat/lib/python3.11/site-packages/click/utils.py b/venv_flaskchat/lib/python3.11/site-packages/click/utils.py deleted file mode 100644 index 8283788..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/click/utils.py +++ /dev/null @@ -1,580 +0,0 @@ -import os -import re -import sys -import typing as t -from functools import update_wrapper -from types import ModuleType - -from ._compat import _default_text_stderr -from ._compat import _default_text_stdout -from ._compat import _find_binary_writer -from ._compat import auto_wrap_for_ansi -from ._compat import binary_streams -from ._compat import get_filesystem_encoding -from ._compat import open_stream -from ._compat import should_strip_ansi -from ._compat import strip_ansi -from ._compat import text_streams -from ._compat import WIN -from .globals import resolve_color_default - -if t.TYPE_CHECKING: - import typing_extensions as te - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def _posixify(name: str) -> str: - return "-".join(name.split()).lower() - - -def safecall(func: F) -> F: - """Wraps a function so that it swallows exceptions.""" - - def wrapper(*args, **kwargs): # type: ignore - try: - return func(*args, **kwargs) - except Exception: - pass - - return update_wrapper(t.cast(F, wrapper), func) - - -def make_str(value: t.Any) -> str: - """Converts a value into a valid string.""" - if isinstance(value, bytes): - try: - return value.decode(get_filesystem_encoding()) - except UnicodeError: - return value.decode("utf-8", "replace") - return str(value) - - -def make_default_short_help(help: str, max_length: int = 45) -> str: - """Returns a condensed version of help string.""" - # Consider only the first paragraph. - paragraph_end = help.find("\n\n") - - if paragraph_end != -1: - help = help[:paragraph_end] - - # Collapse newlines, tabs, and spaces. - words = help.split() - - if not words: - return "" - - # The first paragraph started with a "no rewrap" marker, ignore it. - if words[0] == "\b": - words = words[1:] - - total_length = 0 - last_index = len(words) - 1 - - for i, word in enumerate(words): - total_length += len(word) + (i > 0) - - if total_length > max_length: # too long, truncate - break - - if word[-1] == ".": # sentence end, truncate without "..." - return " ".join(words[: i + 1]) - - if total_length == max_length and i != last_index: - break # not at sentence end, truncate with "..." - else: - return " ".join(words) # no truncation needed - - # Account for the length of the suffix. 
-    total_length += len("...")
-
-    # remove words until the length is short enough
-    while i > 0:
-        total_length -= len(words[i]) + (i > 0)
-
-        if total_length <= max_length:
-            break
-
-        i -= 1
-
-    return " ".join(words[:i]) + "..."
-
-
-class LazyFile:
-    """A lazy file works like a regular file but it does not fully open
-    the file but it does perform some basic checks early to see if the
-    filename parameter does make sense. This is useful for safely opening
-    files for writing.
-    """
-
-    def __init__(
-        self,
-        filename: str,
-        mode: str = "r",
-        encoding: t.Optional[str] = None,
-        errors: t.Optional[str] = "strict",
-        atomic: bool = False,
-    ):
-        self.name = filename
-        self.mode = mode
-        self.encoding = encoding
-        self.errors = errors
-        self.atomic = atomic
-        self._f: t.Optional[t.IO]
-
-        if filename == "-":
-            self._f, self.should_close = open_stream(filename, mode, encoding, errors)
-        else:
-            if "r" in mode:
-                # Open and close the file in case we're opening it for
-                # reading so that we can catch at least some errors in
-                # some cases early.
-                open(filename, mode).close()
-            self._f = None
-            self.should_close = True
-
-    def __getattr__(self, name: str) -> t.Any:
-        return getattr(self.open(), name)
-
-    def __repr__(self) -> str:
-        if self._f is not None:
-            return repr(self._f)
-        return f"<unopened file '{self.name}' {self.mode}>"
-
-    def open(self) -> t.IO:
-        """Opens the file if it's not yet open. This call might fail with
-        a :exc:`FileError`. Not handling this error will produce an error
-        that Click shows.
-        """
-        if self._f is not None:
-            return self._f
-        try:
-            rv, self.should_close = open_stream(
-                self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
-            )
-        except OSError as e:  # noqa: E402
-            from .exceptions import FileError
-
-            raise FileError(self.name, hint=e.strerror) from e
-        self._f = rv
-        return rv
-
-    def close(self) -> None:
-        """Closes the underlying file, no matter what."""
-        if self._f is not None:
-            self._f.close()
-
-    def close_intelligently(self) -> None:
-        """This function only closes the file if it was opened by the lazy
-        file wrapper. For instance this will never close stdin.
-        """
-        if self.should_close:
-            self.close()
-
-    def __enter__(self) -> "LazyFile":
-        return self
-
-    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
-        self.close_intelligently()
-
-    def __iter__(self) -> t.Iterator[t.AnyStr]:
-        self.open()
-        return iter(self._f)  # type: ignore
-
-
-class KeepOpenFile:
-    def __init__(self, file: t.IO) -> None:
-        self._file = file
-
-    def __getattr__(self, name: str) -> t.Any:
-        return getattr(self._file, name)
-
-    def __enter__(self) -> "KeepOpenFile":
-        return self
-
-    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
-        pass
-
-    def __repr__(self) -> str:
-        return repr(self._file)
-
-    def __iter__(self) -> t.Iterator[t.AnyStr]:
-        return iter(self._file)
-
-
-def echo(
-    message: t.Optional[t.Any] = None,
-    file: t.Optional[t.IO[t.Any]] = None,
-    nl: bool = True,
-    err: bool = False,
-    color: t.Optional[bool] = None,
-) -> None:
-    """Print a message and newline to stdout or a file. This should be
-    used instead of :func:`print` because it provides better support
-    for different data, files, and environments.
-
-    Compared to :func:`print`, this does the following:
-
-    -   Ensures that the output encoding is not misconfigured on Linux.
-    -   Supports Unicode in the Windows console.
-    -   Supports writing to binary outputs, and supports writing bytes
-        to text outputs.
-    -   Supports colors and styles on Windows.
- - Removes ANSI color and style codes if the output does not look - like an interactive terminal. - - Always flushes the output. - - :param message: The string or bytes to output. Other objects are - converted to strings. - :param file: The file to write to. Defaults to ``stdout``. - :param err: Write to ``stderr`` instead of ``stdout``. - :param nl: Print a newline after the message. Enabled by default. - :param color: Force showing or hiding colors and other styles. By - default Click will remove color if the output does not look like - an interactive terminal. - - .. versionchanged:: 6.0 - Support Unicode output on the Windows console. Click does not - modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` - will still not support Unicode. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionadded:: 3.0 - Added the ``err`` parameter. - - .. versionchanged:: 2.0 - Support colors on Windows if colorama is installed. - """ - if file is None: - if err: - file = _default_text_stderr() - else: - file = _default_text_stdout() - - # Convert non bytes/text into the native string type. - if message is not None and not isinstance(message, (str, bytes, bytearray)): - out: t.Optional[t.Union[str, bytes]] = str(message) - else: - out = message - - if nl: - out = out or "" - if isinstance(out, str): - out += "\n" - else: - out += b"\n" - - if not out: - file.flush() - return - - # If there is a message and the value looks like bytes, we manually - # need to find the binary stream and write the message in there. - # This is done separately so that most stream types will work as you - # would expect. Eg: you can write to StringIO for other cases. - if isinstance(out, (bytes, bytearray)): - binary_file = _find_binary_writer(file) - - if binary_file is not None: - file.flush() - binary_file.write(out) - binary_file.flush() - return - - # ANSI style code support. For no message or bytes, nothing happens. - # When outputting to a file instead of a terminal, strip codes. - else: - color = resolve_color_default(color) - - if should_strip_ansi(file, color): - out = strip_ansi(out) - elif WIN: - if auto_wrap_for_ansi is not None: - file = auto_wrap_for_ansi(file) # type: ignore - elif not color: - out = strip_ansi(out) - - file.write(out) # type: ignore - file.flush() - - -def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: - """Returns a system stream for byte processing. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - """ - opener = binary_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener() - - -def get_text_stream( - name: "te.Literal['stdin', 'stdout', 'stderr']", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", -) -> t.TextIO: - """Returns a system stream for text processing. This usually returns - a wrapped stream around a binary stream returned from - :func:`get_binary_stream` but it also can take shortcuts for already - correctly configured streams. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - :param encoding: overrides the detected default encoding. - :param errors: overrides the default error mode. 
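
``echo`` and the stream helpers removed here are the usual replacements for ``print`` in ``click`` programs; a short sketch of the behaviours the docstring lists (stderr output, byte routing, ANSI stripping), using only the public API::

    import click

    click.echo("working...", err=True)             # written to stderr
    click.echo(b"\x00\x01 raw bytes")              # routed to the binary stream
    click.echo(click.style("done", fg="green"))    # styles stripped when piped

    stdout = click.get_binary_stream("stdout")     # byte-oriented stdout
    stdout.write(b"binary payload\n")
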
- """ - opener = text_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener(encoding, errors) - - -def open_file( - filename: str, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - lazy: bool = False, - atomic: bool = False, -) -> t.IO: - """Open a file, with extra behavior to handle ``'-'`` to indicate - a standard stream, lazy open on write, and atomic write. Similar to - the behavior of the :class:`~click.File` param type. - - If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is - wrapped so that using it in a context manager will not close it. - This makes it possible to use the function without accidentally - closing a standard stream: - - .. code-block:: python - - with open_file(filename) as f: - ... - - :param filename: The name of the file to open, or ``'-'`` for - ``stdin``/``stdout``. - :param mode: The mode in which to open the file. - :param encoding: The encoding to decode or encode a file opened in - text mode. - :param errors: The error handling mode. - :param lazy: Wait to open the file until it is accessed. For read - mode, the file is temporarily opened to raise access errors - early, then closed until it is read again. - :param atomic: Write to a temporary file and replace the given file - on close. - - .. versionadded:: 3.0 - """ - if lazy: - return t.cast(t.IO, LazyFile(filename, mode, encoding, errors, atomic=atomic)) - - f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) - - if not should_close: - f = t.cast(t.IO, KeepOpenFile(f)) - - return f - - -def format_filename( - filename: t.Union[str, bytes, os.PathLike], shorten: bool = False -) -> str: - """Formats a filename for user display. The main purpose of this - function is to ensure that the filename can be displayed at all. This - will decode the filename to unicode if necessary in a way that it will - not fail. Optionally, it can shorten the filename to not include the - full path to the filename. - - :param filename: formats a filename for UI display. This will also convert - the filename into unicode without failing. - :param shorten: this optionally shortens the filename to strip of the - path that leads up to it. - """ - if shorten: - filename = os.path.basename(filename) - - return os.fsdecode(filename) - - -def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: - r"""Returns the config folder for the application. The default behavior - is to return whatever is most appropriate for the operating system. - - To give you an idea, for an app called ``"Foo Bar"``, something like - the following folders could be returned: - - Mac OS X: - ``~/Library/Application Support/Foo Bar`` - Mac OS X (POSIX): - ``~/.foo-bar`` - Unix: - ``~/.config/foo-bar`` - Unix (POSIX): - ``~/.foo-bar`` - Windows (roaming): - ``C:\Users\\AppData\Roaming\Foo Bar`` - Windows (not roaming): - ``C:\Users\\AppData\Local\Foo Bar`` - - .. versionadded:: 2.0 - - :param app_name: the application name. This should be properly capitalized - and can contain whitespace. - :param roaming: controls if the folder should be roaming or not on Windows. - Has no affect otherwise. - :param force_posix: if this is set to `True` then on any POSIX system the - folder will be stored in the home folder with a leading - dot instead of the XDG config home or darwin's - application support folder. 
- """ - if WIN: - key = "APPDATA" if roaming else "LOCALAPPDATA" - folder = os.environ.get(key) - if folder is None: - folder = os.path.expanduser("~") - return os.path.join(folder, app_name) - if force_posix: - return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) - if sys.platform == "darwin": - return os.path.join( - os.path.expanduser("~/Library/Application Support"), app_name - ) - return os.path.join( - os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), - _posixify(app_name), - ) - - -class PacifyFlushWrapper: - """This wrapper is used to catch and suppress BrokenPipeErrors resulting - from ``.flush()`` being called on broken pipe during the shutdown/final-GC - of the Python interpreter. Notably ``.flush()`` is always called on - ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any - other cleanup code, and the case where the underlying file is not a broken - pipe, all calls and attributes are proxied. - """ - - def __init__(self, wrapped: t.IO) -> None: - self.wrapped = wrapped - - def flush(self) -> None: - try: - self.wrapped.flush() - except OSError as e: - import errno - - if e.errno != errno.EPIPE: - raise - - def __getattr__(self, attr: str) -> t.Any: - return getattr(self.wrapped, attr) - - -def _detect_program_name( - path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None -) -> str: - """Determine the command used to run the program, for use in help - text. If a file or entry point was executed, the file name is - returned. If ``python -m`` was used to execute a module or package, - ``python -m name`` is returned. - - This doesn't try to be too precise, the goal is to give a concise - name for help text. Files are only shown as their name without the - path. ``python`` is only shown for modules, and the full path to - ``sys.executable`` is not shown. - - :param path: The Python file being executed. Python puts this in - ``sys.argv[0]``, which is used by default. - :param _main: The ``__main__`` module. This should only be passed - during internal testing. - - .. versionadded:: 8.0 - Based on command args detection in the Werkzeug reloader. - - :meta private: - """ - if _main is None: - _main = sys.modules["__main__"] - - if not path: - path = sys.argv[0] - - # The value of __package__ indicates how Python was called. It may - # not exist if a setuptools script is installed as an egg. It may be - # set incorrectly for entry points created with pip on Windows. - if getattr(_main, "__package__", None) is None or ( - os.name == "nt" - and _main.__package__ == "" - and not os.path.exists(path) - and os.path.exists(f"{path}.exe") - ): - # Executed a file, like "python app.py". - return os.path.basename(path) - - # Executed a module, like "python -m example". - # Rewritten by Python from "-m script" to "/path/to/script.py". - # Need to look at main module to determine how it was executed. - py_module = t.cast(str, _main.__package__) - name = os.path.splitext(os.path.basename(path))[0] - - # A submodule like "example.cli". - if name != "__main__": - py_module = f"{py_module}.{name}" - - return f"python -m {py_module.lstrip('.')}" - - -def _expand_args( - args: t.Iterable[str], - *, - user: bool = True, - env: bool = True, - glob_recursive: bool = True, -) -> t.List[str]: - """Simulate Unix shell expansion with Python functions. - - See :func:`glob.glob`, :func:`os.path.expanduser`, and - :func:`os.path.expandvars`. - - This is intended for use on Windows, where the shell does not do any - expansion. 
It may not exactly match what a Unix shell would do. - - :param args: List of command line arguments to expand. - :param user: Expand user home directory. - :param env: Expand environment variables. - :param glob_recursive: ``**`` matches directories recursively. - - .. versionchanged:: 8.1 - Invalid glob patterns are treated as empty expansions rather - than raising an error. - - .. versionadded:: 8.0 - - :meta private: - """ - from glob import glob - - out = [] - - for arg in args: - if user: - arg = os.path.expanduser(arg) - - if env: - arg = os.path.expandvars(arg) - - try: - matches = glob(arg, recursive=glob_recursive) - except re.error: - matches = [] - - if not matches: - out.append(arg) - else: - out.extend(matches) - - return out diff --git a/venv_flaskchat/lib/python3.11/site-packages/distutils-precedence.pth b/venv_flaskchat/lib/python3.11/site-packages/distutils-precedence.pth deleted file mode 100644 index 7f009fe..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/distutils-precedence.pth +++ /dev/null @@ -1 +0,0 @@ -import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/__init__.py deleted file mode 100644 index a4c21bb..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -import sys - -from .client import Client -from .middleware import WSGIApp, Middleware -from .server import Server -if sys.version_info >= (3, 5): # pragma: no cover - from .asyncio_server import AsyncServer - from .asyncio_client import AsyncClient - from .async_drivers.asgi import ASGIApp - try: - from .async_drivers.tornado import get_tornado_handler - except ImportError: - get_tornado_handler = None -else: # pragma: no cover - AsyncServer = None - AsyncClient = None - get_tornado_handler = None - ASGIApp = None - -__all__ = ['Server', 'WSGIApp', 'Middleware', 'Client'] -if AsyncServer is not None: # pragma: no cover - __all__ += ['AsyncServer', 'ASGIApp', 'get_tornado_handler', - 'AsyncClient'], diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 40d8dd6..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/asyncio_client.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/asyncio_client.cpython-311.pyc deleted file mode 100644 index f746059..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/asyncio_client.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/asyncio_server.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/asyncio_server.cpython-311.pyc deleted file mode 100644 index 8c733e8..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/asyncio_server.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/asyncio_socket.cpython-311.pyc 
b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/asyncio_socket.cpython-311.pyc deleted file mode 100644 index 2a0099d..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/asyncio_socket.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/client.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/client.cpython-311.pyc deleted file mode 100644 index c42e62e..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/client.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/exceptions.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/exceptions.cpython-311.pyc deleted file mode 100644 index 9589a18..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/exceptions.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/json.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/json.cpython-311.pyc deleted file mode 100644 index 738199e..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/json.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/middleware.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/middleware.cpython-311.pyc deleted file mode 100644 index f8577f3..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/middleware.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/packet.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/packet.cpython-311.pyc deleted file mode 100644 index a10e600..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/packet.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/payload.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/payload.cpython-311.pyc deleted file mode 100644 index 7aadea6..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/payload.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/server.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/server.cpython-311.pyc deleted file mode 100644 index aec7710..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/server.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/socket.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/socket.cpython-311.pyc deleted file mode 100644 index 62fee02..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/socket.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/static_files.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/static_files.cpython-311.pyc deleted file mode 100644 index 847693a..0000000 Binary files 
a/venv_flaskchat/lib/python3.11/site-packages/engineio/__pycache__/static_files.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index c3f9e02..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/aiohttp.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/aiohttp.cpython-311.pyc deleted file mode 100644 index 2092e49..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/aiohttp.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/asgi.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/asgi.cpython-311.pyc deleted file mode 100644 index 9ca7afe..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/asgi.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/eventlet.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/eventlet.cpython-311.pyc deleted file mode 100644 index b7bfa29..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/eventlet.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/gevent.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/gevent.cpython-311.pyc deleted file mode 100644 index f360bb4..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/gevent.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/gevent_uwsgi.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/gevent_uwsgi.cpython-311.pyc deleted file mode 100644 index 4eb65aa..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/gevent_uwsgi.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/sanic.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/sanic.cpython-311.pyc deleted file mode 100644 index 62037d4..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/sanic.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/threading.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/threading.cpython-311.pyc deleted file mode 100644 index ce1b6f1..0000000 Binary files 
a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/threading.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/tornado.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/tornado.cpython-311.pyc deleted file mode 100644 index 7b2f9e3..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/__pycache__/tornado.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/aiohttp.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/aiohttp.py deleted file mode 100644 index a591995..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/aiohttp.py +++ /dev/null @@ -1,127 +0,0 @@ -import asyncio -import sys -from urllib.parse import urlsplit - -from aiohttp.web import Response, WebSocketResponse - - -def create_route(app, engineio_server, engineio_endpoint): - """This function sets up the engine.io endpoint as a route for the - application. - - Note that both GET and POST requests must be hooked up on the engine.io - endpoint. - """ - app.router.add_get(engineio_endpoint, engineio_server.handle_request) - app.router.add_post(engineio_endpoint, engineio_server.handle_request) - app.router.add_route('OPTIONS', engineio_endpoint, - engineio_server.handle_request) - - -def translate_request(request): - """This function takes the arguments passed to the request handler and - uses them to generate a WSGI compatible environ dictionary. - """ - message = request._message - payload = request._payload - - uri_parts = urlsplit(message.path) - environ = { - 'wsgi.input': payload, - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'aiohttp', - 'REQUEST_METHOD': message.method, - 'QUERY_STRING': uri_parts.query or '', - 'RAW_URI': message.path, - 'SERVER_PROTOCOL': 'HTTP/%s.%s' % message.version, - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'aiohttp', - 'SERVER_PORT': '0', - 'aiohttp.request': request - } - - for hdr_name, hdr_value in message.headers.items(): - hdr_name = hdr_name.upper() - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - if key in environ: - hdr_value = '%s,%s' % (environ[key], hdr_value) - - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - - path_info = uri_parts.path - - environ['PATH_INFO'] = path_info - environ['SCRIPT_NAME'] = '' - - return environ - - -def make_response(status, headers, payload, environ): - """This function generates an appropriate response object for this async - mode. - """ - return Response(body=payload, status=int(status.split()[0]), - headers=headers) - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides a aiohttp WebSocket interface that is - somewhat compatible with eventlet's implementation. 
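
The aiohttp driver being deleted here is normally wired up through the public ``python-engineio`` API rather than by calling ``create_route`` directly; a sketch assuming ``aiohttp`` and ``python-engineio`` are installed, with the event handler and port chosen for illustration::

    from aiohttp import web
    import engineio

    eio = engineio.AsyncServer(async_mode='aiohttp')
    app = web.Application()
    eio.attach(app)  # registers the GET/POST/OPTIONS engine.io routes

    @eio.on('message')
    async def message(sid, data):
        await eio.send(sid, data)  # echo the payload back to the client

    if __name__ == '__main__':
        web.run_app(app, port=5000)
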
- """ - def __init__(self, handler): - self.handler = handler - self._sock = None - - async def __call__(self, environ): - request = environ['aiohttp.request'] - self._sock = WebSocketResponse() - await self._sock.prepare(request) - - self.environ = environ - await self.handler(self) - return self._sock - - async def close(self): - await self._sock.close() - - async def send(self, message): - if isinstance(message, bytes): - f = self._sock.send_bytes - else: - f = self._sock.send_str - if asyncio.iscoroutinefunction(f): - await f(message) - else: - f(message) - - async def wait(self): - msg = await self._sock.receive() - if not isinstance(msg.data, bytes) and \ - not isinstance(msg.data, str): - raise IOError() - return msg.data - - -_async = { - 'asyncio': True, - 'create_route': create_route, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': WebSocket, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/asgi.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/asgi.py deleted file mode 100644 index d661841..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/asgi.py +++ /dev/null @@ -1,263 +0,0 @@ -import os -import sys -import asyncio - -from engineio.static_files import get_static_file - - -class ASGIApp: - """ASGI application middleware for Engine.IO. - - This middleware dispatches traffic to an Engine.IO application. It can - also serve a list of static files to the client, or forward unrelated - HTTP traffic to another ASGI application. - - :param engineio_server: The Engine.IO server. Must be an instance of the - ``engineio.AsyncServer`` class. - :param static_files: A dictionary with static file mapping rules. See the - documentation for details on this argument. - :param other_asgi_app: A separate ASGI app that receives all other traffic. - :param engineio_path: The endpoint where the Engine.IO application should - be installed. The default value is appropriate for - most cases. 
- :param on_startup: function to be called on application startup; can be - coroutine - :param on_shutdown: function to be called on application shutdown; can be - coroutine - - Example usage:: - - import engineio - import uvicorn - - eio = engineio.AsyncServer() - app = engineio.ASGIApp(eio, static_files={ - '/': {'content_type': 'text/html', 'filename': 'index.html'}, - '/index.html': {'content_type': 'text/html', - 'filename': 'index.html'}, - }) - uvicorn.run(app, '127.0.0.1', 5000) - """ - def __init__(self, engineio_server, other_asgi_app=None, - static_files=None, engineio_path='engine.io', - on_startup=None, on_shutdown=None): - self.engineio_server = engineio_server - self.other_asgi_app = other_asgi_app - self.engineio_path = engineio_path - if not self.engineio_path.startswith('/'): - self.engineio_path = '/' + self.engineio_path - if not self.engineio_path.endswith('/'): - self.engineio_path += '/' - self.static_files = static_files or {} - self.on_startup = on_startup - self.on_shutdown = on_shutdown - - async def __call__(self, scope, receive, send): - if scope['type'] in ['http', 'websocket'] and \ - scope['path'].startswith(self.engineio_path): - await self.engineio_server.handle_request(scope, receive, send) - else: - static_file = get_static_file(scope['path'], self.static_files) \ - if scope['type'] == 'http' and self.static_files else None - if scope['type'] == 'lifespan': - await self.lifespan(scope, receive, send) - elif static_file and os.path.exists(static_file['filename']): - await self.serve_static_file(static_file, receive, send) - elif self.other_asgi_app is not None: - await self.other_asgi_app(scope, receive, send) - else: - await self.not_found(receive, send) - - async def serve_static_file(self, static_file, receive, - send): # pragma: no cover - event = await receive() - if event['type'] == 'http.request': - with open(static_file['filename'], 'rb') as f: - payload = f.read() - await send({'type': 'http.response.start', - 'status': 200, - 'headers': [(b'Content-Type', static_file[ - 'content_type'].encode('utf-8'))]}) - await send({'type': 'http.response.body', - 'body': payload}) - - async def lifespan(self, scope, receive, send): - if self.other_asgi_app is not None and self.on_startup is None and \ - self.on_shutdown is None: - # let the other ASGI app handle lifespan events - await self.other_asgi_app(scope, receive, send) - return - - while True: - event = await receive() - if event['type'] == 'lifespan.startup': - if self.on_startup: - try: - await self.on_startup() \ - if asyncio.iscoroutinefunction(self.on_startup) \ - else self.on_startup() - except: - await send({'type': 'lifespan.startup.failed'}) - return - await send({'type': 'lifespan.startup.complete'}) - elif event['type'] == 'lifespan.shutdown': - if self.on_shutdown: - try: - await self.on_shutdown() \ - if asyncio.iscoroutinefunction(self.on_shutdown) \ - else self.on_shutdown() - except: - await send({'type': 'lifespan.shutdown.failed'}) - return - await send({'type': 'lifespan.shutdown.complete'}) - return - - async def not_found(self, receive, send): - """Return a 404 Not Found error to the client.""" - await send({'type': 'http.response.start', - 'status': 404, - 'headers': [(b'Content-Type', b'text/plain')]}) - await send({'type': 'http.response.body', - 'body': b'Not Found'}) - - -async def translate_request(scope, receive, send): - class AwaitablePayload(object): # pragma: no cover - def __init__(self, payload): - self.payload = payload or b'' - - async def read(self, length=None): 
- if length is None: - r = self.payload - self.payload = b'' - else: - r = self.payload[:length] - self.payload = self.payload[length:] - return r - - event = await receive() - payload = b'' - if event['type'] == 'http.request': - payload += event.get('body') or b'' - while event.get('more_body'): - event = await receive() - if event['type'] == 'http.request': - payload += event.get('body') or b'' - elif event['type'] == 'websocket.connect': - pass - else: - return {} - - raw_uri = scope['path'].encode('utf-8') - if 'query_string' in scope and scope['query_string']: - raw_uri += b'?' + scope['query_string'] - environ = { - 'wsgi.input': AwaitablePayload(payload), - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'asgi', - 'REQUEST_METHOD': scope.get('method', 'GET'), - 'PATH_INFO': scope['path'], - 'QUERY_STRING': scope.get('query_string', b'').decode('utf-8'), - 'RAW_URI': raw_uri.decode('utf-8'), - 'SCRIPT_NAME': '', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'asgi', - 'SERVER_PORT': '0', - 'asgi.receive': receive, - 'asgi.send': send, - 'asgi.scope': scope, - } - - for hdr_name, hdr_value in scope['headers']: - hdr_name = hdr_name.upper().decode('utf-8') - hdr_value = hdr_value.decode('utf-8') - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - if key in environ: - hdr_value = '%s,%s' % (environ[key], hdr_value) - - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - return environ - - -async def make_response(status, headers, payload, environ): - headers = [(h[0].encode('utf-8'), h[1].encode('utf-8')) for h in headers] - if environ['asgi.scope']['type'] == 'websocket': - if status.startswith('200 '): - await environ['asgi.send']({'type': 'websocket.accept', - 'headers': headers}) - else: - if payload: - reason = payload.decode('utf-8') \ - if isinstance(payload, bytes) else str(payload) - await environ['asgi.send']({'type': 'websocket.close', - 'reason': reason}) - else: - await environ['asgi.send']({'type': 'websocket.close'}) - return - - await environ['asgi.send']({'type': 'http.response.start', - 'status': int(status.split(' ')[0]), - 'headers': headers}) - await environ['asgi.send']({'type': 'http.response.body', - 'body': payload}) - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides an asgi WebSocket interface that is - somewhat compatible with eventlet's implementation. 
- """ - def __init__(self, handler): - self.handler = handler - self.asgi_receive = None - self.asgi_send = None - - async def __call__(self, environ): - self.asgi_receive = environ['asgi.receive'] - self.asgi_send = environ['asgi.send'] - await self.asgi_send({'type': 'websocket.accept'}) - await self.handler(self) - - async def close(self): - await self.asgi_send({'type': 'websocket.close'}) - - async def send(self, message): - msg_bytes = None - msg_text = None - if isinstance(message, bytes): - msg_bytes = message - else: - msg_text = message - await self.asgi_send({'type': 'websocket.send', - 'bytes': msg_bytes, - 'text': msg_text}) - - async def wait(self): - event = await self.asgi_receive() - if event['type'] != 'websocket.receive': - raise IOError() - return event.get('bytes') or event.get('text') - - -_async = { - 'asyncio': True, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': WebSocket, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/eventlet.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/eventlet.py deleted file mode 100644 index 9be3797..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/eventlet.py +++ /dev/null @@ -1,30 +0,0 @@ -from __future__ import absolute_import - -from eventlet.green.threading import Thread, Event -from eventlet import queue -from eventlet import sleep -from eventlet.websocket import WebSocketWSGI as _WebSocketWSGI - - -class WebSocketWSGI(_WebSocketWSGI): - def __init__(self, *args, **kwargs): - super(WebSocketWSGI, self).__init__(*args, **kwargs) - self._sock = None - - def __call__(self, environ, start_response): - if 'eventlet.input' not in environ: - raise RuntimeError('You need to use the eventlet server. ' - 'See the Deployment section of the ' - 'documentation for more information.') - self._sock = environ['eventlet.input'].get_socket() - return super(WebSocketWSGI, self).__call__(environ, start_response) - - -_async = { - 'thread': Thread, - 'queue': queue.Queue, - 'queue_empty': queue.Empty, - 'event': Event, - 'websocket': WebSocketWSGI, - 'sleep': sleep, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/gevent.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/gevent.py deleted file mode 100644 index 024dd0a..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/gevent.py +++ /dev/null @@ -1,63 +0,0 @@ -from __future__ import absolute_import - -import gevent -from gevent import queue -from gevent.event import Event -try: - import geventwebsocket # noqa - _websocket_available = True -except ImportError: - _websocket_available = False - - -class Thread(gevent.Greenlet): # pragma: no cover - """ - This wrapper class provides gevent Greenlet interface that is compatible - with the standard library's Thread class. - """ - def __init__(self, target, args=[], kwargs={}): - super(Thread, self).__init__(target, *args, **kwargs) - - def _run(self): - return self.run() - - -class WebSocketWSGI(object): # pragma: no cover - """ - This wrapper class provides a gevent WebSocket interface that is - compatible with eventlet's implementation. - """ - def __init__(self, app): - self.app = app - - def __call__(self, environ, start_response): - if 'wsgi.websocket' not in environ: - raise RuntimeError('You need to use the gevent-websocket server. 
' - 'See the Deployment section of the ' - 'documentation for more information.') - self._sock = environ['wsgi.websocket'] - self.environ = environ - self.version = self._sock.version - self.path = self._sock.path - self.origin = self._sock.origin - self.protocol = self._sock.protocol - return self.app(self) - - def close(self): - return self._sock.close() - - def send(self, message): - return self._sock.send(message) - - def wait(self): - return self._sock.receive() - - -_async = { - 'thread': Thread, - 'queue': queue.JoinableQueue, - 'queue_empty': queue.Empty, - 'event': Event, - 'websocket': WebSocketWSGI if _websocket_available else None, - 'sleep': gevent.sleep, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/gevent_uwsgi.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/gevent_uwsgi.py deleted file mode 100644 index 9a5cc0c..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/gevent_uwsgi.py +++ /dev/null @@ -1,167 +0,0 @@ -import gevent -from gevent import queue -from gevent.event import Event -from gevent import selectors -import uwsgi -_websocket_available = hasattr(uwsgi, 'websocket_handshake') - - -class Thread(gevent.Greenlet): # pragma: no cover - """ - This wrapper class provides gevent Greenlet interface that is compatible - with the standard library's Thread class. - """ - def __init__(self, target, args=[], kwargs={}): - super(Thread, self).__init__(target, *args, **kwargs) - - def _run(self): - return self.run() - - -class uWSGIWebSocket(object): # pragma: no cover - """ - This wrapper class provides a uWSGI WebSocket interface that is - compatible with eventlet's implementation. - """ - def __init__(self, app): - self.app = app - self._sock = None - self.received_messages = [] - - def __call__(self, environ, start_response): - self._sock = uwsgi.connection_fd() - self.environ = environ - - uwsgi.websocket_handshake() - - self._req_ctx = None - if hasattr(uwsgi, 'request_context'): - # uWSGI >= 2.1.x with support for api access across-greenlets - self._req_ctx = uwsgi.request_context() - else: - # use event and queue for sending messages - self._event = Event() - self._send_queue = queue.Queue() - - # spawn a select greenlet - def select_greenlet_runner(fd, event): - """Sets event when data becomes available to read on fd.""" - sel = selectors.DefaultSelector() - sel.register(fd, selectors.EVENT_READ) - try: - while True: - sel.select() - event.set() - except gevent.GreenletExit: - sel.unregister(fd) - self._select_greenlet = gevent.spawn( - select_greenlet_runner, - self._sock, - self._event) - - self.app(self) - - def close(self): - """Disconnects uWSGI from the client.""" - if self._req_ctx is None: - # better kill it here in case wait() is not called again - self._select_greenlet.kill() - self._event.set() - uwsgi.disconnect() - - def _send(self, msg): - """Transmits message either in binary or UTF-8 text mode, - depending on its type.""" - if isinstance(msg, bytes): - method = uwsgi.websocket_send_binary - else: - method = uwsgi.websocket_send - if self._req_ctx is not None: - method(msg, request_context=self._req_ctx) - else: - method(msg) - - def _decode_received(self, msg): - """Returns either bytes or str, depending on message type.""" - if not isinstance(msg, bytes): - # already decoded - do nothing - return msg - # only decode from utf-8 if message is not binary data - type = ord(msg[0:1]) - if type >= 48: # no binary - return msg.decode('utf-8') - # binary message, don't try 
to decode - return msg - - def send(self, msg): - """Queues a message for sending. Real transmission is done in - wait method. - Sends directly if uWSGI version is new enough.""" - if self._req_ctx is not None: - self._send(msg) - else: - self._send_queue.put(msg) - self._event.set() - - def wait(self): - """Waits and returns received messages. - If running in compatibility mode for older uWSGI versions, - it also sends messages that have been queued by send(). - A return value of None means that connection was closed. - This must be called repeatedly. For uWSGI < 2.1.x it must - be called from the main greenlet.""" - while True: - if self._req_ctx is not None: - try: - msg = uwsgi.websocket_recv(request_context=self._req_ctx) - except IOError: # connection closed - self.close() - return None - return self._decode_received(msg) - else: - if self.received_messages: - return self.received_messages.pop(0) - - # we wake up at least every 3 seconds to let uWSGI - # do its ping/ponging - event_set = self._event.wait(timeout=3) - if event_set: - self._event.clear() - # maybe there is something to send - msgs = [] - while True: - try: - msgs.append(self._send_queue.get(block=False)) - except gevent.queue.Empty: - break - for msg in msgs: - try: - self._send(msg) - except IOError: - self.close() - return None - # maybe there is something to receive, if not, at least - # ensure uWSGI does its ping/ponging - while True: - try: - msg = uwsgi.websocket_recv_nb() - except IOError: # connection closed - self.close() - return None - if msg: # message available - self.received_messages.append( - self._decode_received(msg)) - else: - break - if self.received_messages: - return self.received_messages.pop(0) - - -_async = { - 'thread': Thread, - 'queue': queue.JoinableQueue, - 'queue_empty': queue.Empty, - 'event': Event, - 'websocket': uWSGIWebSocket if _websocket_available else None, - 'sleep': gevent.sleep, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/sanic.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/sanic.py deleted file mode 100644 index 88b3e5f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/sanic.py +++ /dev/null @@ -1,147 +0,0 @@ -import sys -from urllib.parse import urlsplit - -try: # pragma: no cover - from sanic.response import HTTPResponse - try: - from sanic.server.protocols.websocket_protocol import WebSocketProtocol - except ImportError: - print('yay') - from sanic.websocket import WebSocketProtocol -except ImportError: - HTTPResponse = None - WebSocketProtocol = None - - -def create_route(app, engineio_server, engineio_endpoint): # pragma: no cover - """This function sets up the engine.io endpoint as a route for the - application. - - Note that both GET and POST requests must be hooked up on the engine.io - endpoint. - """ - app.add_route(engineio_server.handle_request, engineio_endpoint, - methods=['GET', 'POST', 'OPTIONS']) - try: - app.enable_websocket() - except AttributeError: - # ignore, this version does not support websocket - pass - - -def translate_request(request): # pragma: no cover - """This function takes the arguments passed to the request handler and - uses them to generate a WSGI compatible environ dictionary. 
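The create_route() helper above is what the server's attach() method invokes for this async mode. A minimal, illustrative sketch of wiring an AsyncServer into a Sanic application (the app name, host and port are arbitrary, and this assumes a Sanic release the driver supports)::

    from sanic import Sanic
    import engineio

    eio = engineio.AsyncServer(async_mode='sanic')
    app = Sanic('engineio_example')
    eio.attach(app)   # registers the GET/POST/OPTIONS engine.io route shown above

    if __name__ == '__main__':
        app.run(host='127.0.0.1', port=5000)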
- """ - class AwaitablePayload(object): - def __init__(self, payload): - self.payload = payload or b'' - - async def read(self, length=None): - if length is None: - r = self.payload - self.payload = b'' - else: - r = self.payload[:length] - self.payload = self.payload[length:] - return r - - uri_parts = urlsplit(request.url) - environ = { - 'wsgi.input': AwaitablePayload(request.body), - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'sanic', - 'REQUEST_METHOD': request.method, - 'QUERY_STRING': uri_parts.query or '', - 'RAW_URI': request.url, - 'SERVER_PROTOCOL': 'HTTP/' + request.version, - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'sanic', - 'SERVER_PORT': '0', - 'sanic.request': request - } - - for hdr_name, hdr_value in request.headers.items(): - hdr_name = hdr_name.upper() - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - if key in environ: - hdr_value = '%s,%s' % (environ[key], hdr_value) - - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - - path_info = uri_parts.path - - environ['PATH_INFO'] = path_info - environ['SCRIPT_NAME'] = '' - - return environ - - -def make_response(status, headers, payload, environ): # pragma: no cover - """This function generates an appropriate response object for this async - mode. - """ - headers_dict = {} - content_type = None - for h in headers: - if h[0].lower() == 'content-type': - content_type = h[1] - else: - headers_dict[h[0]] = h[1] - return HTTPResponse(body=payload, content_type=content_type, - status=int(status.split()[0]), headers=headers_dict) - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides a sanic WebSocket interface that is - somewhat compatible with eventlet's implementation. 
- """ - def __init__(self, handler): - self.handler = handler - self._sock = None - - async def __call__(self, environ): - request = environ['sanic.request'] - protocol = request.transport.get_protocol() - self._sock = await protocol.websocket_handshake(request) - - self.environ = environ - await self.handler(self) - - async def close(self): - await self._sock.close() - - async def send(self, message): - await self._sock.send(message) - - async def wait(self): - data = await self._sock.recv() - if not isinstance(data, bytes) and \ - not isinstance(data, str): - raise IOError() - return data - - -_async = { - 'asyncio': True, - 'create_route': create_route, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': WebSocket if WebSocketProtocol else None, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/threading.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/threading.py deleted file mode 100644 index de8df79..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/threading.py +++ /dev/null @@ -1,48 +0,0 @@ -from __future__ import absolute_import -import queue -import threading -import time - -try: - from simple_websocket import Server, ConnectionClosed - _websocket_available = True -except ImportError: # pragma: no cover - _websocket_available = False - - -class WebSocketWSGI(object): # pragma: no cover - """ - This wrapper class provides a threading WebSocket interface that is - compatible with eventlet's implementation. - """ - def __init__(self, app): - self.app = app - - def __call__(self, environ, start_response): - self.ws = Server(environ) - return self.app(self) - - def close(self): - return self.ws.close() - - def send(self, message): - try: - return self.ws.send(message) - except ConnectionClosed: - raise IOError() - - def wait(self): - try: - return self.ws.receive() - except ConnectionClosed: - return None - - -_async = { - 'thread': threading.Thread, - 'queue': queue.Queue, - 'queue_empty': queue.Empty, - 'event': threading.Event, - 'websocket': WebSocketWSGI if _websocket_available else None, - 'sleep': time.sleep, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/tornado.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/tornado.py deleted file mode 100644 index eb1c4de..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/async_drivers/tornado.py +++ /dev/null @@ -1,182 +0,0 @@ -import asyncio -import sys -from urllib.parse import urlsplit -from .. 
import exceptions - -import tornado.web -import tornado.websocket - - -def get_tornado_handler(engineio_server): - class Handler(tornado.websocket.WebSocketHandler): # pragma: no cover - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - if isinstance(engineio_server.cors_allowed_origins, str): - if engineio_server.cors_allowed_origins == '*': - self.allowed_origins = None - else: - self.allowed_origins = [ - engineio_server.cors_allowed_origins] - else: - self.allowed_origins = engineio_server.cors_allowed_origins - self.receive_queue = asyncio.Queue() - - async def get(self, *args, **kwargs): - if self.request.headers.get('Upgrade', '').lower() == 'websocket': - ret = super().get(*args, **kwargs) - if asyncio.iscoroutine(ret): - await ret - else: - await engineio_server.handle_request(self) - - async def open(self, *args, **kwargs): - # this is the handler for the websocket request - asyncio.ensure_future(engineio_server.handle_request(self)) - - async def post(self, *args, **kwargs): - await engineio_server.handle_request(self) - - async def options(self, *args, **kwargs): - await engineio_server.handle_request(self) - - async def on_message(self, message): - await self.receive_queue.put(message) - - async def get_next_message(self): - return await self.receive_queue.get() - - def on_close(self): - self.receive_queue.put_nowait(None) - - def check_origin(self, origin): - if self.allowed_origins is None or origin in self.allowed_origins: - return True - return super().check_origin(origin) - - def get_compression_options(self): - # enable compression - return {} - - return Handler - - -def translate_request(handler): - """This function takes the arguments passed to the request handler and - uses them to generate a WSGI compatible environ dictionary. - """ - class AwaitablePayload(object): - def __init__(self, payload): - self.payload = payload or b'' - - async def read(self, length=None): - if length is None: - r = self.payload - self.payload = b'' - else: - r = self.payload[:length] - self.payload = self.payload[length:] - return r - - payload = handler.request.body - - uri_parts = urlsplit(handler.request.path) - full_uri = handler.request.path - if handler.request.query: # pragma: no cover - full_uri += '?' + handler.request.query - environ = { - 'wsgi.input': AwaitablePayload(payload), - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'aiohttp', - 'REQUEST_METHOD': handler.request.method, - 'QUERY_STRING': handler.request.query or '', - 'RAW_URI': full_uri, - 'SERVER_PROTOCOL': 'HTTP/%s' % handler.request.version, - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'aiohttp', - 'SERVER_PORT': '0', - 'tornado.handler': handler - } - - for hdr_name, hdr_value in handler.request.headers.items(): - hdr_name = hdr_name.upper() - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - - path_info = uri_parts.path - - environ['PATH_INFO'] = path_info - environ['SCRIPT_NAME'] = '' - - return environ - - -def make_response(status, headers, payload, environ): - """This function generates an appropriate response object for this async - mode. 
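get_tornado_handler() above returns a tornado.websocket.WebSocketHandler subclass that can be mounted like any other handler. A hedged sketch of a Tornado deployment (URL pattern and port are illustrative)::

    import engineio
    import tornado.ioloop
    import tornado.web

    eio = engineio.AsyncServer(async_mode='tornado')
    app = tornado.web.Application([
        (r'/engine.io/', engineio.get_tornado_handler(eio)),
    ])

    if __name__ == '__main__':
        app.listen(5000)
        tornado.ioloop.IOLoop.current().start()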
- """ - tornado_handler = environ['tornado.handler'] - try: - tornado_handler.set_status(int(status.split()[0])) - except RuntimeError: # pragma: no cover - # for websocket connections Tornado does not accept a response, since - # it already emitted the 101 status code - return - for header, value in headers: - tornado_handler.set_header(header, value) - tornado_handler.write(payload) - tornado_handler.finish() - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides a tornado WebSocket interface that is - somewhat compatible with eventlet's implementation. - """ - def __init__(self, handler): - self.handler = handler - self.tornado_handler = None - - async def __call__(self, environ): - self.tornado_handler = environ['tornado.handler'] - self.environ = environ - await self.handler(self) - - async def close(self): - self.tornado_handler.close() - - async def send(self, message): - try: - self.tornado_handler.write_message( - message, binary=isinstance(message, bytes)) - except tornado.websocket.WebSocketClosedError: - raise exceptions.EngineIOError() - - async def wait(self): - msg = await self.tornado_handler.get_next_message() - if not isinstance(msg, bytes) and \ - not isinstance(msg, str): - raise IOError() - return msg - - -_async = { - 'asyncio': True, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': WebSocket, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/asyncio_client.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/asyncio_client.py deleted file mode 100644 index cf7ae1a..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/asyncio_client.py +++ /dev/null @@ -1,640 +0,0 @@ -import asyncio -import signal -import ssl -import threading - -try: - import aiohttp -except ImportError: # pragma: no cover - aiohttp = None - -from . import client -from . import exceptions -from . import packet -from . import payload - -async_signal_handler_set = False - - -def async_signal_handler(): - """SIGINT handler. - - Disconnect all active async clients. - """ - async def _handler(): # pragma: no cover - for c in client.connected_clients[:]: - if c.is_asyncio_based(): - await c.disconnect() - - # cancel all running tasks - tasks = [task for task in asyncio.all_tasks() if task is not - asyncio.current_task()] - for task in tasks: - task.cancel() - await asyncio.gather(*tasks, return_exceptions=True) - asyncio.get_event_loop().stop() - - asyncio.ensure_future(_handler()) - - -class AsyncClient(client.Client): - """An Engine.IO client for asyncio. - - This class implements a fully compliant Engine.IO web client with support - for websocket and long-polling transports, compatible with the asyncio - framework on Python 3.5 or newer. - - :param logger: To enable logging set to ``True`` or pass a logger object to - use. To disable logging set to ``False``. The default is - ``False``. Note that fatal errors are logged even when - ``logger`` is ``False``. - :param json: An alternative json module to use for encoding and decoding - packets. Custom json modules must have ``dumps`` and ``loads`` - functions that are compatible with the standard library - versions. - :param request_timeout: A timeout in seconds for requests. The default is - 5 seconds. - :param http_session: an initialized ``aiohttp.ClientSession`` object to be - used when sending requests to the server. Use it if - you need to add special client options such as proxy - servers, SSL certificates, etc. 
- :param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to - skip SSL certificate verification, allowing - connections to servers with self signed certificates. - The default is ``True``. - :param handle_sigint: Set to ``True`` to automatically handle disconnection - when the process is interrupted, or to ``False`` to - leave interrupt handling to the calling application. - Interrupt handling can only be enabled when the - client instance is created in the main thread. - :param websocket_extra_options: Dictionary containing additional keyword - arguments passed to - ``aiohttp.ws_connect()``. - """ - - def is_asyncio_based(self): - return True - - async def connect(self, url, headers=None, transports=None, - engineio_path='engine.io'): - """Connect to an Engine.IO server. - - :param url: The URL of the Engine.IO server. It can include custom - query string parameters if required by the server. - :param headers: A dictionary with custom headers to send with the - connection request. - :param transports: The list of allowed transports. Valid transports - are ``'polling'`` and ``'websocket'``. If not - given, the polling transport is connected first, - then an upgrade to websocket is attempted. - :param engineio_path: The endpoint where the Engine.IO server is - installed. The default value is appropriate for - most cases. - - Note: this method is a coroutine. - - Example usage:: - - eio = engineio.Client() - await eio.connect('http://localhost:5000') - """ - global async_signal_handler_set - if self.handle_sigint and not async_signal_handler_set and \ - threading.current_thread() == threading.main_thread(): - try: - asyncio.get_event_loop().add_signal_handler( - signal.SIGINT, async_signal_handler) - except NotImplementedError: # pragma: no cover - self.logger.warning('Signal handler is unsupported') - async_signal_handler_set = True - - if self.state != 'disconnected': - raise ValueError('Client is not in a disconnected state') - valid_transports = ['polling', 'websocket'] - if transports is not None: - if isinstance(transports, str): - transports = [transports] - transports = [transport for transport in transports - if transport in valid_transports] - if not transports: - raise ValueError('No valid transports provided') - self.transports = transports or valid_transports - self.queue = self.create_queue() - return await getattr(self, '_connect_' + self.transports[0])( - url, headers or {}, engineio_path) - - async def wait(self): - """Wait until the connection with the server ends. - - Client applications can use this function to block the main thread - during the life of the connection. - - Note: this method is a coroutine. - """ - if self.read_loop_task: - await self.read_loop_task - - async def send(self, data): - """Send a message to the server. - - :param data: The data to send to the server. Data can be of type - ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` - or ``dict``, the data will be serialized as JSON. - - Note: this method is a coroutine. - """ - await self._send_packet(packet.Packet(packet.MESSAGE, data=data)) - - async def disconnect(self, abort=False): - """Disconnect from the server. - - :param abort: If set to ``True``, do not wait for background tasks - associated with the connection to end. - - Note: this method is a coroutine. 
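Putting the client methods documented above together, a typical (illustrative) usage pattern looks like the following; the URL and handler bodies are placeholders::

    import asyncio
    import engineio

    eio = engineio.AsyncClient()

    @eio.on('connect')
    async def on_connect():
        print('connected')
        await eio.send('hello')          # send() serializes lists/dicts as JSON

    @eio.on('message')
    async def on_message(data):
        print('received:', data)

    async def main():
        await eio.connect('http://localhost:5000')
        await eio.wait()                 # block until the connection ends

    if __name__ == '__main__':
        asyncio.run(main())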
- """ - if self.state == 'connected': - await self._send_packet(packet.Packet(packet.CLOSE)) - await self.queue.put(None) - self.state = 'disconnecting' - await self._trigger_event('disconnect', run_async=False) - if self.current_transport == 'websocket': - await self.ws.close() - if not abort: - await self.read_loop_task - self.state = 'disconnected' - try: - client.connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - await self._reset() - - def start_background_task(self, target, *args, **kwargs): - """Start a background task. - - This is a utility function that applications can use to start a - background task. - - :param target: the target function to execute. - :param args: arguments to pass to the function. - :param kwargs: keyword arguments to pass to the function. - - The return value is a ``asyncio.Task`` object. - """ - return asyncio.ensure_future(target(*args, **kwargs)) - - async def sleep(self, seconds=0): - """Sleep for the requested amount of time. - - Note: this method is a coroutine. - """ - return await asyncio.sleep(seconds) - - def create_queue(self): - """Create a queue object.""" - q = asyncio.Queue() - q.Empty = asyncio.QueueEmpty - return q - - def create_event(self): - """Create an event object.""" - return asyncio.Event() - - async def _reset(self): - super()._reset() - if not self.external_http: # pragma: no cover - if self.http and not self.http.closed: - await self.http.close() - - def __del__(self): # pragma: no cover - # try to close the aiohttp session if it is still open - if self.http and not self.http.closed: - try: - loop = asyncio.get_event_loop() - if loop.is_running(): - loop.ensure_future(self.http.close()) - else: - loop.run_until_complete(self.http.close()) - except: - pass - - async def _connect_polling(self, url, headers, engineio_path): - """Establish a long-polling connection to the Engine.IO server.""" - if aiohttp is None: # pragma: no cover - self.logger.error('aiohttp not installed -- cannot make HTTP ' - 'requests!') - return - self.base_url = self._get_engineio_url(url, engineio_path, 'polling') - self.logger.info('Attempting polling connection to ' + self.base_url) - r = await self._send_request( - 'GET', self.base_url + self._get_url_timestamp(), headers=headers, - timeout=self.request_timeout) - if r is None or isinstance(r, str): - await self._reset() - raise exceptions.ConnectionError( - r or 'Connection refused by the server') - if r.status < 200 or r.status >= 300: - await self._reset() - try: - arg = await r.json() - except aiohttp.ClientError: - arg = None - raise exceptions.ConnectionError( - 'Unexpected status code {} in server response'.format( - r.status), arg) - try: - p = payload.Payload(encoded_payload=(await r.read()).decode( - 'utf-8')) - except ValueError: - raise exceptions.ConnectionError( - 'Unexpected response from server') from None - open_packet = p.packets[0] - if open_packet.packet_type != packet.OPEN: - raise exceptions.ConnectionError( - 'OPEN packet not returned by server') - self.logger.info( - 'Polling connection accepted with ' + str(open_packet.data)) - self.sid = open_packet.data['sid'] - self.upgrades = open_packet.data['upgrades'] - self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0 - self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0 - self.current_transport = 'polling' - self.base_url += '&sid=' + self.sid - - self.state = 'connected' - client.connected_clients.append(self) - await self._trigger_event('connect', run_async=False) - - for pkt 
in p.packets[1:]: - await self._receive_packet(pkt) - - if 'websocket' in self.upgrades and 'websocket' in self.transports: - # attempt to upgrade to websocket - if await self._connect_websocket(url, headers, engineio_path): - # upgrade to websocket succeeded, we're done here - return - - self.write_loop_task = self.start_background_task(self._write_loop) - self.read_loop_task = self.start_background_task( - self._read_loop_polling) - - async def _connect_websocket(self, url, headers, engineio_path): - """Establish or upgrade to a WebSocket connection with the server.""" - if aiohttp is None: # pragma: no cover - self.logger.error('aiohttp package not installed') - return False - websocket_url = self._get_engineio_url(url, engineio_path, - 'websocket') - if self.sid: - self.logger.info( - 'Attempting WebSocket upgrade to ' + websocket_url) - upgrade = True - websocket_url += '&sid=' + self.sid - else: - upgrade = False - self.base_url = websocket_url - self.logger.info( - 'Attempting WebSocket connection to ' + websocket_url) - - if self.http is None or self.http.closed: # pragma: no cover - self.http = aiohttp.ClientSession() - - # extract any new cookies passed in a header so that they can also be - # sent the the WebSocket route - cookies = {} - for header, value in headers.items(): - if header.lower() == 'cookie': - cookies = dict( - [cookie.split('=', 1) for cookie in value.split('; ')]) - del headers[header] - break - self.http.cookie_jar.update_cookies(cookies) - - extra_options = {'timeout': self.request_timeout} - if not self.ssl_verify: - ssl_context = ssl.create_default_context() - ssl_context.check_hostname = False - ssl_context.verify_mode = ssl.CERT_NONE - extra_options['ssl'] = ssl_context - - # combine internally generated options with the ones supplied by the - # caller. The caller's options take precedence. 
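The caller-supplied options referred to in the comment above come from the client's websocket_extra_options constructor argument, which is forwarded to aiohttp's ws_connect(). A hedged example (the header name and heartbeat value are arbitrary)::

    import engineio

    eio = engineio.AsyncClient(websocket_extra_options={
        'headers': {'X-Custom-Header': 'value'},   # merged into the generated headers, winning on conflict
        'heartbeat': 30.0,                         # aiohttp-level ping interval
    })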
- headers.update(self.websocket_extra_options.pop('headers', {})) - extra_options['headers'] = headers - extra_options.update(self.websocket_extra_options) - - try: - ws = await self.http.ws_connect( - websocket_url + self._get_url_timestamp(), **extra_options) - except (aiohttp.client_exceptions.WSServerHandshakeError, - aiohttp.client_exceptions.ServerConnectionError, - aiohttp.client_exceptions.ClientConnectionError): - if upgrade: - self.logger.warning( - 'WebSocket upgrade failed: connection error') - return False - else: - raise exceptions.ConnectionError('Connection error') - if upgrade: - p = packet.Packet(packet.PING, data='probe').encode() - try: - await ws.send_str(p) - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected send exception: %s', - str(e)) - return False - try: - p = (await ws.receive()).data - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected recv exception: %s', - str(e)) - return False - pkt = packet.Packet(encoded_packet=p) - if pkt.packet_type != packet.PONG or pkt.data != 'probe': - self.logger.warning( - 'WebSocket upgrade failed: no PONG packet') - return False - p = packet.Packet(packet.UPGRADE).encode() - try: - await ws.send_str(p) - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected send exception: %s', - str(e)) - return False - self.current_transport = 'websocket' - self.logger.info('WebSocket upgrade was successful') - else: - try: - p = (await ws.receive()).data - except Exception as e: # pragma: no cover - raise exceptions.ConnectionError( - 'Unexpected recv exception: ' + str(e)) - open_packet = packet.Packet(encoded_packet=p) - if open_packet.packet_type != packet.OPEN: - raise exceptions.ConnectionError('no OPEN packet') - self.logger.info( - 'WebSocket connection accepted with ' + str(open_packet.data)) - self.sid = open_packet.data['sid'] - self.upgrades = open_packet.data['upgrades'] - self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0 - self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0 - self.current_transport = 'websocket' - - self.state = 'connected' - client.connected_clients.append(self) - await self._trigger_event('connect', run_async=False) - - self.ws = ws - self.write_loop_task = self.start_background_task(self._write_loop) - self.read_loop_task = self.start_background_task( - self._read_loop_websocket) - return True - - async def _receive_packet(self, pkt): - """Handle incoming packets from the server.""" - packet_name = packet.packet_names[pkt.packet_type] \ - if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN' - self.logger.info( - 'Received packet %s data %s', packet_name, - pkt.data if not isinstance(pkt.data, bytes) else '') - if pkt.packet_type == packet.MESSAGE: - await self._trigger_event('message', pkt.data, run_async=True) - elif pkt.packet_type == packet.PING: - await self._send_packet(packet.Packet(packet.PONG, pkt.data)) - elif pkt.packet_type == packet.CLOSE: - await self.disconnect(abort=True) - elif pkt.packet_type == packet.NOOP: - pass - else: - self.logger.error('Received unexpected packet of type %s', - pkt.packet_type) - - async def _send_packet(self, pkt): - """Queue a packet to be sent to the server.""" - if self.state != 'connected': - return - await self.queue.put(pkt) - self.logger.info( - 'Sending packet %s data %s', - packet.packet_names[pkt.packet_type], - pkt.data if not isinstance(pkt.data, bytes) else 
'') - - async def _send_request( - self, method, url, headers=None, body=None, - timeout=None): # pragma: no cover - if self.http is None or self.http.closed: - self.http = aiohttp.ClientSession() - http_method = getattr(self.http, method.lower()) - - try: - if not self.ssl_verify: - return await http_method( - url, headers=headers, data=body, - timeout=aiohttp.ClientTimeout(total=timeout), ssl=False) - else: - return await http_method( - url, headers=headers, data=body, - timeout=aiohttp.ClientTimeout(total=timeout)) - - except (aiohttp.ClientError, asyncio.TimeoutError) as exc: - self.logger.info('HTTP %s request to %s failed with error %s.', - method, url, exc) - return str(exc) - - async def _trigger_event(self, event, *args, **kwargs): - """Invoke an event handler.""" - run_async = kwargs.pop('run_async', False) - ret = None - if event in self.handlers: - if asyncio.iscoroutinefunction(self.handlers[event]) is True: - if run_async: - return self.start_background_task(self.handlers[event], - *args) - else: - try: - ret = await self.handlers[event](*args) - except asyncio.CancelledError: # pragma: no cover - pass - except: - self.logger.exception(event + ' async handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - else: - if run_async: - async def async_handler(): - return self.handlers[event](*args) - - return self.start_background_task(async_handler) - else: - try: - ret = self.handlers[event](*args) - except: - self.logger.exception(event + ' handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - return ret - - async def _read_loop_polling(self): - """Read packets by polling the Engine.IO server.""" - while self.state == 'connected': - self.logger.info( - 'Sending polling GET request to ' + self.base_url) - r = await self._send_request( - 'GET', self.base_url + self._get_url_timestamp(), - timeout=max(self.ping_interval, self.ping_timeout) + 5) - if r is None or isinstance(r, str): - self.logger.warning( - r or 'Connection refused by the server, aborting') - await self.queue.put(None) - break - if r.status < 200 or r.status >= 300: - self.logger.warning('Unexpected status code %s in server ' - 'response, aborting', r.status) - await self.queue.put(None) - break - try: - p = payload.Payload(encoded_payload=(await r.read()).decode( - 'utf-8')) - except ValueError: - self.logger.warning( - 'Unexpected packet from server, aborting') - await self.queue.put(None) - break - for pkt in p.packets: - await self._receive_packet(pkt) - - self.logger.info('Waiting for write loop task to end') - await self.write_loop_task - if self.state == 'connected': - await self._trigger_event('disconnect', run_async=False) - try: - client.connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - await self._reset() - self.logger.info('Exiting read loop task') - - async def _read_loop_websocket(self): - """Read packets from the Engine.IO WebSocket connection.""" - while self.state == 'connected': - p = None - try: - p = await asyncio.wait_for( - self.ws.receive(), - timeout=self.ping_interval + self.ping_timeout) - if not isinstance(p.data, (str, bytes)): # pragma: no cover - self.logger.warning( - 'Server sent unexpected packet %s data %s, aborting', - str(p.type), str(p.data)) - await self.queue.put(None) - break # the connection is broken - p = p.data - except asyncio.TimeoutError: - self.logger.warning( - 'Server has stopped communicating, 
aborting') - await self.queue.put(None) - break - except aiohttp.client_exceptions.ServerDisconnectedError: - self.logger.info( - 'Read loop: WebSocket connection was closed, aborting') - await self.queue.put(None) - break - except Exception as e: - self.logger.info( - 'Unexpected error receiving packet: "%s", aborting', - str(e)) - await self.queue.put(None) - break - try: - pkt = packet.Packet(encoded_packet=p) - except Exception as e: # pragma: no cover - self.logger.info( - 'Unexpected error decoding packet: "%s", aborting', str(e)) - await self.queue.put(None) - break - await self._receive_packet(pkt) - - self.logger.info('Waiting for write loop task to end') - await self.write_loop_task - if self.state == 'connected': - await self._trigger_event('disconnect', run_async=False) - try: - client.connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - await self._reset() - self.logger.info('Exiting read loop task') - - async def _write_loop(self): - """This background task sends packages to the server as they are - pushed to the send queue. - """ - while self.state == 'connected': - # to simplify the timeout handling, use the maximum of the - # ping interval and ping timeout as timeout, with an extra 5 - # seconds grace period - timeout = max(self.ping_interval, self.ping_timeout) + 5 - packets = None - try: - packets = [await asyncio.wait_for(self.queue.get(), timeout)] - except (self.queue.Empty, asyncio.TimeoutError): - self.logger.error('packet queue is empty, aborting') - break - except asyncio.CancelledError: # pragma: no cover - break - if packets == [None]: - self.queue.task_done() - packets = [] - else: - while True: - try: - packets.append(self.queue.get_nowait()) - except self.queue.Empty: - break - if packets[-1] is None: - packets = packets[:-1] - self.queue.task_done() - break - if not packets: - # empty packet list returned -> connection closed - break - if self.current_transport == 'polling': - p = payload.Payload(packets=packets) - r = await self._send_request( - 'POST', self.base_url, body=p.encode(), - headers={'Content-Type': 'text/plain'}, - timeout=self.request_timeout) - for pkt in packets: - self.queue.task_done() - if r is None or isinstance(r, str): - self.logger.warning( - r or 'Connection refused by the server, aborting') - break - if r.status < 200 or r.status >= 300: - self.logger.warning('Unexpected status code %s in server ' - 'response, aborting', r.status) - await self._reset() - break - else: - # websocket - try: - for pkt in packets: - if pkt.binary: - await self.ws.send_bytes(pkt.encode()) - else: - await self.ws.send_str(pkt.encode()) - self.queue.task_done() - except (aiohttp.client_exceptions.ServerDisconnectedError, - BrokenPipeError, OSError): - self.logger.info( - 'Write loop: WebSocket connection was closed, ' - 'aborting') - break - self.logger.info('Exiting write loop task') diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/asyncio_server.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/asyncio_server.py deleted file mode 100644 index 66cf5f9..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/asyncio_server.py +++ /dev/null @@ -1,513 +0,0 @@ -import asyncio -import urllib - -from . import exceptions -from . import packet -from . import server -from . import asyncio_socket - - -class AsyncServer(server.Server): - """An Engine.IO server for asyncio. 
- - This class implements a fully compliant Engine.IO web server with support - for websocket and long-polling transports, compatible with the asyncio - framework on Python 3.5 or newer. - - :param async_mode: The asynchronous model to use. See the Deployment - section in the documentation for a description of the - available options. Valid async modes are "aiohttp", - "sanic", "tornado" and "asgi". If this argument is not - given, "aiohttp" is tried first, followed by "sanic", - "tornado", and finally "asgi". The first async mode that - has all its dependencies installed is the one that is - chosen. - :param ping_interval: The interval in seconds at which the server pings - the client. The default is 25 seconds. For advanced - control, a two element tuple can be given, where - the first number is the ping interval and the second - is a grace period added by the server. - :param ping_timeout: The time in seconds that the client waits for the - server to respond before disconnecting. The default - is 20 seconds. - :param max_http_buffer_size: The maximum size of a message. The default - is 1,000,000 bytes. - :param allow_upgrades: Whether to allow transport upgrades or not. - :param http_compression: Whether to compress packages when using the - polling transport. - :param compression_threshold: Only compress messages when their byte size - is greater than this value. - :param cookie: If set to a string, it is the name of the HTTP cookie the - server sends back tot he client containing the client - session id. If set to a dictionary, the ``'name'`` key - contains the cookie name and other keys define cookie - attributes, where the value of each attribute can be a - string, a callable with no arguments, or a boolean. If set - to ``None`` (the default), a cookie is not sent to the - client. - :param cors_allowed_origins: Origin or list of origins that are allowed to - connect to this server. Only the same origin - is allowed by default. Set this argument to - ``'*'`` to allow all origins, or to ``[]`` to - disable CORS handling. - :param cors_credentials: Whether credentials (cookies, authentication) are - allowed in requests to this server. - :param logger: To enable logging set to ``True`` or pass a logger object to - use. To disable logging set to ``False``. Note that fatal - errors are logged even when ``logger`` is ``False``. - :param json: An alternative json module to use for encoding and decoding - packets. Custom json modules must have ``dumps`` and ``loads`` - functions that are compatible with the standard library - versions. - :param async_handlers: If set to ``True``, run message event handlers in - non-blocking threads. To run handlers synchronously, - set to ``False``. The default is ``True``. - :param transports: The list of allowed transports. Valid transports - are ``'polling'`` and ``'websocket'``. Defaults to - ``['polling', 'websocket']``. - :param kwargs: Reserved for future extensions, any additional parameters - given as keyword arguments will be silently ignored. - """ - def is_asyncio_based(self): - return True - - def async_modes(self): - return ['aiohttp', 'sanic', 'tornado', 'asgi'] - - def attach(self, app, engineio_path='engine.io'): - """Attach the Engine.IO server to an application.""" - engineio_path = engineio_path.strip('/') - self._async['create_route'](app, self, '/{}/'.format(engineio_path)) - - async def send(self, sid, data): - """Send a message to a client. - - :param sid: The session id of the recipient client. 
- :param data: The data to send to the client. Data can be of type - ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` - or ``dict``, the data will be serialized as JSON. - - Note: this method is a coroutine. - """ - try: - socket = self._get_socket(sid) - except KeyError: - # the socket is not available - self.logger.warning('Cannot send to sid %s', sid) - return - await socket.send(packet.Packet(packet.MESSAGE, data=data)) - - async def get_session(self, sid): - """Return the user session for a client. - - :param sid: The session id of the client. - - The return value is a dictionary. Modifications made to this - dictionary are not guaranteed to be preserved. If you want to modify - the user session, use the ``session`` context manager instead. - """ - socket = self._get_socket(sid) - return socket.session - - async def save_session(self, sid, session): - """Store the user session for a client. - - :param sid: The session id of the client. - :param session: The session dictionary. - """ - socket = self._get_socket(sid) - socket.session = session - - def session(self, sid): - """Return the user session for a client with context manager syntax. - - :param sid: The session id of the client. - - This is a context manager that returns the user session dictionary for - the client. Any changes that are made to this dictionary inside the - context manager block are saved back to the session. Example usage:: - - @eio.on('connect') - def on_connect(sid, environ): - username = authenticate_user(environ) - if not username: - return False - with eio.session(sid) as session: - session['username'] = username - - @eio.on('message') - def on_message(sid, msg): - async with eio.session(sid) as session: - print('received message from ', session['username']) - """ - class _session_context_manager(object): - def __init__(self, server, sid): - self.server = server - self.sid = sid - self.session = None - - async def __aenter__(self): - self.session = await self.server.get_session(sid) - return self.session - - async def __aexit__(self, *args): - await self.server.save_session(sid, self.session) - - return _session_context_manager(self, sid) - - async def disconnect(self, sid=None): - """Disconnect a client. - - :param sid: The session id of the client to close. If this parameter - is not given, then all clients are closed. - - Note: this method is a coroutine. - """ - if sid is not None: - try: - socket = self._get_socket(sid) - except KeyError: # pragma: no cover - # the socket was already closed or gone - pass - else: - await socket.close() - if sid in self.sockets: # pragma: no cover - del self.sockets[sid] - else: - await asyncio.wait([asyncio.create_task(client.close()) - for client in self.sockets.values()]) - self.sockets = {} - - async def handle_request(self, *args, **kwargs): - """Handle an HTTP request from the client. - - This is the entry point of the Engine.IO application. This function - returns the HTTP response to deliver to the client. - - Note: this method is a coroutine. - """ - translate_request = self._async['translate_request'] - if asyncio.iscoroutinefunction(translate_request): - environ = await translate_request(*args, **kwargs) - else: - environ = translate_request(*args, **kwargs) - - if self.cors_allowed_origins != []: - # Validate the origin header if present - # This is important for WebSocket more than for HTTP, since - # browsers only apply CORS controls to HTTP. 
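The allowed origins consulted here are the ones given in the cors_allowed_origins argument described in the class docstring. A minimal sketch of restricting them at construction time (the origin value is an example)::

    import engineio

    # only requests whose Origin header matches are accepted
    eio = engineio.AsyncServer(async_mode='asgi',
                               cors_allowed_origins=['https://example.com'])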
- origin = environ.get('HTTP_ORIGIN') - if origin: - allowed_origins = self._cors_allowed_origins(environ) - if allowed_origins is not None and origin not in \ - allowed_origins: - self._log_error_once( - origin + ' is not an accepted origin.', 'bad-origin') - return await self._make_response( - self._bad_request( - origin + ' is not an accepted origin.'), - environ) - - method = environ['REQUEST_METHOD'] - query = urllib.parse.parse_qs(environ.get('QUERY_STRING', '')) - - sid = query['sid'][0] if 'sid' in query else None - jsonp = False - jsonp_index = None - - # make sure the client uses an allowed transport - transport = query.get('transport', ['polling'])[0] - if transport not in self.transports: - self._log_error_once('Invalid transport', 'bad-transport') - return await self._make_response( - self._bad_request('Invalid transport'), environ) - - # make sure the client speaks a compatible Engine.IO version - sid = query['sid'][0] if 'sid' in query else None - if sid is None and query.get('EIO') != ['4']: - self._log_error_once( - 'The client is using an unsupported version of the Socket.IO ' - 'or Engine.IO protocols', 'bad-version' - ) - return await self._make_response(self._bad_request( - 'The client is using an unsupported version of the Socket.IO ' - 'or Engine.IO protocols' - ), environ) - - if 'j' in query: - jsonp = True - try: - jsonp_index = int(query['j'][0]) - except (ValueError, KeyError, IndexError): - # Invalid JSONP index number - pass - - if jsonp and jsonp_index is None: - self._log_error_once('Invalid JSONP index number', - 'bad-jsonp-index') - r = self._bad_request('Invalid JSONP index number') - elif method == 'GET': - if sid is None: - # transport must be one of 'polling' or 'websocket'. - # if 'websocket', the HTTP_UPGRADE header must match. 
- upgrade_header = environ.get('HTTP_UPGRADE').lower() \ - if 'HTTP_UPGRADE' in environ else None - if transport == 'polling' \ - or transport == upgrade_header == 'websocket': - r = await self._handle_connect(environ, transport, - jsonp_index) - else: - self._log_error_once('Invalid websocket upgrade', - 'bad-upgrade') - r = self._bad_request('Invalid websocket upgrade') - else: - if sid not in self.sockets: - self._log_error_once('Invalid session ' + sid, 'bad-sid') - r = self._bad_request('Invalid session ' + sid) - else: - socket = self._get_socket(sid) - try: - packets = await socket.handle_get_request(environ) - if isinstance(packets, list): - r = self._ok(packets, jsonp_index=jsonp_index) - else: - r = packets - except exceptions.EngineIOError: - if sid in self.sockets: # pragma: no cover - await self.disconnect(sid) - r = self._bad_request() - if sid in self.sockets and self.sockets[sid].closed: - del self.sockets[sid] - elif method == 'POST': - if sid is None or sid not in self.sockets: - self._log_error_once('Invalid session ' + sid, 'bad-sid') - r = self._bad_request('Invalid session ' + sid) - else: - socket = self._get_socket(sid) - try: - await socket.handle_post_request(environ) - r = self._ok(jsonp_index=jsonp_index) - except exceptions.EngineIOError: - if sid in self.sockets: # pragma: no cover - await self.disconnect(sid) - r = self._bad_request() - except: # pragma: no cover - # for any other unexpected errors, we log the error - # and keep going - self.logger.exception('post request handler error') - r = self._ok(jsonp_index=jsonp_index) - elif method == 'OPTIONS': - r = self._ok() - else: - self.logger.warning('Method %s not supported', method) - r = self._method_not_found() - if not isinstance(r, dict): - return r - if self.http_compression and \ - len(r['response']) >= self.compression_threshold: - encodings = [e.split(';')[0].strip() for e in - environ.get('HTTP_ACCEPT_ENCODING', '').split(',')] - for encoding in encodings: - if encoding in self.compression_methods: - r['response'] = \ - getattr(self, '_' + encoding)(r['response']) - r['headers'] += [('Content-Encoding', encoding)] - break - return await self._make_response(r, environ) - - def start_background_task(self, target, *args, **kwargs): - """Start a background task using the appropriate async model. - - This is a utility function that applications can use to start a - background task using the method that is compatible with the - selected async mode. - - :param target: the target function to execute. - :param args: arguments to pass to the function. - :param kwargs: keyword arguments to pass to the function. - - The return value is a ``asyncio.Task`` object. - """ - return asyncio.ensure_future(target(*args, **kwargs)) - - async def sleep(self, seconds=0): - """Sleep for the requested amount of time using the appropriate async - model. - - This is a utility function that applications can use to put a task to - sleep without having to worry about using the correct call for the - selected async mode. - - Note: this method is a coroutine. - """ - return await asyncio.sleep(seconds) - - def create_queue(self, *args, **kwargs): - """Create a queue object using the appropriate async model. - - This is a utility function that applications can use to create a queue - without having to worry about using the correct call for the selected - async mode. For asyncio based async modes, this returns an instance of - ``asyncio.Queue``. 
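The helpers above (start_background_task(), sleep(), create_queue()) keep application code independent of the selected async mode. An illustrative sketch that starts a periodic job once the event loop is running; the guard and handler bodies are placeholders::

    import engineio

    eio = engineio.AsyncServer(async_mode='asgi')
    job_started = False

    async def periodic_job():
        while True:
            await eio.sleep(10)      # portable sleep helper
            # ... periodic work goes here ...

    @eio.on('connect')
    async def on_connect(sid, environ):
        global job_started
        if not job_started:
            job_started = True
            eio.start_background_task(periodic_job)   # returns an asyncio.Task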
- """ - return asyncio.Queue(*args, **kwargs) - - def get_queue_empty_exception(self): - """Return the queue empty exception for the appropriate async model. - - This is a utility function that applications can use to work with a - queue without having to worry about using the correct call for the - selected async mode. For asyncio based async modes, this returns an - instance of ``asyncio.QueueEmpty``. - """ - return asyncio.QueueEmpty - - def create_event(self, *args, **kwargs): - """Create an event object using the appropriate async model. - - This is a utility function that applications can use to create an - event without having to worry about using the correct call for the - selected async mode. For asyncio based async modes, this returns - an instance of ``asyncio.Event``. - """ - return asyncio.Event(*args, **kwargs) - - async def _make_response(self, response_dict, environ): - cors_headers = self._cors_headers(environ) - make_response = self._async['make_response'] - if asyncio.iscoroutinefunction(make_response): - response = await make_response( - response_dict['status'], - response_dict['headers'] + cors_headers, - response_dict['response'], environ) - else: - response = make_response( - response_dict['status'], - response_dict['headers'] + cors_headers, - response_dict['response'], environ) - return response - - async def _handle_connect(self, environ, transport, jsonp_index=None): - """Handle a client connection request.""" - if self.start_service_task: - # start the service task to monitor connected clients - self.start_service_task = False - self.start_background_task(self._service_task) - - sid = self.generate_id() - s = asyncio_socket.AsyncSocket(self, sid) - self.sockets[sid] = s - - pkt = packet.Packet( - packet.OPEN, {'sid': sid, - 'upgrades': self._upgrades(sid, transport), - 'pingTimeout': int(self.ping_timeout * 1000), - 'pingInterval': int(self.ping_interval * 1000)}) - await s.send(pkt) - s.schedule_ping() - - ret = await self._trigger_event('connect', sid, environ, - run_async=False) - if ret is not None and ret is not True: - del self.sockets[sid] - self.logger.warning('Application rejected connection') - return self._unauthorized(ret or None) - - if transport == 'websocket': - ret = await s.handle_get_request(environ) - if s.closed and sid in self.sockets: - # websocket connection ended, so we are done - del self.sockets[sid] - return ret - else: - s.connected = True - headers = None - if self.cookie: - if isinstance(self.cookie, dict): - headers = [( - 'Set-Cookie', - self._generate_sid_cookie(sid, self.cookie) - )] - else: - headers = [( - 'Set-Cookie', - self._generate_sid_cookie(sid, { - 'name': self.cookie, 'path': '/', 'SameSite': 'Lax' - }) - )] - try: - return self._ok(await s.poll(), headers=headers, - jsonp_index=jsonp_index) - except exceptions.QueueEmpty: - return self._bad_request() - - async def _trigger_event(self, event, *args, **kwargs): - """Invoke an event handler.""" - run_async = kwargs.pop('run_async', False) - ret = None - if event in self.handlers: - if asyncio.iscoroutinefunction(self.handlers[event]) is True: - if run_async: - return self.start_background_task(self.handlers[event], - *args) - else: - try: - ret = await self.handlers[event](*args) - except asyncio.CancelledError: # pragma: no cover - pass - except: - self.logger.exception(event + ' async handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - else: - if run_async: - async def async_handler(): - return 
self.handlers[event](*args) - - return self.start_background_task(async_handler) - else: - try: - ret = self.handlers[event](*args) - except: - self.logger.exception(event + ' handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - return ret - - async def _service_task(self): # pragma: no cover - """Monitor connected clients and clean up those that time out.""" - while True: - if len(self.sockets) == 0: - # nothing to do - await self.sleep(self.ping_timeout) - continue - - # go through the entire client list in a ping interval cycle - sleep_interval = self.ping_timeout / len(self.sockets) - - try: - # iterate over the current clients - for socket in self.sockets.copy().values(): - if not socket.closing and not socket.closed: - await socket.check_ping_timeout() - await self.sleep(sleep_interval) - except ( - SystemExit, - KeyboardInterrupt, - asyncio.CancelledError, - GeneratorExit, - ): - self.logger.info('service task canceled') - break - except: - if asyncio.get_event_loop().is_closed(): - self.logger.info('event loop is closed, exiting service ' - 'task') - break - - # an unexpected exception has occurred, log it and continue - self.logger.exception('service task exception') diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/asyncio_socket.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/asyncio_socket.py deleted file mode 100644 index 17265c2..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/asyncio_socket.py +++ /dev/null @@ -1,251 +0,0 @@ -import asyncio -import sys -import time - -from . import exceptions -from . import packet -from . import payload -from . import socket - - -class AsyncSocket(socket.Socket): - async def poll(self): - """Wait for packets to send to the client.""" - try: - packets = [await asyncio.wait_for( - self.queue.get(), - self.server.ping_interval + self.server.ping_timeout)] - self.queue.task_done() - except (asyncio.TimeoutError, asyncio.CancelledError): - raise exceptions.QueueEmpty() - if packets == [None]: - return [] - while True: - try: - pkt = self.queue.get_nowait() - self.queue.task_done() - if pkt is None: - self.queue.put_nowait(None) - break - packets.append(pkt) - except asyncio.QueueEmpty: - break - return packets - - async def receive(self, pkt): - """Receive packet from the client.""" - self.server.logger.info('%s: Received packet %s data %s', - self.sid, packet.packet_names[pkt.packet_type], - pkt.data if not isinstance(pkt.data, bytes) - else '') - if pkt.packet_type == packet.PONG: - self.schedule_ping() - elif pkt.packet_type == packet.MESSAGE: - await self.server._trigger_event( - 'message', self.sid, pkt.data, - run_async=self.server.async_handlers) - elif pkt.packet_type == packet.UPGRADE: - await self.send(packet.Packet(packet.NOOP)) - elif pkt.packet_type == packet.CLOSE: - await self.close(wait=False, abort=True) - else: - raise exceptions.UnknownPacketError() - - async def check_ping_timeout(self): - """Make sure the client is still sending pings.""" - if self.closed: - raise exceptions.SocketIsClosedError() - if self.last_ping and \ - time.time() - self.last_ping > self.server.ping_timeout: - self.server.logger.info('%s: Client is gone, closing socket', - self.sid) - # Passing abort=False here will cause close() to write a - # CLOSE packet. 
This has the effect of updating half-open sockets - # to their correct state of disconnected - await self.close(wait=False, abort=False) - return False - return True - - async def send(self, pkt): - """Send a packet to the client.""" - if not await self.check_ping_timeout(): - return - else: - await self.queue.put(pkt) - self.server.logger.info('%s: Sending packet %s data %s', - self.sid, packet.packet_names[pkt.packet_type], - pkt.data if not isinstance(pkt.data, bytes) - else '') - - async def handle_get_request(self, environ): - """Handle a long-polling GET request from the client.""" - connections = [ - s.strip() - for s in environ.get('HTTP_CONNECTION', '').lower().split(',')] - transport = environ.get('HTTP_UPGRADE', '').lower() - if 'upgrade' in connections and transport in self.upgrade_protocols: - self.server.logger.info('%s: Received request to upgrade to %s', - self.sid, transport) - return await getattr(self, '_upgrade_' + transport)(environ) - if self.upgrading or self.upgraded: - # we are upgrading to WebSocket, do not return any more packets - # through the polling endpoint - return [packet.Packet(packet.NOOP)] - try: - packets = await self.poll() - except exceptions.QueueEmpty: - exc = sys.exc_info() - await self.close(wait=False) - raise exc[1].with_traceback(exc[2]) - return packets - - async def handle_post_request(self, environ): - """Handle a long-polling POST request from the client.""" - length = int(environ.get('CONTENT_LENGTH', '0')) - if length > self.server.max_http_buffer_size: - raise exceptions.ContentTooLongError() - else: - body = (await environ['wsgi.input'].read(length)).decode('utf-8') - p = payload.Payload(encoded_payload=body) - for pkt in p.packets: - await self.receive(pkt) - - async def close(self, wait=True, abort=False): - """Close the socket connection.""" - if not self.closed and not self.closing: - self.closing = True - await self.server._trigger_event('disconnect', self.sid) - if not abort: - await self.send(packet.Packet(packet.CLOSE)) - self.closed = True - if wait: - await self.queue.join() - - def schedule_ping(self): - async def send_ping(): - self.last_ping = None - await asyncio.sleep(self.server.ping_interval) - if not self.closing and not self.closed: - self.last_ping = time.time() - await self.send(packet.Packet(packet.PING)) - - self.server.start_background_task(send_ping) - - async def _upgrade_websocket(self, environ): - """Upgrade the connection from polling to websocket.""" - if self.upgraded: - raise IOError('Socket has been upgraded already') - if self.server._async['websocket'] is None: - # the selected async mode does not support websocket - return self.server._bad_request() - ws = self.server._async['websocket'](self._websocket_handler) - return await ws(environ) - - async def _websocket_handler(self, ws): - """Engine.IO handler for websocket transport.""" - async def websocket_wait(): - data = await ws.wait() - if data and len(data) > self.server.max_http_buffer_size: - raise ValueError('packet is too large') - return data - - if self.connected: - # the socket was already connected, so this is an upgrade - self.upgrading = True # hold packet sends during the upgrade - - try: - pkt = await websocket_wait() - except IOError: # pragma: no cover - return - decoded_pkt = packet.Packet(encoded_packet=pkt) - if decoded_pkt.packet_type != packet.PING or \ - decoded_pkt.data != 'probe': - self.server.logger.info( - '%s: Failed websocket upgrade, no PING packet', self.sid) - self.upgrading = False - return - await 
ws.send(packet.Packet(packet.PONG, data='probe').encode()) - await self.queue.put(packet.Packet(packet.NOOP)) # end poll - - try: - pkt = await websocket_wait() - except IOError: # pragma: no cover - self.upgrading = False - return - decoded_pkt = packet.Packet(encoded_packet=pkt) - if decoded_pkt.packet_type != packet.UPGRADE: - self.upgraded = False - self.server.logger.info( - ('%s: Failed websocket upgrade, expected UPGRADE packet, ' - 'received %s instead.'), - self.sid, pkt) - self.upgrading = False - return - self.upgraded = True - self.upgrading = False - else: - self.connected = True - self.upgraded = True - - # start separate writer thread - async def writer(): - while True: - packets = None - try: - packets = await self.poll() - except exceptions.QueueEmpty: - break - if not packets: - # empty packet list returned -> connection closed - break - try: - for pkt in packets: - await ws.send(pkt.encode()) - except: - break - writer_task = asyncio.ensure_future(writer()) - - self.server.logger.info( - '%s: Upgrade to websocket successful', self.sid) - - while True: - p = None - wait_task = asyncio.ensure_future(websocket_wait()) - try: - p = await asyncio.wait_for( - wait_task, - self.server.ping_interval + self.server.ping_timeout) - except asyncio.CancelledError: # pragma: no cover - # there is a bug (https://bugs.python.org/issue30508) in - # asyncio that causes a "Task exception never retrieved" error - # to appear when wait_task raises an exception before it gets - # cancelled. Calling wait_task.exception() prevents the error - # from being issued in Python 3.6, but causes other errors in - # other versions, so we run it with all errors suppressed and - # hope for the best. - try: - wait_task.exception() - except: - pass - break - except: - break - if p is None: - # connection closed by client - break - pkt = packet.Packet(encoded_packet=p) - try: - await self.receive(pkt) - except exceptions.UnknownPacketError: # pragma: no cover - pass - except exceptions.SocketIsClosedError: # pragma: no cover - self.server.logger.info('Receive error -- socket is closed') - break - except: # pragma: no cover - # if we get an unexpected exception we log the error and exit - # the connection properly - self.server.logger.exception('Unknown receive error') - - await self.queue.put(None) # unlock the writer task so it can exit - await asyncio.wait_for(writer_task, timeout=None) - await self.close(wait=False, abort=True) diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/client.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/client.py deleted file mode 100644 index 0584979..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/client.py +++ /dev/null @@ -1,727 +0,0 @@ -from base64 import b64encode -from engineio.json import JSONDecodeError -import logging -import queue -import signal -import ssl -import threading -import time -import urllib - -try: - import requests -except ImportError: # pragma: no cover - requests = None -try: - import websocket -except ImportError: # pragma: no cover - websocket = None -from . import exceptions -from . import packet -from . import payload - -default_logger = logging.getLogger('engineio.client') -connected_clients = [] - - -def signal_handler(sig, frame): - """SIGINT handler. - - Disconnect all active clients and then invoke the original signal handler. 
- """ - for client in connected_clients[:]: - if not client.is_asyncio_based(): - client.disconnect() - if callable(original_signal_handler): - return original_signal_handler(sig, frame) - else: # pragma: no cover - # Handle case where no original SIGINT handler was present. - return signal.default_int_handler(sig, frame) - - -original_signal_handler = None - - -class Client(object): - """An Engine.IO client. - - This class implements a fully compliant Engine.IO web client with support - for websocket and long-polling transports. - - :param logger: To enable logging set to ``True`` or pass a logger object to - use. To disable logging set to ``False``. The default is - ``False``. Note that fatal errors are logged even when - ``logger`` is ``False``. - :param json: An alternative json module to use for encoding and decoding - packets. Custom json modules must have ``dumps`` and ``loads`` - functions that are compatible with the standard library - versions. - :param request_timeout: A timeout in seconds for requests. The default is - 5 seconds. - :param http_session: an initialized ``requests.Session`` object to be used - when sending requests to the server. Use it if you - need to add special client options such as proxy - servers, SSL certificates, custom CA bundle, etc. - :param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to - skip SSL certificate verification, allowing - connections to servers with self signed certificates. - The default is ``True``. - :param handle_sigint: Set to ``True`` to automatically handle disconnection - when the process is interrupted, or to ``False`` to - leave interrupt handling to the calling application. - Interrupt handling can only be enabled when the - client instance is created in the main thread. - :param websocket_extra_options: Dictionary containing additional keyword - arguments passed to - ``websocket.create_connection()``. - """ - event_names = ['connect', 'disconnect', 'message'] - - def __init__(self, logger=False, json=None, request_timeout=5, - http_session=None, ssl_verify=True, handle_sigint=True, - websocket_extra_options=None): - global original_signal_handler - if handle_sigint and original_signal_handler is None and \ - threading.current_thread() == threading.main_thread(): - original_signal_handler = signal.signal(signal.SIGINT, - signal_handler) - self.handlers = {} - self.base_url = None - self.transports = None - self.current_transport = None - self.sid = None - self.upgrades = None - self.ping_interval = None - self.ping_timeout = None - self.http = http_session - self.external_http = http_session is not None - self.handle_sigint = handle_sigint - self.ws = None - self.read_loop_task = None - self.write_loop_task = None - self.queue = None - self.state = 'disconnected' - self.ssl_verify = ssl_verify - self.websocket_extra_options = websocket_extra_options or {} - - if json is not None: - packet.Packet.json = json - if not isinstance(logger, bool): - self.logger = logger - else: - self.logger = default_logger - if self.logger.level == logging.NOTSET: - if logger: - self.logger.setLevel(logging.INFO) - else: - self.logger.setLevel(logging.ERROR) - self.logger.addHandler(logging.StreamHandler()) - - self.request_timeout = request_timeout - - def is_asyncio_based(self): - return False - - def on(self, event, handler=None): - """Register an event handler. - - :param event: The event name. Can be ``'connect'``, ``'message'`` or - ``'disconnect'``. - :param handler: The function that should be invoked to handle the - event. 
When this parameter is not given, the method - acts as a decorator for the handler function. - - Example usage:: - - # as a decorator: - @eio.on('connect') - def connect_handler(): - print('Connection request') - - # as a method: - def message_handler(msg): - print('Received message: ', msg) - eio.send('response') - eio.on('message', message_handler) - """ - if event not in self.event_names: - raise ValueError('Invalid event') - - def set_handler(handler): - self.handlers[event] = handler - return handler - - if handler is None: - return set_handler - set_handler(handler) - - def connect(self, url, headers=None, transports=None, - engineio_path='engine.io'): - """Connect to an Engine.IO server. - - :param url: The URL of the Engine.IO server. It can include custom - query string parameters if required by the server. - :param headers: A dictionary with custom headers to send with the - connection request. - :param transports: The list of allowed transports. Valid transports - are ``'polling'`` and ``'websocket'``. If not - given, the polling transport is connected first, - then an upgrade to websocket is attempted. - :param engineio_path: The endpoint where the Engine.IO server is - installed. The default value is appropriate for - most cases. - - Example usage:: - - eio = engineio.Client() - eio.connect('http://localhost:5000') - """ - if self.state != 'disconnected': - raise ValueError('Client is not in a disconnected state') - valid_transports = ['polling', 'websocket'] - if transports is not None: - if isinstance(transports, str): - transports = [transports] - transports = [transport for transport in transports - if transport in valid_transports] - if not transports: - raise ValueError('No valid transports provided') - self.transports = transports or valid_transports - self.queue = self.create_queue() - return getattr(self, '_connect_' + self.transports[0])( - url, headers or {}, engineio_path) - - def wait(self): - """Wait until the connection with the server ends. - - Client applications can use this function to block the main thread - during the life of the connection. - """ - if self.read_loop_task: - self.read_loop_task.join() - - def send(self, data): - """Send a message to the server. - - :param data: The data to send to the server. Data can be of type - ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` - or ``dict``, the data will be serialized as JSON. - """ - self._send_packet(packet.Packet(packet.MESSAGE, data=data)) - - def disconnect(self, abort=False): - """Disconnect from the server. - - :param abort: If set to ``True``, do not wait for background tasks - associated with the connection to end. - """ - if self.state == 'connected': - self._send_packet(packet.Packet(packet.CLOSE)) - self.queue.put(None) - self.state = 'disconnecting' - self._trigger_event('disconnect', run_async=False) - if self.current_transport == 'websocket': - self.ws.close() - if not abort: - self.read_loop_task.join() - self.state = 'disconnected' - try: - connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - self._reset() - - def transport(self): - """Return the name of the transport currently in use. - - The possible values returned by this function are ``'polling'`` and - ``'websocket'``. - """ - return self.current_transport - - def start_background_task(self, target, *args, **kwargs): - """Start a background task. - - This is a utility function that applications can use to start a - background task. - - :param target: the target function to execute. 
- :param args: arguments to pass to the function. - :param kwargs: keyword arguments to pass to the function. - - This function returns an object that represents the background task, - on which the ``join()`` method can be invoked to wait for the task to - complete. - """ - th = threading.Thread(target=target, args=args, kwargs=kwargs) - th.start() - return th - - def sleep(self, seconds=0): - """Sleep for the requested amount of time.""" - return time.sleep(seconds) - - def create_queue(self, *args, **kwargs): - """Create a queue object.""" - q = queue.Queue(*args, **kwargs) - q.Empty = queue.Empty - return q - - def create_event(self, *args, **kwargs): - """Create an event object.""" - return threading.Event(*args, **kwargs) - - def _reset(self): - self.state = 'disconnected' - self.sid = None - - def _connect_polling(self, url, headers, engineio_path): - """Establish a long-polling connection to the Engine.IO server.""" - if requests is None: # pragma: no cover - # not installed - self.logger.error('requests package is not installed -- cannot ' - 'send HTTP requests!') - return - self.base_url = self._get_engineio_url(url, engineio_path, 'polling') - self.logger.info('Attempting polling connection to ' + self.base_url) - r = self._send_request( - 'GET', self.base_url + self._get_url_timestamp(), headers=headers, - timeout=self.request_timeout) - if r is None or isinstance(r, str): - self._reset() - raise exceptions.ConnectionError( - r or 'Connection refused by the server') - if r.status_code < 200 or r.status_code >= 300: - self._reset() - try: - arg = r.json() - except JSONDecodeError: - arg = None - raise exceptions.ConnectionError( - 'Unexpected status code {} in server response'.format( - r.status_code), arg) - try: - p = payload.Payload(encoded_payload=r.content.decode('utf-8')) - except ValueError: - raise exceptions.ConnectionError( - 'Unexpected response from server') from None - open_packet = p.packets[0] - if open_packet.packet_type != packet.OPEN: - raise exceptions.ConnectionError( - 'OPEN packet not returned by server') - self.logger.info( - 'Polling connection accepted with ' + str(open_packet.data)) - self.sid = open_packet.data['sid'] - self.upgrades = open_packet.data['upgrades'] - self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0 - self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0 - self.current_transport = 'polling' - self.base_url += '&sid=' + self.sid - - self.state = 'connected' - connected_clients.append(self) - self._trigger_event('connect', run_async=False) - - for pkt in p.packets[1:]: - self._receive_packet(pkt) - - if 'websocket' in self.upgrades and 'websocket' in self.transports: - # attempt to upgrade to websocket - if self._connect_websocket(url, headers, engineio_path): - # upgrade to websocket succeeded, we're done here - return - - # start background tasks associated with this client - self.write_loop_task = self.start_background_task(self._write_loop) - self.read_loop_task = self.start_background_task( - self._read_loop_polling) - - def _connect_websocket(self, url, headers, engineio_path): - """Establish or upgrade to a WebSocket connection with the server.""" - if websocket is None: # pragma: no cover - # not installed - self.logger.error('websocket-client package not installed, only ' - 'polling transport is available') - return False - websocket_url = self._get_engineio_url(url, engineio_path, 'websocket') - if self.sid: - self.logger.info( - 'Attempting WebSocket upgrade to ' + websocket_url) - upgrade = True - 
websocket_url += '&sid=' + self.sid - else: - upgrade = False - self.base_url = websocket_url - self.logger.info( - 'Attempting WebSocket connection to ' + websocket_url) - - # get cookies and other settings from the long-polling connection - # so that they are preserved when connecting to the WebSocket route - cookies = None - extra_options = {} - if self.http: - # cookies - cookies = '; '.join(["{}={}".format(cookie.name, cookie.value) - for cookie in self.http.cookies]) - for header, value in headers.items(): - if header.lower() == 'cookie': - if cookies: - cookies += '; ' - cookies += value - del headers[header] - break - - # auth - if 'Authorization' not in headers and self.http.auth is not None: - if not isinstance(self.http.auth, tuple): # pragma: no cover - raise ValueError('Only basic authentication is supported') - basic_auth = '{}:{}'.format( - self.http.auth[0], self.http.auth[1]).encode('utf-8') - basic_auth = b64encode(basic_auth).decode('utf-8') - headers['Authorization'] = 'Basic ' + basic_auth - - # cert - # this can be given as ('certfile', 'keyfile') or just 'certfile' - if isinstance(self.http.cert, tuple): - extra_options['sslopt'] = { - 'certfile': self.http.cert[0], - 'keyfile': self.http.cert[1]} - elif self.http.cert: - extra_options['sslopt'] = {'certfile': self.http.cert} - - # proxies - if self.http.proxies: - proxy_url = None - if websocket_url.startswith('ws://'): - proxy_url = self.http.proxies.get( - 'ws', self.http.proxies.get('http')) - else: # wss:// - proxy_url = self.http.proxies.get( - 'wss', self.http.proxies.get('https')) - if proxy_url: - parsed_url = urllib.parse.urlparse( - proxy_url if '://' in proxy_url - else 'scheme://' + proxy_url) - extra_options['http_proxy_host'] = parsed_url.hostname - extra_options['http_proxy_port'] = parsed_url.port - extra_options['http_proxy_auth'] = ( - (parsed_url.username, parsed_url.password) - if parsed_url.username or parsed_url.password - else None) - - # verify - if isinstance(self.http.verify, str): - if 'sslopt' in extra_options: - extra_options['sslopt']['ca_certs'] = self.http.verify - else: - extra_options['sslopt'] = {'ca_certs': self.http.verify} - elif not self.http.verify: - self.ssl_verify = False - - if not self.ssl_verify: - extra_options['sslopt'] = {"cert_reqs": ssl.CERT_NONE} - - # combine internally generated options with the ones supplied by the - # caller. The caller's options take precedence. 
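# Editor's note (illustrative sketch, not part of the deleted file): the
# caller-supplied options mentioned above come from the ``websocket_extra_options``
# argument of the Client constructor, which is forwarded to
# ``websocket.create_connection()`` and takes precedence over the values
# generated below. A hypothetical example:
#
#     import engineio
#     eio = engineio.Client(websocket_extra_options={
#         'header': {'X-Custom-Header': 'value'},  # merged into the upgrade headers
#         'timeout': 30,                           # overrides the default request_timeout
#     })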
- headers.update(self.websocket_extra_options.pop('header', {})) - extra_options['header'] = headers - extra_options['cookie'] = cookies - extra_options['enable_multithread'] = True - extra_options['timeout'] = self.request_timeout - extra_options.update(self.websocket_extra_options) - try: - ws = websocket.create_connection( - websocket_url + self._get_url_timestamp(), **extra_options) - except (ConnectionError, IOError, websocket.WebSocketException): - if upgrade: - self.logger.warning( - 'WebSocket upgrade failed: connection error') - return False - else: - raise exceptions.ConnectionError('Connection error') - if upgrade: - p = packet.Packet(packet.PING, data='probe').encode() - try: - ws.send(p) - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected send exception: %s', - str(e)) - return False - try: - p = ws.recv() - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected recv exception: %s', - str(e)) - return False - pkt = packet.Packet(encoded_packet=p) - if pkt.packet_type != packet.PONG or pkt.data != 'probe': - self.logger.warning( - 'WebSocket upgrade failed: no PONG packet') - return False - p = packet.Packet(packet.UPGRADE).encode() - try: - ws.send(p) - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected send exception: %s', - str(e)) - return False - self.current_transport = 'websocket' - self.logger.info('WebSocket upgrade was successful') - else: - try: - p = ws.recv() - except Exception as e: # pragma: no cover - raise exceptions.ConnectionError( - 'Unexpected recv exception: ' + str(e)) - open_packet = packet.Packet(encoded_packet=p) - if open_packet.packet_type != packet.OPEN: - raise exceptions.ConnectionError('no OPEN packet') - self.logger.info( - 'WebSocket connection accepted with ' + str(open_packet.data)) - self.sid = open_packet.data['sid'] - self.upgrades = open_packet.data['upgrades'] - self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0 - self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0 - self.current_transport = 'websocket' - - self.state = 'connected' - connected_clients.append(self) - self._trigger_event('connect', run_async=False) - self.ws = ws - self.ws.settimeout(self.ping_interval + self.ping_timeout) - - # start background tasks associated with this client - self.write_loop_task = self.start_background_task(self._write_loop) - self.read_loop_task = self.start_background_task( - self._read_loop_websocket) - return True - - def _receive_packet(self, pkt): - """Handle incoming packets from the server.""" - packet_name = packet.packet_names[pkt.packet_type] \ - if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN' - self.logger.info( - 'Received packet %s data %s', packet_name, - pkt.data if not isinstance(pkt.data, bytes) else '') - if pkt.packet_type == packet.MESSAGE: - self._trigger_event('message', pkt.data, run_async=True) - elif pkt.packet_type == packet.PING: - self._send_packet(packet.Packet(packet.PONG, pkt.data)) - elif pkt.packet_type == packet.CLOSE: - self.disconnect(abort=True) - elif pkt.packet_type == packet.NOOP: - pass - else: - self.logger.error('Received unexpected packet of type %s', - pkt.packet_type) - - def _send_packet(self, pkt): - """Queue a packet to be sent to the server.""" - if self.state != 'connected': - return - self.queue.put(pkt) - self.logger.info( - 'Sending packet %s data %s', - packet.packet_names[pkt.packet_type], - 
pkt.data if not isinstance(pkt.data, bytes) else '') - - def _send_request( - self, method, url, headers=None, body=None, - timeout=None): # pragma: no cover - if self.http is None: - self.http = requests.Session() - if not self.ssl_verify: - self.http.verify = False - try: - return self.http.request(method, url, headers=headers, data=body, - timeout=timeout) - except requests.exceptions.RequestException as exc: - self.logger.info('HTTP %s request to %s failed with error %s.', - method, url, exc) - return str(exc) - - def _trigger_event(self, event, *args, **kwargs): - """Invoke an event handler.""" - run_async = kwargs.pop('run_async', False) - if event in self.handlers: - if run_async: - return self.start_background_task(self.handlers[event], *args) - else: - try: - return self.handlers[event](*args) - except: - self.logger.exception(event + ' handler error') - - def _get_engineio_url(self, url, engineio_path, transport): - """Generate the Engine.IO connection URL.""" - engineio_path = engineio_path.strip('/') - parsed_url = urllib.parse.urlparse(url) - - if transport == 'polling': - scheme = 'http' - elif transport == 'websocket': - scheme = 'ws' - else: # pragma: no cover - raise ValueError('invalid transport') - if parsed_url.scheme in ['https', 'wss']: - scheme += 's' - - return ('{scheme}://{netloc}/{path}/?{query}' - '{sep}transport={transport}&EIO=4').format( - scheme=scheme, netloc=parsed_url.netloc, - path=engineio_path, query=parsed_url.query, - sep='&' if parsed_url.query else '', - transport=transport) - - def _get_url_timestamp(self): - """Generate the Engine.IO query string timestamp.""" - return '&t=' + str(time.time()) - - def _read_loop_polling(self): - """Read packets by polling the Engine.IO server.""" - while self.state == 'connected': - self.logger.info( - 'Sending polling GET request to ' + self.base_url) - r = self._send_request( - 'GET', self.base_url + self._get_url_timestamp(), - timeout=max(self.ping_interval, self.ping_timeout) + 5) - if r is None or isinstance(r, str): - self.logger.warning( - r or 'Connection refused by the server, aborting') - self.queue.put(None) - break - if r.status_code < 200 or r.status_code >= 300: - self.logger.warning('Unexpected status code %s in server ' - 'response, aborting', r.status_code) - self.queue.put(None) - break - try: - p = payload.Payload(encoded_payload=r.content.decode('utf-8')) - except ValueError: - self.logger.warning( - 'Unexpected packet from server, aborting') - self.queue.put(None) - break - for pkt in p.packets: - self._receive_packet(pkt) - - self.logger.info('Waiting for write loop task to end') - self.write_loop_task.join() - if self.state == 'connected': - self._trigger_event('disconnect', run_async=False) - try: - connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - self._reset() - self.logger.info('Exiting read loop task') - - def _read_loop_websocket(self): - """Read packets from the Engine.IO WebSocket connection.""" - while self.state == 'connected': - p = None - try: - p = self.ws.recv() - except websocket.WebSocketTimeoutException: - self.logger.warning( - 'Server has stopped communicating, aborting') - self.queue.put(None) - break - except websocket.WebSocketConnectionClosedException: - self.logger.warning( - 'WebSocket connection was closed, aborting') - self.queue.put(None) - break - except Exception as e: - self.logger.info( - 'Unexpected error receiving packet: "%s", aborting', - str(e)) - self.queue.put(None) - break - try: - pkt = packet.Packet(encoded_packet=p) - 
except Exception as e: # pragma: no cover - self.logger.info( - 'Unexpected error decoding packet: "%s", aborting', str(e)) - self.queue.put(None) - break - self._receive_packet(pkt) - - self.logger.info('Waiting for write loop task to end') - self.write_loop_task.join() - if self.state == 'connected': - self._trigger_event('disconnect', run_async=False) - try: - connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - self._reset() - self.logger.info('Exiting read loop task') - - def _write_loop(self): - """This background task sends packages to the server as they are - pushed to the send queue. - """ - while self.state == 'connected': - # to simplify the timeout handling, use the maximum of the - # ping interval and ping timeout as timeout, with an extra 5 - # seconds grace period - timeout = max(self.ping_interval, self.ping_timeout) + 5 - packets = None - try: - packets = [self.queue.get(timeout=timeout)] - except self.queue.Empty: - self.logger.error('packet queue is empty, aborting') - break - if packets == [None]: - self.queue.task_done() - packets = [] - else: - while True: - try: - packets.append(self.queue.get(block=False)) - except self.queue.Empty: - break - if packets[-1] is None: - packets = packets[:-1] - self.queue.task_done() - break - if not packets: - # empty packet list returned -> connection closed - break - if self.current_transport == 'polling': - p = payload.Payload(packets=packets) - r = self._send_request( - 'POST', self.base_url, body=p.encode(), - headers={'Content-Type': 'text/plain'}, - timeout=self.request_timeout) - for pkt in packets: - self.queue.task_done() - if r is None or isinstance(r, str): - self.logger.warning( - r or 'Connection refused by the server, aborting') - break - if r.status_code < 200 or r.status_code >= 300: - self.logger.warning('Unexpected status code %s in server ' - 'response, aborting', r.status_code) - self._reset() - break - else: - # websocket - try: - for pkt in packets: - encoded_packet = pkt.encode() - if pkt.binary: - self.ws.send_binary(encoded_packet) - else: - self.ws.send(encoded_packet) - self.queue.task_done() - except (websocket.WebSocketConnectionClosedException, - BrokenPipeError, OSError): - self.logger.warning( - 'WebSocket connection was closed, aborting') - break - self.logger.info('Exiting write loop task') diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/exceptions.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/exceptions.py deleted file mode 100644 index fb0b3e0..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/exceptions.py +++ /dev/null @@ -1,22 +0,0 @@ -class EngineIOError(Exception): - pass - - -class ContentTooLongError(EngineIOError): - pass - - -class UnknownPacketError(EngineIOError): - pass - - -class QueueEmpty(EngineIOError): - pass - - -class SocketIsClosedError(EngineIOError): - pass - - -class ConnectionError(EngineIOError): - pass diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/json.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/json.py deleted file mode 100644 index b612556..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/json.py +++ /dev/null @@ -1,16 +0,0 @@ -"""JSON-compatible module with sane defaults.""" - -from json import * # noqa: F401, F403 -from json import loads as original_loads - - -def _safe_int(s): - if len(s) > 100: - raise ValueError('Integer is too large') - return int(s) - - -def loads(*args, **kwargs): - if 'parse_int' not in kwargs: # pragma: no cover - 
kwargs['parse_int'] = _safe_int - return original_loads(*args, **kwargs) diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/middleware.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/middleware.py deleted file mode 100644 index 5d6ffdd..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/middleware.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -from engineio.static_files import get_static_file - - -class WSGIApp(object): - """WSGI application middleware for Engine.IO. - - This middleware dispatches traffic to an Engine.IO application. It can - also serve a list of static files to the client, or forward unrelated - HTTP traffic to another WSGI application. - - :param engineio_app: The Engine.IO server. Must be an instance of the - ``engineio.Server`` class. - :param wsgi_app: The WSGI app that receives all other traffic. - :param static_files: A dictionary with static file mapping rules. See the - documentation for details on this argument. - :param engineio_path: The endpoint where the Engine.IO application should - be installed. The default value is appropriate for - most cases. - - Example usage:: - - import engineio - import eventlet - - eio = engineio.Server() - app = engineio.WSGIApp(eio, static_files={ - '/': {'content_type': 'text/html', 'filename': 'index.html'}, - '/index.html': {'content_type': 'text/html', - 'filename': 'index.html'}, - }) - eventlet.wsgi.server(eventlet.listen(('', 8000)), app) - """ - def __init__(self, engineio_app, wsgi_app=None, static_files=None, - engineio_path='engine.io'): - self.engineio_app = engineio_app - self.wsgi_app = wsgi_app - self.engineio_path = engineio_path - if not self.engineio_path.startswith('/'): - self.engineio_path = '/' + self.engineio_path - if not self.engineio_path.endswith('/'): - self.engineio_path += '/' - self.static_files = static_files or {} - - def __call__(self, environ, start_response): - if 'gunicorn.socket' in environ: - # gunicorn saves the socket under environ['gunicorn.socket'], while - # eventlet saves it under environ['eventlet.input']. Eventlet also - # stores the socket inside a wrapper class, while gunicon writes it - # directly into the environment. To give eventlet's WebSocket - # module access to this socket when running under gunicorn, here we - # copy the socket to the eventlet format. 
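# Editor's note (illustrative, not part of the deleted file): this branch runs
# when the application is served by gunicorn with the eventlet worker, for
# example with a command along the lines of:
#
#     gunicorn -k eventlet -w 1 module:app
#
# where ``app`` is the ``engineio.WSGIApp`` instance and ``module`` is a
# hypothetical module name.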
- class Input(object): - def __init__(self, socket): - self.socket = socket - - def get_socket(self): - return self.socket - - environ['eventlet.input'] = Input(environ['gunicorn.socket']) - path = environ['PATH_INFO'] - if path is not None and path.startswith(self.engineio_path): - return self.engineio_app.handle_request(environ, start_response) - else: - static_file = get_static_file(path, self.static_files) \ - if self.static_files else None - if static_file and os.path.exists(static_file['filename']): - start_response( - '200 OK', - [('Content-Type', static_file['content_type'])]) - with open(static_file['filename'], 'rb') as f: - return [f.read()] - elif self.wsgi_app is not None: - return self.wsgi_app(environ, start_response) - return self.not_found(start_response) - - def not_found(self, start_response): - start_response("404 Not Found", [('Content-Type', 'text/plain')]) - return [b'Not Found'] - - -class Middleware(WSGIApp): - """This class has been renamed to ``WSGIApp`` and is now deprecated.""" - def __init__(self, engineio_app, wsgi_app=None, - engineio_path='engine.io'): - super(Middleware, self).__init__(engineio_app, wsgi_app, - engineio_path=engineio_path) diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/packet.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/packet.py deleted file mode 100644 index a9cbd33..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/packet.py +++ /dev/null @@ -1,74 +0,0 @@ -import base64 -from engineio import json as _json - -(OPEN, CLOSE, PING, PONG, MESSAGE, UPGRADE, NOOP) = (0, 1, 2, 3, 4, 5, 6) -packet_names = ['OPEN', 'CLOSE', 'PING', 'PONG', 'MESSAGE', 'UPGRADE', 'NOOP'] - -binary_types = (bytes, bytearray) - - -class Packet(object): - """Engine.IO packet.""" - - json = _json - - def __init__(self, packet_type=NOOP, data=None, encoded_packet=None): - self.packet_type = packet_type - self.data = data - if isinstance(data, str): - self.binary = False - elif isinstance(data, binary_types): - self.binary = True - else: - self.binary = False - if self.binary and self.packet_type != MESSAGE: - raise ValueError('Binary packets can only be of type MESSAGE') - if encoded_packet is not None: - self.decode(encoded_packet) - - def encode(self, b64=False): - """Encode the packet for transmission.""" - if self.binary: - if b64: - encoded_packet = 'b' + base64.b64encode(self.data).decode( - 'utf-8') - else: - encoded_packet = self.data - else: - encoded_packet = str(self.packet_type) - if isinstance(self.data, str): - encoded_packet += self.data - elif isinstance(self.data, dict) or isinstance(self.data, list): - encoded_packet += self.json.dumps(self.data, - separators=(',', ':')) - elif self.data is not None: - encoded_packet += str(self.data) - return encoded_packet - - def decode(self, encoded_packet): - """Decode a transmitted package.""" - self.binary = isinstance(encoded_packet, binary_types) - if not self.binary and len(encoded_packet) == 0: - raise ValueError('Invalid empty packet received') - b64 = not self.binary and encoded_packet[0] == 'b' - if b64: - self.binary = True - self.packet_type = MESSAGE - self.data = base64.b64decode(encoded_packet[1:]) - else: - if self.binary and not isinstance(encoded_packet, bytes): - encoded_packet = bytes(encoded_packet) - if self.binary: - self.packet_type = MESSAGE - self.data = encoded_packet - else: - self.packet_type = int(encoded_packet[0]) - try: - self.data = self.json.loads(encoded_packet[1:]) - if isinstance(self.data, int): - # do not allow integer 
payloads, see - # github.com/miguelgrinberg/python-engineio/issues/75 - # for background on this decision - raise ValueError - except ValueError: - self.data = encoded_packet[1:] diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/payload.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/payload.py deleted file mode 100644 index f0e9e34..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/payload.py +++ /dev/null @@ -1,46 +0,0 @@ -import urllib - -from . import packet - - -class Payload(object): - """Engine.IO payload.""" - max_decode_packets = 16 - - def __init__(self, packets=None, encoded_payload=None): - self.packets = packets or [] - if encoded_payload is not None: - self.decode(encoded_payload) - - def encode(self, jsonp_index=None): - """Encode the payload for transmission.""" - encoded_payload = '' - for pkt in self.packets: - if encoded_payload: - encoded_payload += '\x1e' - encoded_payload += pkt.encode(b64=True) - if jsonp_index is not None: - encoded_payload = '___eio[' + \ - str(jsonp_index) + \ - ']("' + \ - encoded_payload.replace('"', '\\"') + \ - '");' - return encoded_payload - - def decode(self, encoded_payload): - """Decode a transmitted payload.""" - self.packets = [] - - if len(encoded_payload) == 0: - return - - # JSONP POST payload starts with 'd=' - if encoded_payload.startswith('d='): - encoded_payload = urllib.parse.parse_qs( - encoded_payload)['d'][0] - - encoded_packets = encoded_payload.split('\x1e') - if len(encoded_packets) > self.max_decode_packets: - raise ValueError('Too many packets in payload') - self.packets = [packet.Packet(encoded_packet=encoded_packet) - for encoded_packet in encoded_packets] diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/server.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/server.py deleted file mode 100644 index 386ca82..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/server.py +++ /dev/null @@ -1,770 +0,0 @@ -import base64 -import gzip -import importlib -import io -import logging -import secrets -import urllib -import zlib - -from . import exceptions -from . import packet -from . import payload -from . import socket - -default_logger = logging.getLogger('engineio.server') - - -class Server(object): - """An Engine.IO server. - - This class implements a fully compliant Engine.IO web server with support - for websocket and long-polling transports. - - :param async_mode: The asynchronous model to use. See the Deployment - section in the documentation for a description of the - available options. Valid async modes are "threading", - "eventlet", "gevent" and "gevent_uwsgi". If this - argument is not given, "eventlet" is tried first, then - "gevent_uwsgi", then "gevent", and finally "threading". - The first async mode that has all its dependencies - installed is the one that is chosen. - :param ping_interval: The interval in seconds at which the server pings - the client. The default is 25 seconds. For advanced - control, a two element tuple can be given, where - the first number is the ping interval and the second - is a grace period added by the server. - :param ping_timeout: The time in seconds that the client waits for the - server to respond before disconnecting. The default - is 20 seconds. - :param max_http_buffer_size: The maximum size of a message. The default - is 1,000,000 bytes. - :param allow_upgrades: Whether to allow transport upgrades or not. The - default is ``True``. 
- :param http_compression: Whether to compress packages when using the - polling transport. The default is ``True``. - :param compression_threshold: Only compress messages when their byte size - is greater than this value. The default is - 1024 bytes. - :param cookie: If set to a string, it is the name of the HTTP cookie the - server sends back tot he client containing the client - session id. If set to a dictionary, the ``'name'`` key - contains the cookie name and other keys define cookie - attributes, where the value of each attribute can be a - string, a callable with no arguments, or a boolean. If set - to ``None`` (the default), a cookie is not sent to the - client. - :param cors_allowed_origins: Origin or list of origins that are allowed to - connect to this server. Only the same origin - is allowed by default. Set this argument to - ``'*'`` to allow all origins, or to ``[]`` to - disable CORS handling. - :param cors_credentials: Whether credentials (cookies, authentication) are - allowed in requests to this server. The default - is ``True``. - :param logger: To enable logging set to ``True`` or pass a logger object to - use. To disable logging set to ``False``. The default is - ``False``. Note that fatal errors are logged even when - ``logger`` is ``False``. - :param json: An alternative json module to use for encoding and decoding - packets. Custom json modules must have ``dumps`` and ``loads`` - functions that are compatible with the standard library - versions. - :param async_handlers: If set to ``True``, run message event handlers in - non-blocking threads. To run handlers synchronously, - set to ``False``. The default is ``True``. - :param monitor_clients: If set to ``True``, a background task will ensure - inactive clients are closed. Set to ``False`` to - disable the monitoring task (not recommended). The - default is ``True``. - :param transports: The list of allowed transports. Valid transports - are ``'polling'`` and ``'websocket'``. Defaults to - ``['polling', 'websocket']``. - :param kwargs: Reserved for future extensions, any additional parameters - given as keyword arguments will be silently ignored. 
- """ - compression_methods = ['gzip', 'deflate'] - event_names = ['connect', 'disconnect', 'message'] - valid_transports = ['polling', 'websocket'] - _default_monitor_clients = True - sequence_number = 0 - - def __init__(self, async_mode=None, ping_interval=25, ping_timeout=20, - max_http_buffer_size=1000000, allow_upgrades=True, - http_compression=True, compression_threshold=1024, - cookie=None, cors_allowed_origins=None, - cors_credentials=True, logger=False, json=None, - async_handlers=True, monitor_clients=None, transports=None, - **kwargs): - self.ping_timeout = ping_timeout - if isinstance(ping_interval, tuple): - self.ping_interval = ping_interval[0] - self.ping_interval_grace_period = ping_interval[1] - else: - self.ping_interval = ping_interval - self.ping_interval_grace_period = 0 - self.max_http_buffer_size = max_http_buffer_size - self.allow_upgrades = allow_upgrades - self.http_compression = http_compression - self.compression_threshold = compression_threshold - self.cookie = cookie - self.cors_allowed_origins = cors_allowed_origins - self.cors_credentials = cors_credentials - self.async_handlers = async_handlers - self.sockets = {} - self.handlers = {} - self.log_message_keys = set() - self.start_service_task = monitor_clients \ - if monitor_clients is not None else self._default_monitor_clients - if json is not None: - packet.Packet.json = json - if not isinstance(logger, bool): - self.logger = logger - else: - self.logger = default_logger - if self.logger.level == logging.NOTSET: - if logger: - self.logger.setLevel(logging.INFO) - else: - self.logger.setLevel(logging.ERROR) - self.logger.addHandler(logging.StreamHandler()) - modes = self.async_modes() - if async_mode is not None: - modes = [async_mode] if async_mode in modes else [] - self._async = None - self.async_mode = None - for mode in modes: - try: - self._async = importlib.import_module( - 'engineio.async_drivers.' + mode)._async - asyncio_based = self._async['asyncio'] \ - if 'asyncio' in self._async else False - if asyncio_based != self.is_asyncio_based(): - continue # pragma: no cover - self.async_mode = mode - break - except ImportError: - pass - if self.async_mode is None: - raise ValueError('Invalid async_mode specified') - if self.is_asyncio_based() and \ - ('asyncio' not in self._async or not - self._async['asyncio']): # pragma: no cover - raise ValueError('The selected async_mode is not asyncio ' - 'compatible') - if not self.is_asyncio_based() and 'asyncio' in self._async and \ - self._async['asyncio']: # pragma: no cover - raise ValueError('The selected async_mode requires asyncio and ' - 'must use the AsyncServer class') - if transports is not None: - if isinstance(transports, str): - transports = [transports] - transports = [transport for transport in transports - if transport in self.valid_transports] - if not transports: - raise ValueError('No valid transports provided') - self.transports = transports or self.valid_transports - self.logger.info('Server initialized for %s.', self.async_mode) - - def is_asyncio_based(self): - return False - - def async_modes(self): - return ['eventlet', 'gevent_uwsgi', 'gevent', 'threading'] - - def on(self, event, handler=None): - """Register an event handler. - - :param event: The event name. Can be ``'connect'``, ``'message'`` or - ``'disconnect'``. - :param handler: The function that should be invoked to handle the - event. When this parameter is not given, the method - acts as a decorator for the handler function. 
- - Example usage:: - - # as a decorator: - @eio.on('connect') - def connect_handler(sid, environ): - print('Connection request') - if environ['REMOTE_ADDR'] in blacklisted: - return False # reject - - # as a method: - def message_handler(sid, msg): - print('Received message: ', msg) - eio.send(sid, 'response') - eio.on('message', message_handler) - - The handler function receives the ``sid`` (session ID) for the - client as first argument. The ``'connect'`` event handler receives the - WSGI environment as a second argument, and can return ``False`` to - reject the connection. The ``'message'`` handler receives the message - payload as a second argument. The ``'disconnect'`` handler does not - take a second argument. - """ - if event not in self.event_names: - raise ValueError('Invalid event') - - def set_handler(handler): - self.handlers[event] = handler - return handler - - if handler is None: - return set_handler - set_handler(handler) - - def send(self, sid, data): - """Send a message to a client. - - :param sid: The session id of the recipient client. - :param data: The data to send to the client. Data can be of type - ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` - or ``dict``, the data will be serialized as JSON. - """ - try: - socket = self._get_socket(sid) - except KeyError: - # the socket is not available - self.logger.warning('Cannot send to sid %s', sid) - return - socket.send(packet.Packet(packet.MESSAGE, data=data)) - - def get_session(self, sid): - """Return the user session for a client. - - :param sid: The session id of the client. - - The return value is a dictionary. Modifications made to this - dictionary are not guaranteed to be preserved unless - ``save_session()`` is called, or when the ``session`` context manager - is used. - """ - socket = self._get_socket(sid) - return socket.session - - def save_session(self, sid, session): - """Store the user session for a client. - - :param sid: The session id of the client. - :param session: The session dictionary. - """ - socket = self._get_socket(sid) - socket.session = session - - def session(self, sid): - """Return the user session for a client with context manager syntax. - - :param sid: The session id of the client. - - This is a context manager that returns the user session dictionary for - the client. Any changes that are made to this dictionary inside the - context manager block are saved back to the session. Example usage:: - - @eio.on('connect') - def on_connect(sid, environ): - username = authenticate_user(environ) - if not username: - return False - with eio.session(sid) as session: - session['username'] = username - - @eio.on('message') - def on_message(sid, msg): - with eio.session(sid) as session: - print('received message from ', session['username']) - """ - class _session_context_manager(object): - def __init__(self, server, sid): - self.server = server - self.sid = sid - self.session = None - - def __enter__(self): - self.session = self.server.get_session(sid) - return self.session - - def __exit__(self, *args): - self.server.save_session(sid, self.session) - - return _session_context_manager(self, sid) - - def disconnect(self, sid=None): - """Disconnect a client. - - :param sid: The session id of the client to close. If this parameter - is not given, then all clients are closed. 
- """ - if sid is not None: - try: - socket = self._get_socket(sid) - except KeyError: # pragma: no cover - # the socket was already closed or gone - pass - else: - socket.close() - if sid in self.sockets: # pragma: no cover - del self.sockets[sid] - else: - for client in self.sockets.values(): - client.close() - self.sockets = {} - - def transport(self, sid): - """Return the name of the transport used by the client. - - The two possible values returned by this function are ``'polling'`` - and ``'websocket'``. - - :param sid: The session of the client. - """ - return 'websocket' if self._get_socket(sid).upgraded else 'polling' - - def handle_request(self, environ, start_response): - """Handle an HTTP request from the client. - - This is the entry point of the Engine.IO application, using the same - interface as a WSGI application. For the typical usage, this function - is invoked by the :class:`Middleware` instance, but it can be invoked - directly when the middleware is not used. - - :param environ: The WSGI environment. - :param start_response: The WSGI ``start_response`` function. - - This function returns the HTTP response body to deliver to the client - as a byte sequence. - """ - if self.cors_allowed_origins != []: - # Validate the origin header if present - # This is important for WebSocket more than for HTTP, since - # browsers only apply CORS controls to HTTP. - origin = environ.get('HTTP_ORIGIN') - if origin: - allowed_origins = self._cors_allowed_origins(environ) - if allowed_origins is not None and origin not in \ - allowed_origins: - self._log_error_once( - origin + ' is not an accepted origin.', 'bad-origin') - r = self._bad_request('Not an accepted origin.') - start_response(r['status'], r['headers']) - return [r['response']] - - method = environ['REQUEST_METHOD'] - query = urllib.parse.parse_qs(environ.get('QUERY_STRING', '')) - jsonp = False - jsonp_index = None - - # make sure the client uses an allowed transport - transport = query.get('transport', ['polling'])[0] - if transport not in self.transports: - self._log_error_once('Invalid transport', 'bad-transport') - r = self._bad_request('Invalid transport') - start_response(r['status'], r['headers']) - return [r['response']] - - # make sure the client speaks a compatible Engine.IO version - sid = query['sid'][0] if 'sid' in query else None - if sid is None and query.get('EIO') != ['4']: - self._log_error_once( - 'The client is using an unsupported version of the Socket.IO ' - 'or Engine.IO protocols', 'bad-version') - r = self._bad_request( - 'The client is using an unsupported version of the Socket.IO ' - 'or Engine.IO protocols') - start_response(r['status'], r['headers']) - return [r['response']] - - if 'j' in query: - jsonp = True - try: - jsonp_index = int(query['j'][0]) - except (ValueError, KeyError, IndexError): - # Invalid JSONP index number - pass - - if jsonp and jsonp_index is None: - self._log_error_once('Invalid JSONP index number', - 'bad-jsonp-index') - r = self._bad_request('Invalid JSONP index number') - elif method == 'GET': - if sid is None: - # transport must be one of 'polling' or 'websocket'. - # if 'websocket', the HTTP_UPGRADE header must match. 
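# Editor's note (illustrative, not part of the deleted file): the two opening
# requests this branch accepts look roughly like
#
#     GET /engine.io/?EIO=4&transport=polling&t=...      (long-polling open)
#     GET /engine.io/?EIO=4&transport=websocket&t=...    (direct websocket open,
#                                                          sent with an
#                                                          "Upgrade: websocket" header)
#
# anything else in this branch is rejected with the "Invalid websocket upgrade"
# error below.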
- upgrade_header = environ.get('HTTP_UPGRADE').lower() \ - if 'HTTP_UPGRADE' in environ else None - if transport == 'polling' \ - or transport == upgrade_header == 'websocket': - r = self._handle_connect(environ, start_response, - transport, jsonp_index) - else: - self._log_error_once('Invalid websocket upgrade', - 'bad-upgrade') - r = self._bad_request('Invalid websocket upgrade') - else: - if sid not in self.sockets: - self._log_error_once('Invalid session ' + sid, 'bad-sid') - r = self._bad_request('Invalid session') - else: - socket = self._get_socket(sid) - try: - packets = socket.handle_get_request( - environ, start_response) - if isinstance(packets, list): - r = self._ok(packets, jsonp_index=jsonp_index) - else: - r = packets - except exceptions.EngineIOError: - if sid in self.sockets: # pragma: no cover - self.disconnect(sid) - r = self._bad_request() - if sid in self.sockets and self.sockets[sid].closed: - del self.sockets[sid] - elif method == 'POST': - if sid is None or sid not in self.sockets: - self._log_error_once( - 'Invalid session ' + (sid or 'None'), 'bad-sid') - r = self._bad_request('Invalid session') - else: - socket = self._get_socket(sid) - try: - socket.handle_post_request(environ) - r = self._ok(jsonp_index=jsonp_index) - except exceptions.EngineIOError: - if sid in self.sockets: # pragma: no cover - self.disconnect(sid) - r = self._bad_request() - except: # pragma: no cover - # for any other unexpected errors, we log the error - # and keep going - self.logger.exception('post request handler error') - r = self._ok(jsonp_index=jsonp_index) - elif method == 'OPTIONS': - r = self._ok() - else: - self.logger.warning('Method %s not supported', method) - r = self._method_not_found() - - if not isinstance(r, dict): - return r or [] - if self.http_compression and \ - len(r['response']) >= self.compression_threshold: - encodings = [e.split(';')[0].strip() for e in - environ.get('HTTP_ACCEPT_ENCODING', '').split(',')] - for encoding in encodings: - if encoding in self.compression_methods: - r['response'] = \ - getattr(self, '_' + encoding)(r['response']) - r['headers'] += [('Content-Encoding', encoding)] - break - cors_headers = self._cors_headers(environ) - start_response(r['status'], r['headers'] + cors_headers) - return [r['response']] - - def start_background_task(self, target, *args, **kwargs): - """Start a background task using the appropriate async model. - - This is a utility function that applications can use to start a - background task using the method that is compatible with the - selected async mode. - - :param target: the target function to execute. - :param args: arguments to pass to the function. - :param kwargs: keyword arguments to pass to the function. - - This function returns an object that represents the background task, - on which the ``join()`` methond can be invoked to wait for the task to - complete. - """ - th = self._async['thread'](target=target, args=args, kwargs=kwargs) - th.start() - return th # pragma: no cover - - def sleep(self, seconds=0): - """Sleep for the requested amount of time using the appropriate async - model. - - This is a utility function that applications can use to put a task to - sleep without having to worry about using the correct call for the - selected async mode. - """ - return self._async['sleep'](seconds) - - def create_queue(self, *args, **kwargs): - """Create a queue object using the appropriate async model. 
- - This is a utility function that applications can use to create a queue - without having to worry about using the correct call for the selected - async mode. - """ - return self._async['queue'](*args, **kwargs) - - def get_queue_empty_exception(self): - """Return the queue empty exception for the appropriate async model. - - This is a utility function that applications can use to work with a - queue without having to worry about using the correct call for the - selected async mode. - """ - return self._async['queue_empty'] - - def create_event(self, *args, **kwargs): - """Create an event object using the appropriate async model. - - This is a utility function that applications can use to create an - event without having to worry about using the correct call for the - selected async mode. - """ - return self._async['event'](*args, **kwargs) - - def generate_id(self): - """Generate a unique session id.""" - id = base64.b64encode( - secrets.token_bytes(12) + self.sequence_number.to_bytes(3, 'big')) - self.sequence_number = (self.sequence_number + 1) & 0xffffff - return id.decode('utf-8').replace('/', '_').replace('+', '-') - - def _generate_sid_cookie(self, sid, attributes): - """Generate the sid cookie.""" - cookie = attributes.get('name', 'io') + '=' + sid - for attribute, value in attributes.items(): - if attribute == 'name': - continue - if callable(value): - value = value() - if value is True: - cookie += '; ' + attribute - else: - cookie += '; ' + attribute + '=' + value - return cookie - - def _handle_connect(self, environ, start_response, transport, - jsonp_index=None): - """Handle a client connection request.""" - if self.start_service_task: - # start the service task to monitor connected clients - self.start_service_task = False - self.start_background_task(self._service_task) - - sid = self.generate_id() - s = socket.Socket(self, sid) - self.sockets[sid] = s - - pkt = packet.Packet(packet.OPEN, { - 'sid': sid, - 'upgrades': self._upgrades(sid, transport), - 'pingTimeout': int(self.ping_timeout * 1000), - 'pingInterval': int( - self.ping_interval + self.ping_interval_grace_period) * 1000}) - s.send(pkt) - s.schedule_ping() - - # NOTE: some sections below are marked as "no cover" to workaround - # what seems to be a bug in the coverage package. 
All the lines below - # are covered by tests, but some are not reported as such for some - # reason - ret = self._trigger_event('connect', sid, environ, run_async=False) - if ret is not None and ret is not True: # pragma: no cover - del self.sockets[sid] - self.logger.warning('Application rejected connection') - return self._unauthorized(ret or None) - - if transport == 'websocket': # pragma: no cover - ret = s.handle_get_request(environ, start_response) - if s.closed and sid in self.sockets: - # websocket connection ended, so we are done - del self.sockets[sid] - return ret - else: # pragma: no cover - s.connected = True - headers = None - if self.cookie: - if isinstance(self.cookie, dict): - headers = [( - 'Set-Cookie', - self._generate_sid_cookie(sid, self.cookie) - )] - else: - headers = [( - 'Set-Cookie', - self._generate_sid_cookie(sid, { - 'name': self.cookie, 'path': '/', 'SameSite': 'Lax' - }) - )] - try: - return self._ok(s.poll(), headers=headers, - jsonp_index=jsonp_index) - except exceptions.QueueEmpty: - return self._bad_request() - - def _upgrades(self, sid, transport): - """Return the list of possible upgrades for a client connection.""" - if not self.allow_upgrades or self._get_socket(sid).upgraded or \ - transport == 'websocket': - return [] - if self._async['websocket'] is None: # pragma: no cover - self._log_error_once( - 'The WebSocket transport is not available, you must install a ' - 'WebSocket server that is compatible with your async mode to ' - 'enable it. See the documentation for details.', - 'no-websocket') - return [] - return ['websocket'] - - def _trigger_event(self, event, *args, **kwargs): - """Invoke an event handler.""" - run_async = kwargs.pop('run_async', False) - if event in self.handlers: - if run_async: - return self.start_background_task(self.handlers[event], *args) - else: - try: - return self.handlers[event](*args) - except: - self.logger.exception(event + ' handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - - def _get_socket(self, sid): - """Return the socket object for a given session.""" - try: - s = self.sockets[sid] - except KeyError: - raise KeyError('Session not found') - if s.closed: - del self.sockets[sid] - raise KeyError('Session is disconnected') - return s - - def _ok(self, packets=None, headers=None, jsonp_index=None): - """Generate a successful HTTP response.""" - if packets is not None: - if headers is None: - headers = [] - headers += [('Content-Type', 'text/plain; charset=UTF-8')] - return {'status': '200 OK', - 'headers': headers, - 'response': payload.Payload(packets=packets).encode( - jsonp_index=jsonp_index).encode('utf-8')} - else: - return {'status': '200 OK', - 'headers': [('Content-Type', 'text/plain')], - 'response': b'OK'} - - def _bad_request(self, message=None): - """Generate a bad request HTTP error response.""" - if message is None: - message = 'Bad Request' - message = packet.Packet.json.dumps(message) - return {'status': '400 BAD REQUEST', - 'headers': [('Content-Type', 'text/plain')], - 'response': message.encode('utf-8')} - - def _method_not_found(self): - """Generate a method not found HTTP error response.""" - return {'status': '405 METHOD NOT FOUND', - 'headers': [('Content-Type', 'text/plain')], - 'response': b'Method Not Found'} - - def _unauthorized(self, message=None): - """Generate a unauthorized HTTP error response.""" - if message is None: - message = 'Unauthorized' - message = packet.Packet.json.dumps(message) - return {'status': 
'401 UNAUTHORIZED', - 'headers': [('Content-Type', 'application/json')], - 'response': message.encode('utf-8')} - - def _cors_allowed_origins(self, environ): - default_origins = [] - if 'wsgi.url_scheme' in environ and 'HTTP_HOST' in environ: - default_origins.append('{scheme}://{host}'.format( - scheme=environ['wsgi.url_scheme'], host=environ['HTTP_HOST'])) - if 'HTTP_X_FORWARDED_PROTO' in environ or \ - 'HTTP_X_FORWARDED_HOST' in environ: - scheme = environ.get( - 'HTTP_X_FORWARDED_PROTO', - environ['wsgi.url_scheme']).split(',')[0].strip() - default_origins.append('{scheme}://{host}'.format( - scheme=scheme, host=environ.get( - 'HTTP_X_FORWARDED_HOST', environ['HTTP_HOST']).split( - ',')[0].strip())) - if self.cors_allowed_origins is None: - allowed_origins = default_origins - elif self.cors_allowed_origins == '*': - allowed_origins = None - elif isinstance(self.cors_allowed_origins, str): - allowed_origins = [self.cors_allowed_origins] - elif callable(self.cors_allowed_origins): - origin = environ.get('HTTP_ORIGIN') - allowed_origins = [origin] \ - if self.cors_allowed_origins(origin) else [] - else: - allowed_origins = self.cors_allowed_origins - return allowed_origins - - def _cors_headers(self, environ): - """Return the cross-origin-resource-sharing headers.""" - if self.cors_allowed_origins == []: - # special case, CORS handling is completely disabled - return [] - headers = [] - allowed_origins = self._cors_allowed_origins(environ) - if 'HTTP_ORIGIN' in environ and \ - (allowed_origins is None or environ['HTTP_ORIGIN'] in - allowed_origins): - headers = [('Access-Control-Allow-Origin', environ['HTTP_ORIGIN'])] - if environ['REQUEST_METHOD'] == 'OPTIONS': - headers += [('Access-Control-Allow-Methods', 'OPTIONS, GET, POST')] - if 'HTTP_ACCESS_CONTROL_REQUEST_HEADERS' in environ: - headers += [('Access-Control-Allow-Headers', - environ['HTTP_ACCESS_CONTROL_REQUEST_HEADERS'])] - if self.cors_credentials: - headers += [('Access-Control-Allow-Credentials', 'true')] - return headers - - def _gzip(self, response): - """Apply gzip compression to a response.""" - bytesio = io.BytesIO() - with gzip.GzipFile(fileobj=bytesio, mode='w') as gz: - gz.write(response) - return bytesio.getvalue() - - def _deflate(self, response): - """Apply deflate compression to a response.""" - return zlib.compress(response) - - def _log_error_once(self, message, message_key): - """Log message with logging.ERROR level the first time, then log - with given level.""" - if message_key not in self.log_message_keys: - self.logger.error(message + ' (further occurrences of this error ' - 'will be logged with level INFO)') - self.log_message_keys.add(message_key) - else: - self.logger.info(message) - - def _service_task(self): # pragma: no cover - """Monitor connected clients and clean up those that time out.""" - while True: - if len(self.sockets) == 0: - # nothing to do - self.sleep(self.ping_timeout) - continue - - # go through the entire client list in a ping interval cycle - sleep_interval = float(self.ping_timeout) / len(self.sockets) - - try: - # iterate over the current clients - for s in self.sockets.copy().values(): - if not s.closing and not s.closed: - s.check_ping_timeout() - self.sleep(sleep_interval) - except (SystemExit, KeyboardInterrupt): - self.logger.info('service task canceled') - break - except: - # an unexpected exception has occurred, log it and continue - self.logger.exception('service task exception') diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/socket.py 
b/venv_flaskchat/lib/python3.11/site-packages/engineio/socket.py deleted file mode 100644 index be0c83f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/socket.py +++ /dev/null @@ -1,260 +0,0 @@ -import sys -import time - -from . import exceptions -from . import packet -from . import payload - - -class Socket(object): - """An Engine.IO socket.""" - upgrade_protocols = ['websocket'] - - def __init__(self, server, sid): - self.server = server - self.sid = sid - self.queue = self.server.create_queue() - self.last_ping = None - self.connected = False - self.upgrading = False - self.upgraded = False - self.closing = False - self.closed = False - self.session = {} - - def poll(self): - """Wait for packets to send to the client.""" - queue_empty = self.server.get_queue_empty_exception() - try: - packets = [self.queue.get( - timeout=self.server.ping_interval + self.server.ping_timeout)] - self.queue.task_done() - except queue_empty: - raise exceptions.QueueEmpty() - if packets == [None]: - return [] - while True: - try: - pkt = self.queue.get(block=False) - self.queue.task_done() - if pkt is None: - self.queue.put(None) - break - packets.append(pkt) - except queue_empty: - break - return packets - - def receive(self, pkt): - """Receive packet from the client.""" - packet_name = packet.packet_names[pkt.packet_type] \ - if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN' - self.server.logger.info('%s: Received packet %s data %s', - self.sid, packet_name, - pkt.data if not isinstance(pkt.data, bytes) - else '') - if pkt.packet_type == packet.PONG: - self.schedule_ping() - elif pkt.packet_type == packet.MESSAGE: - self.server._trigger_event('message', self.sid, pkt.data, - run_async=self.server.async_handlers) - elif pkt.packet_type == packet.UPGRADE: - self.send(packet.Packet(packet.NOOP)) - elif pkt.packet_type == packet.CLOSE: - self.close(wait=False, abort=True) - else: - raise exceptions.UnknownPacketError() - - def check_ping_timeout(self): - """Make sure the client is still responding to pings.""" - if self.closed: - raise exceptions.SocketIsClosedError() - if self.last_ping and \ - time.time() - self.last_ping > self.server.ping_timeout: - self.server.logger.info('%s: Client is gone, closing socket', - self.sid) - # Passing abort=False here will cause close() to write a - # CLOSE packet. 
This has the effect of updating half-open sockets - # to their correct state of disconnected - self.close(wait=False, abort=False) - return False - return True - - def send(self, pkt): - """Send a packet to the client.""" - if not self.check_ping_timeout(): - return - else: - self.queue.put(pkt) - self.server.logger.info('%s: Sending packet %s data %s', - self.sid, packet.packet_names[pkt.packet_type], - pkt.data if not isinstance(pkt.data, bytes) - else '') - - def handle_get_request(self, environ, start_response): - """Handle a long-polling GET request from the client.""" - connections = [ - s.strip() - for s in environ.get('HTTP_CONNECTION', '').lower().split(',')] - transport = environ.get('HTTP_UPGRADE', '').lower() - if 'upgrade' in connections and transport in self.upgrade_protocols: - self.server.logger.info('%s: Received request to upgrade to %s', - self.sid, transport) - return getattr(self, '_upgrade_' + transport)(environ, - start_response) - if self.upgrading or self.upgraded: - # we are upgrading to WebSocket, do not return any more packets - # through the polling endpoint - return [packet.Packet(packet.NOOP)] - try: - packets = self.poll() - except exceptions.QueueEmpty: - exc = sys.exc_info() - self.close(wait=False) - raise exc[1].with_traceback(exc[2]) - return packets - - def handle_post_request(self, environ): - """Handle a long-polling POST request from the client.""" - length = int(environ.get('CONTENT_LENGTH', '0')) - if length > self.server.max_http_buffer_size: - raise exceptions.ContentTooLongError() - else: - body = environ['wsgi.input'].read(length).decode('utf-8') - p = payload.Payload(encoded_payload=body) - for pkt in p.packets: - self.receive(pkt) - - def close(self, wait=True, abort=False): - """Close the socket connection.""" - if not self.closed and not self.closing: - self.closing = True - self.server._trigger_event('disconnect', self.sid, run_async=False) - if not abort: - self.send(packet.Packet(packet.CLOSE)) - self.closed = True - self.queue.put(None) - if wait: - self.queue.join() - - def schedule_ping(self): - def send_ping(): - self.last_ping = None - self.server.sleep(self.server.ping_interval) - if not self.closing and not self.closed: - self.last_ping = time.time() - self.send(packet.Packet(packet.PING)) - - self.server.start_background_task(send_ping) - - def _upgrade_websocket(self, environ, start_response): - """Upgrade the connection from polling to websocket.""" - if self.upgraded: - raise IOError('Socket has been upgraded already') - if self.server._async['websocket'] is None: - # the selected async mode does not support websocket - return self.server._bad_request() - ws = self.server._async['websocket'](self._websocket_handler) - return ws(environ, start_response) - - def _websocket_handler(self, ws): - """Engine.IO handler for websocket transport.""" - def websocket_wait(): - data = ws.wait() - if data and len(data) > self.server.max_http_buffer_size: - raise ValueError('packet is too large') - return data - - # try to set a socket timeout matching the configured ping interval - # and timeout - for attr in ['_sock', 'socket']: # pragma: no cover - if hasattr(ws, attr) and hasattr(getattr(ws, attr), 'settimeout'): - getattr(ws, attr).settimeout( - self.server.ping_interval + self.server.ping_timeout) - - if self.connected: - # the socket was already connected, so this is an upgrade - self.upgrading = True # hold packet sends during the upgrade - - pkt = websocket_wait() - decoded_pkt = packet.Packet(encoded_packet=pkt) - if 
decoded_pkt.packet_type != packet.PING or \ - decoded_pkt.data != 'probe': - self.server.logger.info( - '%s: Failed websocket upgrade, no PING packet', self.sid) - self.upgrading = False - return [] - ws.send(packet.Packet(packet.PONG, data='probe').encode()) - self.queue.put(packet.Packet(packet.NOOP)) # end poll - - pkt = websocket_wait() - decoded_pkt = packet.Packet(encoded_packet=pkt) - if decoded_pkt.packet_type != packet.UPGRADE: - self.upgraded = False - self.server.logger.info( - ('%s: Failed websocket upgrade, expected UPGRADE packet, ' - 'received %s instead.'), - self.sid, pkt) - self.upgrading = False - return [] - self.upgraded = True - self.upgrading = False - else: - self.connected = True - self.upgraded = True - - # start separate writer thread - def writer(): - while True: - packets = None - try: - packets = self.poll() - except exceptions.QueueEmpty: - break - if not packets: - # empty packet list returned -> connection closed - break - try: - for pkt in packets: - ws.send(pkt.encode()) - except: - break - writer_task = self.server.start_background_task(writer) - - self.server.logger.info( - '%s: Upgrade to websocket successful', self.sid) - - while True: - p = None - try: - p = websocket_wait() - except Exception as e: - # if the socket is already closed, we can assume this is a - # downstream error of that - if not self.closed: # pragma: no cover - self.server.logger.info( - '%s: Unexpected error "%s", closing connection', - self.sid, str(e)) - break - if p is None: - # connection closed by client - break - pkt = packet.Packet(encoded_packet=p) - try: - self.receive(pkt) - except exceptions.UnknownPacketError: # pragma: no cover - pass - except exceptions.SocketIsClosedError: # pragma: no cover - self.server.logger.info('Receive error -- socket is closed') - break - except: # pragma: no cover - # if we get an unexpected exception we log the error and exit - # the connection properly - self.server.logger.exception('Unknown receive error') - break - - self.queue.put(None) # unlock the writer task so that it can exit - writer_task.join() - self.close(wait=False, abort=True) - - return [] diff --git a/venv_flaskchat/lib/python3.11/site-packages/engineio/static_files.py b/venv_flaskchat/lib/python3.11/site-packages/engineio/static_files.py deleted file mode 100644 index 77c8915..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/engineio/static_files.py +++ /dev/null @@ -1,60 +0,0 @@ -content_types = { - 'css': 'text/css', - 'gif': 'image/gif', - 'html': 'text/html', - 'jpg': 'image/jpeg', - 'js': 'application/javascript', - 'json': 'application/json', - 'png': 'image/png', - 'txt': 'text/plain', -} - - -def get_static_file(path, static_files): - """Return the local filename and content type for the requested static - file URL. - - :param path: the path portion of the requested URL. - :param static_files: a static file configuration dictionary. - - This function returns a dictionary with two keys, "filename" and - "content_type". If the requested URL does not match any static file, the - return value is None. 
- """ - extra_path = '' - if path in static_files: - f = static_files[path] - else: - f = None - while path != '': - path, last = path.rsplit('/', 1) - extra_path = '/' + last + extra_path - if path in static_files: - f = static_files[path] - break - elif path + '/' in static_files: - f = static_files[path + '/'] - break - if f: - if isinstance(f, str): - f = {'filename': f} - else: - f = f.copy() # in case it is mutated below - if f['filename'].endswith('/') and extra_path.startswith('/'): - extra_path = extra_path[1:] - f['filename'] += extra_path - if f['filename'].endswith('/'): - if '' in static_files: - if isinstance(static_files[''], str): - f['filename'] += static_files[''] - else: - f['filename'] += static_files['']['filename'] - if 'content_type' in static_files['']: - f['content_type'] = static_files['']['content_type'] - else: - f['filename'] += 'index.html' - if 'content_type' not in f: - ext = f['filename'].rsplit('.')[-1] - f['content_type'] = content_types.get( - ext, 'application/octet-stream') - return f diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/flask/__init__.py deleted file mode 100644 index 0bef221..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/__init__.py +++ /dev/null @@ -1,102 +0,0 @@ -from . import json as json -from .app import Flask as Flask -from .app import Request as Request -from .app import Response as Response -from .blueprints import Blueprint as Blueprint -from .config import Config as Config -from .ctx import after_this_request as after_this_request -from .ctx import copy_current_request_context as copy_current_request_context -from .ctx import has_app_context as has_app_context -from .ctx import has_request_context as has_request_context -from .globals import current_app as current_app -from .globals import g as g -from .globals import request as request -from .globals import session as session -from .helpers import abort as abort -from .helpers import flash as flash -from .helpers import get_flashed_messages as get_flashed_messages -from .helpers import get_template_attribute as get_template_attribute -from .helpers import make_response as make_response -from .helpers import redirect as redirect -from .helpers import send_file as send_file -from .helpers import send_from_directory as send_from_directory -from .helpers import stream_with_context as stream_with_context -from .helpers import url_for as url_for -from .json import jsonify as jsonify -from .signals import appcontext_popped as appcontext_popped -from .signals import appcontext_pushed as appcontext_pushed -from .signals import appcontext_tearing_down as appcontext_tearing_down -from .signals import before_render_template as before_render_template -from .signals import got_request_exception as got_request_exception -from .signals import message_flashed as message_flashed -from .signals import request_finished as request_finished -from .signals import request_started as request_started -from .signals import request_tearing_down as request_tearing_down -from .signals import template_rendered as template_rendered -from .templating import render_template as render_template -from .templating import render_template_string as render_template_string -from .templating import stream_template as stream_template -from .templating import stream_template_string as stream_template_string - -__version__ = "2.3.2" - - -def __getattr__(name): - if name == "_app_ctx_stack": - import warnings - from .globals import 
__app_ctx_stack - - warnings.warn( - "'_app_ctx_stack' is deprecated and will be removed in Flask 2.4.", - DeprecationWarning, - stacklevel=2, - ) - return __app_ctx_stack - - if name == "_request_ctx_stack": - import warnings - from .globals import __request_ctx_stack - - warnings.warn( - "'_request_ctx_stack' is deprecated and will be removed in Flask 2.4.", - DeprecationWarning, - stacklevel=2, - ) - return __request_ctx_stack - - if name == "escape": - import warnings - from markupsafe import escape - - warnings.warn( - "'flask.escape' is deprecated and will be removed in Flask 2.4. Import" - " 'markupsafe.escape' instead.", - DeprecationWarning, - stacklevel=2, - ) - return escape - - if name == "Markup": - import warnings - from markupsafe import Markup - - warnings.warn( - "'flask.Markup' is deprecated and will be removed in Flask 2.4. Import" - " 'markupsafe.Markup' instead.", - DeprecationWarning, - stacklevel=2, - ) - return Markup - - if name == "signals_available": - import warnings - - warnings.warn( - "'signals_available' is deprecated and will be removed in Flask 2.4." - " Signals are always available", - DeprecationWarning, - stacklevel=2, - ) - return True - - raise AttributeError(name) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__main__.py b/venv_flaskchat/lib/python3.11/site-packages/flask/__main__.py deleted file mode 100644 index 4e28416..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .cli import main - -main() diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index e21fc3a..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/__main__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/__main__.cpython-311.pyc deleted file mode 100644 index a501d54..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/__main__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/app.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/app.cpython-311.pyc deleted file mode 100644 index 0273a9f..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/app.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/blueprints.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/blueprints.cpython-311.pyc deleted file mode 100644 index 4991826..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/blueprints.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/cli.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/cli.cpython-311.pyc deleted file mode 100644 index bf88b09..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/cli.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/config.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/config.cpython-311.pyc deleted file mode 100644 index 358ca4b..0000000 
Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/config.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/ctx.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/ctx.cpython-311.pyc deleted file mode 100644 index f129b7c..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/ctx.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/debughelpers.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/debughelpers.cpython-311.pyc deleted file mode 100644 index a8c1e1d..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/debughelpers.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/globals.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/globals.cpython-311.pyc deleted file mode 100644 index ee3a5ad..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/globals.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/helpers.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/helpers.cpython-311.pyc deleted file mode 100644 index f600da0..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/helpers.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/logging.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/logging.cpython-311.pyc deleted file mode 100644 index e96d707..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/logging.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/scaffold.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/scaffold.cpython-311.pyc deleted file mode 100644 index b6df123..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/scaffold.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/sessions.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/sessions.cpython-311.pyc deleted file mode 100644 index 3b2482d..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/sessions.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/signals.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/signals.cpython-311.pyc deleted file mode 100644 index e1adb2d..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/signals.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/templating.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/templating.cpython-311.pyc deleted file mode 100644 index c116753..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/templating.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/testing.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/testing.cpython-311.pyc deleted file 
mode 100644 index fd682aa..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/testing.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/typing.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/typing.cpython-311.pyc deleted file mode 100644 index cf4f763..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/typing.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/views.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/views.cpython-311.pyc deleted file mode 100644 index 0bb3b8d..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/views.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/wrappers.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/wrappers.cpython-311.pyc deleted file mode 100644 index f112baf..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/__pycache__/wrappers.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/app.py b/venv_flaskchat/lib/python3.11/site-packages/flask/app.py deleted file mode 100644 index 3b6b38d..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/app.py +++ /dev/null @@ -1,2213 +0,0 @@ -from __future__ import annotations - -import logging -import os -import sys -import typing as t -import weakref -from collections.abc import Iterator as _abc_Iterator -from datetime import timedelta -from inspect import iscoroutinefunction -from itertools import chain -from types import TracebackType -from urllib.parse import quote as _url_quote - -import click -from werkzeug.datastructures import Headers -from werkzeug.datastructures import ImmutableDict -from werkzeug.exceptions import Aborter -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.exceptions import HTTPException -from werkzeug.exceptions import InternalServerError -from werkzeug.routing import BuildError -from werkzeug.routing import Map -from werkzeug.routing import MapAdapter -from werkzeug.routing import RequestRedirect -from werkzeug.routing import RoutingException -from werkzeug.routing import Rule -from werkzeug.serving import is_running_from_reloader -from werkzeug.utils import cached_property -from werkzeug.utils import redirect as _wz_redirect -from werkzeug.wrappers import Response as BaseResponse - -from . import cli -from . 
import typing as ft -from .config import Config -from .config import ConfigAttribute -from .ctx import _AppCtxGlobals -from .ctx import AppContext -from .ctx import RequestContext -from .globals import _cv_app -from .globals import _cv_request -from .globals import g -from .globals import request -from .globals import request_ctx -from .globals import session -from .helpers import _split_blueprint_path -from .helpers import get_debug_flag -from .helpers import get_flashed_messages -from .helpers import get_load_dotenv -from .json.provider import DefaultJSONProvider -from .json.provider import JSONProvider -from .logging import create_logger -from .scaffold import _endpoint_from_view_func -from .scaffold import _sentinel -from .scaffold import find_package -from .scaffold import Scaffold -from .scaffold import setupmethod -from .sessions import SecureCookieSessionInterface -from .sessions import SessionInterface -from .signals import appcontext_tearing_down -from .signals import got_request_exception -from .signals import request_finished -from .signals import request_started -from .signals import request_tearing_down -from .templating import DispatchingJinjaLoader -from .templating import Environment -from .wrappers import Request -from .wrappers import Response - -if t.TYPE_CHECKING: # pragma: no cover - from .blueprints import Blueprint - from .testing import FlaskClient - from .testing import FlaskCliRunner - -T_shell_context_processor = t.TypeVar( - "T_shell_context_processor", bound=ft.ShellContextProcessorCallable -) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) - - -def _make_timedelta(value: timedelta | int | None) -> timedelta | None: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class Flask(Scaffold): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. 
- - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. (For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. - This should only be set manually when it can't be detected - automatically, such as for namespace packages. - """ - - #: The class that is used for request objects. See :class:`~flask.Request` - #: for more information. - request_class = Request - - #: The class that is used for response objects. See - #: :class:`~flask.Response` for more information. - response_class = Response - - #: The class of the object assigned to :attr:`aborter`, created by - #: :meth:`create_aborter`. That object is called by - #: :func:`flask.abort` to raise HTTP errors, and can be - #: called directly as well. - #: - #: Defaults to :class:`werkzeug.exceptions.Aborter`. - #: - #: .. versionadded:: 2.2 - aborter_class = Aborter - - #: The class that is used for the Jinja environment. - #: - #: .. 
versionadded:: 0.11 - jinja_environment = Environment - - #: The class that is used for the :data:`~flask.g` instance. - #: - #: Example use cases for a custom class: - #: - #: 1. Store arbitrary attributes on flask.g. - #: 2. Add a property for lazy per-request database connectors. - #: 3. Return None instead of AttributeError on unexpected attributes. - #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. - #: - #: In Flask 0.9 this property was called `request_globals_class` but it - #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the - #: flask.g object is now application context scoped. - #: - #: .. versionadded:: 0.10 - app_ctx_globals_class = _AppCtxGlobals - - #: The class that is used for the ``config`` attribute of this app. - #: Defaults to :class:`~flask.Config`. - #: - #: Example use cases for a custom class: - #: - #: 1. Default values for certain config options. - #: 2. Access to config values through attributes in addition to keys. - #: - #: .. versionadded:: 0.11 - config_class = Config - - #: The testing flag. Set this to ``True`` to enable the test mode of - #: Flask extensions (and in the future probably also Flask itself). - #: For example this might activate test helpers that have an - #: additional runtime cost which should not be enabled by default. - #: - #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the - #: default it's implicitly enabled. - #: - #: This attribute can also be configured from the config with the - #: ``TESTING`` configuration key. Defaults to ``False``. - testing = ConfigAttribute("TESTING") - - #: If a secret key is set, cryptographic components can use this to - #: sign cookies and other things. Set this to a complex random value - #: when you want to use the secure cookie for instance. - #: - #: This attribute can also be configured from the config with the - #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. - secret_key = ConfigAttribute("SECRET_KEY") - - #: A :class:`~datetime.timedelta` which is used to set the expiration - #: date of a permanent session. The default is 31 days which makes a - #: permanent session survive for roughly one month. - #: - #: This attribute can also be configured from the config with the - #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to - #: ``timedelta(days=31)`` - permanent_session_lifetime = ConfigAttribute( - "PERMANENT_SESSION_LIFETIME", get_converter=_make_timedelta - ) - - json_provider_class: type[JSONProvider] = DefaultJSONProvider - """A subclass of :class:`~flask.json.provider.JSONProvider`. An - instance is created and assigned to :attr:`app.json` when creating - the app. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, uses - Python's built-in :mod:`json` library. A different provider can use - a different JSON library. - - .. versionadded:: 2.2 - """ - - #: Options that are passed to the Jinja environment in - #: :meth:`create_jinja_environment`. Changing these options after - #: the environment is created (accessing :attr:`jinja_env`) will - #: have no effect. - #: - #: .. versionchanged:: 1.1.0 - #: This is a ``dict`` instead of an ``ImmutableDict`` to allow - #: easier configuration. - #: - jinja_options: dict = {} - - #: Default configuration parameters. 
- default_config = ImmutableDict( - { - "DEBUG": None, - "TESTING": False, - "PROPAGATE_EXCEPTIONS": None, - "SECRET_KEY": None, - "PERMANENT_SESSION_LIFETIME": timedelta(days=31), - "USE_X_SENDFILE": False, - "SERVER_NAME": None, - "APPLICATION_ROOT": "/", - "SESSION_COOKIE_NAME": "session", - "SESSION_COOKIE_DOMAIN": None, - "SESSION_COOKIE_PATH": None, - "SESSION_COOKIE_HTTPONLY": True, - "SESSION_COOKIE_SECURE": False, - "SESSION_COOKIE_SAMESITE": None, - "SESSION_REFRESH_EACH_REQUEST": True, - "MAX_CONTENT_LENGTH": None, - "SEND_FILE_MAX_AGE_DEFAULT": None, - "TRAP_BAD_REQUEST_ERRORS": None, - "TRAP_HTTP_EXCEPTIONS": False, - "EXPLAIN_TEMPLATE_LOADING": False, - "PREFERRED_URL_SCHEME": "http", - "TEMPLATES_AUTO_RELOAD": None, - "MAX_COOKIE_SIZE": 4093, - } - ) - - #: The rule object to use for URL rules created. This is used by - #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. - #: - #: .. versionadded:: 0.7 - url_rule_class = Rule - - #: The map object to use for storing the URL rules and routing - #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. - #: - #: .. versionadded:: 1.1.0 - url_map_class = Map - - #: The :meth:`test_client` method creates an instance of this test - #: client class. Defaults to :class:`~flask.testing.FlaskClient`. - #: - #: .. versionadded:: 0.7 - test_client_class: type[FlaskClient] | None = None - - #: The :class:`~click.testing.CliRunner` subclass, by default - #: :class:`~flask.testing.FlaskCliRunner` that is used by - #: :meth:`test_cli_runner`. Its ``__init__`` method should take a - #: Flask app object as the first argument. - #: - #: .. versionadded:: 1.0 - test_cli_runner_class: type[FlaskCliRunner] | None = None - - #: the session interface to use. By default an instance of - #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. - #: - #: .. versionadded:: 0.8 - session_interface: SessionInterface = SecureCookieSessionInterface() - - def __init__( - self, - import_name: str, - static_url_path: str | None = None, - static_folder: str | os.PathLike | None = "static", - static_host: str | None = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: str | os.PathLike | None = "templates", - instance_path: str | None = None, - instance_relative_config: bool = False, - root_path: str | None = None, - ): - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if instance_path is None: - instance_path = self.auto_find_instance_path() - elif not os.path.isabs(instance_path): - raise ValueError( - "If an instance path is provided it must be absolute." - " A relative path was given instead." - ) - - #: Holds the path to the instance folder. - #: - #: .. versionadded:: 0.8 - self.instance_path = instance_path - - #: The configuration dictionary as :class:`Config`. This behaves - #: exactly like a regular dictionary but supports additional methods - #: to load a config from files. - self.config = self.make_config(instance_relative_config) - - #: An instance of :attr:`aborter_class` created by - #: :meth:`make_aborter`. This is called by :func:`flask.abort` - #: to raise HTTP errors, and can be called directly as well. - #: - #: .. versionadded:: 2.2 - #: Moved from ``flask.abort``, which calls this object. - self.aborter = self.make_aborter() - - self.json: JSONProvider = self.json_provider_class(self) - """Provides access to JSON methods. 
Functions in ``flask.json`` - will call methods on this provider when the application context - is active. Used for handling JSON requests and responses. - - An instance of :attr:`json_provider_class`. Can be customized by - changing that attribute on a subclass, or by assigning to this - attribute afterwards. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, - uses Python's built-in :mod:`json` library. A different provider - can use a different JSON library. - - .. versionadded:: 2.2 - """ - - #: A list of functions that are called by - #: :meth:`handle_url_build_error` when :meth:`.url_for` raises a - #: :exc:`~werkzeug.routing.BuildError`. Each function is called - #: with ``error``, ``endpoint`` and ``values``. If a function - #: returns ``None`` or raises a ``BuildError``, it is skipped. - #: Otherwise, its return value is returned by ``url_for``. - #: - #: .. versionadded:: 0.9 - self.url_build_error_handlers: list[ - t.Callable[[Exception, str, dict[str, t.Any]], str] - ] = [] - - #: A list of functions that are called when the application context - #: is destroyed. Since the application context is also torn down - #: if the request ends this is the place to store code that disconnects - #: from databases. - #: - #: .. versionadded:: 0.9 - self.teardown_appcontext_funcs: list[ft.TeardownCallable] = [] - - #: A list of shell context processor functions that should be run - #: when a shell context is created. - #: - #: .. versionadded:: 0.11 - self.shell_context_processors: list[ft.ShellContextProcessorCallable] = [] - - #: Maps registered blueprint names to blueprint objects. The - #: dict retains the order the blueprints were registered in. - #: Blueprints can be registered multiple times, this dict does - #: not track how often they were attached. - #: - #: .. versionadded:: 0.7 - self.blueprints: dict[str, Blueprint] = {} - - #: a place where extensions can store application specific state. For - #: example this is where an extension could store database engines and - #: similar things. - #: - #: The key must match the name of the extension module. For example in - #: case of a "Flask-Foo" extension in `flask_foo`, the key would be - #: ``'foo'``. - #: - #: .. versionadded:: 0.7 - self.extensions: dict = {} - - #: The :class:`~werkzeug.routing.Map` for this instance. You can use - #: this to change the routing converters after the class was created - #: but before any routes are connected. Example:: - #: - #: from werkzeug.routing import BaseConverter - #: - #: class ListConverter(BaseConverter): - #: def to_python(self, value): - #: return value.split(',') - #: def to_url(self, values): - #: return ','.join(super(ListConverter, self).to_url(value) - #: for value in values) - #: - #: app = Flask(__name__) - #: app.url_map.converters['list'] = ListConverter - self.url_map = self.url_map_class() - - self.url_map.host_matching = host_matching - self.subdomain_matching = subdomain_matching - - # tracks internally if the application already handled at least one - # request. - self._got_first_request = False - - # Add a static route using the provided static_url_path, static_host, - # and static_folder if there is a configured static_folder. - # Note we do this without checking if static_folder exists. - # For one, it might be created while the server is running (e.g. during - # development). 
Also, Google App Engine stores static files somewhere - if self.has_static_folder: - assert ( - bool(static_host) == host_matching - ), "Invalid static_host/host_matching combination" - # Use a weakref to avoid creating a reference cycle between the app - # and the view function (see #3761). - self_ref = weakref.ref(self) - self.add_url_rule( - f"{self.static_url_path}/", - endpoint="static", - host=static_host, - view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 - ) - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - def _check_setup_finished(self, f_name: str) -> None: - if self._got_first_request: - raise AssertionError( - f"The setup method '{f_name}' can no longer be called" - " on the application. It has already handled its first" - " request, any changes will not be applied" - " consistently.\n" - "Make sure all imports, decorators, functions, etc." - " needed to set up the application are done before" - " running it." - ) - - @cached_property - def name(self) -> str: # type: ignore - """The name of the application. This is usually the import name - with the difference that it's guessed from the run file if the - import name is main. This name is used as a display name when - Flask needs the name of the application. It can be set and overridden - to change the value. - - .. versionadded:: 0.8 - """ - if self.import_name == "__main__": - fn = getattr(sys.modules["__main__"], "__file__", None) - if fn is None: - return "__main__" - return os.path.splitext(os.path.basename(fn))[0] - return self.import_name - - @cached_property - def logger(self) -> logging.Logger: - """A standard Python :class:`~logging.Logger` for the app, with - the same name as :attr:`name`. - - In debug mode, the logger's :attr:`~logging.Logger.level` will - be set to :data:`~logging.DEBUG`. - - If there are no handlers configured, a default handler will be - added. See :doc:`/logging` for more information. - - .. versionchanged:: 1.1.0 - The logger takes the same name as :attr:`name` rather than - hard-coding ``"flask.app"``. - - .. versionchanged:: 1.0.0 - Behavior was simplified. The logger is always named - ``"flask.app"``. The level is only set during configuration, - it doesn't check ``app.debug`` each time. Only one format is - used, not different ones depending on ``app.debug``. No - handlers are removed, and a handler is only added if no - handlers are already configured. - - .. versionadded:: 0.3 - """ - return create_logger(self) - - @cached_property - def jinja_env(self) -> Environment: - """The Jinja environment used to load templates. - - The environment is created the first time this property is - accessed. Changing :attr:`jinja_options` after that will have no - effect. - """ - return self.create_jinja_environment() - - @property - def got_first_request(self) -> bool: - """This attribute is set to ``True`` if the application started - handling the first request. - - .. deprecated:: 2.3 - Will be removed in Flask 2.4. - - .. versionadded:: 0.8 - """ - import warnings - - warnings.warn( - "'got_first_request' is deprecated and will be removed in Flask 2.4.", - DeprecationWarning, - stacklevel=2, - ) - return self._got_first_request - - def make_config(self, instance_relative: bool = False) -> Config: - """Used to create the config attribute by the Flask constructor. 
- The `instance_relative` parameter is passed in from the constructor - of Flask (there named `instance_relative_config`) and indicates if - the config should be relative to the instance path or the root path - of the application. - - .. versionadded:: 0.8 - """ - root_path = self.root_path - if instance_relative: - root_path = self.instance_path - defaults = dict(self.default_config) - defaults["DEBUG"] = get_debug_flag() - return self.config_class(root_path, defaults) - - def make_aborter(self) -> Aborter: - """Create the object to assign to :attr:`aborter`. That object - is called by :func:`flask.abort` to raise HTTP errors, and can - be called directly as well. - - By default, this creates an instance of :attr:`aborter_class`, - which defaults to :class:`werkzeug.exceptions.Aborter`. - - .. versionadded:: 2.2 - """ - return self.aborter_class() - - def auto_find_instance_path(self) -> str: - """Tries to locate the instance path if it was not provided to the - constructor of the application class. It will basically calculate - the path to a folder named ``instance`` next to your main file or - the package. - - .. versionadded:: 0.8 - """ - prefix, package_path = find_package(self.import_name) - if prefix is None: - return os.path.join(package_path, "instance") - return os.path.join(prefix, "var", f"{self.name}-instance") - - def open_instance_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: - """Opens a resource from the application's instance folder - (:attr:`instance_path`). Otherwise works like - :meth:`open_resource`. Instance resources can also be opened for - writing. - - :param resource: the name of the resource. To access resources within - subfolders use forward slashes as separator. - :param mode: resource file opening mode, default is 'rb'. - """ - return open(os.path.join(self.instance_path, resource), mode) - - def create_jinja_environment(self) -> Environment: - """Create the Jinja environment based on :attr:`jinja_options` - and the various Jinja-related methods of the app. Changing - :attr:`jinja_options` after this will have no effect. Also adds - Flask-related globals and filters to the environment. - - .. versionchanged:: 0.11 - ``Environment.auto_reload`` set in accordance with - ``TEMPLATES_AUTO_RELOAD`` configuration option. - - .. versionadded:: 0.5 - """ - options = dict(self.jinja_options) - - if "autoescape" not in options: - options["autoescape"] = self.select_jinja_autoescape - - if "auto_reload" not in options: - auto_reload = self.config["TEMPLATES_AUTO_RELOAD"] - - if auto_reload is None: - auto_reload = self.debug - - options["auto_reload"] = auto_reload - - rv = self.jinja_environment(self, **options) - rv.globals.update( - url_for=self.url_for, - get_flashed_messages=get_flashed_messages, - config=self.config, - # request, session and g are normally added with the - # context processor for efficiency reasons but for imported - # templates we also want the proxies in there. - request=request, - session=session, - g=g, - ) - rv.policies["json.dumps_function"] = self.json.dumps - return rv - - def create_global_jinja_loader(self) -> DispatchingJinjaLoader: - """Creates the loader for the Jinja2 environment. Can be used to - override just the loader and keeping the rest unchanged. It's - discouraged to override this function. Instead one should override - the :meth:`jinja_loader` function instead. - - The global loader dispatches between the loaders of the application - and the individual blueprints. - - .. 
versionadded:: 0.7 - """ - return DispatchingJinjaLoader(self) - - def select_jinja_autoescape(self, filename: str) -> bool: - """Returns ``True`` if autoescaping should be active for the given - template name. If no template name is given, returns `True`. - - .. versionchanged:: 2.2 - Autoescaping is now enabled by default for ``.svg`` files. - - .. versionadded:: 0.5 - """ - if filename is None: - return True - return filename.endswith((".html", ".htm", ".xml", ".xhtml", ".svg")) - - def update_template_context(self, context: dict) -> None: - """Update the template context with some commonly used variables. - This injects request, session, config and g into the template - context as well as everything template context processors want - to inject. Note that the as of Flask 0.6, the original values - in the context will not be overridden if a context processor - decides to return a value with the same key. - - :param context: the context as a dictionary that is updated in place - to add extra variables. - """ - names: t.Iterable[str | None] = (None,) - - # A template may be rendered outside a request context. - if request: - names = chain(names, reversed(request.blueprints)) - - # The values passed to render_template take precedence. Keep a - # copy to re-apply after all context functions. - orig_ctx = context.copy() - - for name in names: - if name in self.template_context_processors: - for func in self.template_context_processors[name]: - context.update(func()) - - context.update(orig_ctx) - - def make_shell_context(self) -> dict: - """Returns the shell context for an interactive shell for this - application. This runs all the registered shell context - processors. - - .. versionadded:: 0.11 - """ - rv = {"app": self, "g": g} - for processor in self.shell_context_processors: - rv.update(processor()) - return rv - - @property - def debug(self) -> bool: - """Whether debug mode is enabled. When using ``flask run`` to start the - development server, an interactive debugger will be shown for unhandled - exceptions, and the server will be reloaded when code changes. This maps to the - :data:`DEBUG` config key. It may not behave as expected if set late. - - **Do not enable debug mode when deploying in production.** - - Default: ``False`` - """ - return self.config["DEBUG"] - - @debug.setter - def debug(self, value: bool) -> None: - self.config["DEBUG"] = value - - if self.config["TEMPLATES_AUTO_RELOAD"] is None: - self.jinja_env.auto_reload = value - - def run( - self, - host: str | None = None, - port: int | None = None, - debug: bool | None = None, - load_dotenv: bool = True, - **options: t.Any, - ) -> None: - """Runs the application on a local development server. - - Do not use ``run()`` in a production setting. It is not intended to - meet security and performance requirements for a production server. - Instead, see :doc:`/deploying/index` for WSGI server recommendations. - - If the :attr:`debug` flag is set the server will automatically reload - for code changes and show a debugger in case an exception happened. - - If you want to run the application in debug mode, but disable the - code execution on the interactive debugger, you can pass - ``use_evalex=False`` as parameter. This will keep the debugger's - traceback screen active, but disable code execution. - - It is not recommended to use this function for development with - automatic reloading as this is badly supported. Instead you should - be using the :command:`flask` command line script's ``run`` support. - - .. 
admonition:: Keep in Mind - - Flask will suppress any server error with a generic error page - unless it is in debug mode. As such to enable just the - interactive debugger without the code reloading, you have to - invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. - Setting ``use_debugger`` to ``True`` without being in debug mode - won't catch any exceptions because there won't be any to - catch. - - :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to - have the server available externally as well. Defaults to - ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable - if present. - :param port: the port of the webserver. Defaults to ``5000`` or the - port defined in the ``SERVER_NAME`` config variable if present. - :param debug: if given, enable or disable debug mode. See - :attr:`debug`. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param options: the options to be forwarded to the underlying Werkzeug - server. See :func:`werkzeug.serving.run_simple` for more - information. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment - variables from :file:`.env` and :file:`.flaskenv` files. - - The :envvar:`FLASK_DEBUG` environment variable will override :attr:`debug`. - - Threaded mode is enabled by default. - - .. versionchanged:: 0.10 - The default port is now picked from the ``SERVER_NAME`` - variable. - """ - # Ignore this call so that it doesn't start another server if - # the 'flask run' command is used. - if os.environ.get("FLASK_RUN_FROM_CLI") == "true": - if not is_running_from_reloader(): - click.secho( - " * Ignoring a call to 'app.run()' that would block" - " the current 'flask' CLI command.\n" - " Only call 'app.run()' in an 'if __name__ ==" - ' "__main__"\' guard.', - fg="red", - ) - - return - - if get_load_dotenv(load_dotenv): - cli.load_dotenv() - - # if set, env var overrides existing value - if "FLASK_DEBUG" in os.environ: - self.debug = get_debug_flag() - - # debug passed to method overrides all other sources - if debug is not None: - self.debug = bool(debug) - - server_name = self.config.get("SERVER_NAME") - sn_host = sn_port = None - - if server_name: - sn_host, _, sn_port = server_name.partition(":") - - if not host: - if sn_host: - host = sn_host - else: - host = "127.0.0.1" - - if port or port == 0: - port = int(port) - elif sn_port: - port = int(sn_port) - else: - port = 5000 - - options.setdefault("use_reloader", self.debug) - options.setdefault("use_debugger", self.debug) - options.setdefault("threaded", True) - - cli.show_server_banner(self.debug, self.name) - - from werkzeug.serving import run_simple - - try: - run_simple(t.cast(str, host), port, self, **options) - finally: - # reset the first request information if the development server - # reset normally. This makes it possible to restart the server - # without reloader and that stuff from an interactive shell. - self._got_first_request = False - - def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> FlaskClient: - """Creates a test client for this application. For information - about unit testing head over to :doc:`/testing`. - - Note that if you are testing for assertions or exceptions in your - application code, you must set ``app.testing = True`` in order for the - exceptions to propagate to the test client. 
Otherwise, the exception - will be handled by the application (not visible to the test client) and - the only indication of an AssertionError or other exception will be a - 500 status code response to the test client. See the :attr:`testing` - attribute. For example:: - - app.testing = True - client = app.test_client() - - The test client can be used in a ``with`` block to defer the closing down - of the context until the end of the ``with`` block. This is useful if - you want to access the context locals for testing:: - - with app.test_client() as c: - rv = c.get('/?vodka=42') - assert request.args['vodka'] == '42' - - Additionally, you may pass optional keyword arguments that will then - be passed to the application's :attr:`test_client_class` constructor. - For example:: - - from flask.testing import FlaskClient - - class CustomClient(FlaskClient): - def __init__(self, *args, **kwargs): - self._authentication = kwargs.pop("authentication") - super(CustomClient,self).__init__( *args, **kwargs) - - app.test_client_class = CustomClient - client = app.test_client(authentication='Basic ....') - - See :class:`~flask.testing.FlaskClient` for more information. - - .. versionchanged:: 0.4 - added support for ``with`` block usage for the client. - - .. versionadded:: 0.7 - The `use_cookies` parameter was added as well as the ability - to override the client to be used by setting the - :attr:`test_client_class` attribute. - - .. versionchanged:: 0.11 - Added `**kwargs` to support passing additional keyword arguments to - the constructor of :attr:`test_client_class`. - """ - cls = self.test_client_class - if cls is None: - from .testing import FlaskClient as cls - return cls( # type: ignore - self, self.response_class, use_cookies=use_cookies, **kwargs - ) - - def test_cli_runner(self, **kwargs: t.Any) -> FlaskCliRunner: - """Create a CLI runner for testing CLI commands. - See :ref:`testing-cli`. - - Returns an instance of :attr:`test_cli_runner_class`, by default - :class:`~flask.testing.FlaskCliRunner`. The Flask app object is - passed as the first argument. - - .. versionadded:: 1.0 - """ - cls = self.test_cli_runner_class - - if cls is None: - from .testing import FlaskCliRunner as cls - - return cls(self, **kwargs) # type: ignore - - @setupmethod - def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on the application. Keyword - arguments passed to this method will override the defaults set on the - blueprint. - - Calls the blueprint's :meth:`~flask.Blueprint.register` method after - recording the blueprint in the application's :attr:`blueprints`. - - :param blueprint: The blueprint to register. - :param url_prefix: Blueprint routes will be prefixed with this. - :param subdomain: Blueprint routes will match on this subdomain. - :param url_defaults: Blueprint routes will use these default values for - view arguments. - :param options: Additional keyword arguments are passed to - :class:`~flask.blueprints.BlueprintSetupState`. They can be - accessed in :meth:`~flask.Blueprint.record` callbacks. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 0.7 - """ - blueprint.register(self, options) - - def iter_blueprints(self) -> t.ValuesView[Blueprint]: - """Iterates over all blueprints by the order they were registered. - - .. 
versionadded:: 0.11 - """ - return self.blueprints.values() - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - options["endpoint"] = endpoint - methods = options.pop("methods", None) - - # if the methods are not given and the view_func object knows its - # methods we can use that instead. If neither exists, we go with - # a tuple of only ``GET`` as default. - if methods is None: - methods = getattr(view_func, "methods", None) or ("GET",) - if isinstance(methods, str): - raise TypeError( - "Allowed methods must be a list of strings, for" - ' example: @app.route(..., methods=["POST"])' - ) - methods = {item.upper() for item in methods} - - # Methods that should always be added - required_methods = set(getattr(view_func, "required_methods", ())) - - # starting with Flask 0.8 the view_func object can disable and - # force-enable the automatic options handling. - if provide_automatic_options is None: - provide_automatic_options = getattr( - view_func, "provide_automatic_options", None - ) - - if provide_automatic_options is None: - if "OPTIONS" not in methods: - provide_automatic_options = True - required_methods.add("OPTIONS") - else: - provide_automatic_options = False - - # Add the required methods now. - methods |= required_methods - - rule = self.url_rule_class(rule, methods=methods, **options) - rule.provide_automatic_options = provide_automatic_options # type: ignore - - self.url_map.add(rule) - if view_func is not None: - old_func = self.view_functions.get(endpoint) - if old_func is not None and old_func != view_func: - raise AssertionError( - "View function mapping is overwriting an existing" - f" endpoint function: {endpoint}" - ) - self.view_functions[endpoint] = view_func - - @setupmethod - def template_filter( - self, name: str | None = None - ) -> t.Callable[[T_template_filter], T_template_filter]: - """A decorator that is used to register custom template filter. - You can specify a name for the filter, otherwise the function - name will be used. Example:: - - @app.template_filter() - def reverse(s): - return s[::-1] - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f: T_template_filter) -> T_template_filter: - self.add_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_filter( - self, f: ft.TemplateFilterCallable, name: str | None = None - ) -> None: - """Register a custom template filter. Works exactly like the - :meth:`template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - self.jinja_env.filters[name or f.__name__] = f - - @setupmethod - def template_test( - self, name: str | None = None - ) -> t.Callable[[T_template_test], T_template_test]: - """A decorator that is used to register custom template test. - You can specify a name for the test, otherwise the function - name will be used. Example:: - - @app.template_test() - def is_prime(n): - if n == 2: - return True - for i in range(2, int(math.ceil(math.sqrt(n))) + 1): - if n % i == 0: - return False - return True - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. 
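A minimal sketch of the template filter and test registration documented above; it assumes a fresh ``Flask`` app, and the ``reverse``/``even`` names are illustrative only::

    from flask import Flask, render_template_string

    app = Flask(__name__)

    @app.template_filter()
    def reverse(s):
        # Registered under the function name, so templates use ``| reverse``.
        return s[::-1]

    @app.template_test()
    def even(n):
        # Usable in templates as ``{% if value is even %}``.
        return n % 2 == 0

    with app.app_context():
        out = render_template_string(
            "{{ 'flask' | reverse }}, {{ 'even' if 4 is even else 'odd' }}"
        )
        assert out == "ksalf, even"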
- """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a custom template test. Works exactly like the - :meth:`template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - self.jinja_env.tests[name or f.__name__] = f - - @setupmethod - def template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """A decorator that is used to register a custom template global function. - You can specify a name for the global function, otherwise the function - name will be used. Example:: - - @app.template_global() - def double(n): - return 2 * n - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a custom template global function. Works exactly like the - :meth:`template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - self.jinja_env.globals[name or f.__name__] = f - - @setupmethod - def teardown_appcontext(self, f: T_teardown) -> T_teardown: - """Registers a function to be called when the application - context is popped. The application context is typically popped - after the request context for each request, at the end of CLI - commands, or after a manually pushed context ends. - - .. code-block:: python - - with app.app_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the app context is - made inactive. Since a request context typically also manages an - application context it would also be called when you pop a - request context. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. - - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - .. versionadded:: 0.9 - """ - self.teardown_appcontext_funcs.append(f) - return f - - @setupmethod - def shell_context_processor( - self, f: T_shell_context_processor - ) -> T_shell_context_processor: - """Registers a shell context processor function. - - .. versionadded:: 0.11 - """ - self.shell_context_processors.append(f) - return f - - def _find_error_handler(self, e: Exception) -> ft.ErrorHandlerCallable | None: - """Return a registered error handler for an exception in this order: - blueprint handler for a specific code, app handler for a specific code, - blueprint handler for an exception class, app handler for an exception - class, or ``None`` if a suitable handler is not found. 
- """ - exc_class, code = self._get_exc_class_and_code(type(e)) - names = (*request.blueprints, None) - - for c in (code, None) if code is not None else (None,): - for name in names: - handler_map = self.error_handler_spec[name][c] - - if not handler_map: - continue - - for cls in exc_class.__mro__: - handler = handler_map.get(cls) - - if handler is not None: - return handler - return None - - def handle_http_exception( - self, e: HTTPException - ) -> HTTPException | ft.ResponseReturnValue: - """Handles an HTTP exception. By default this will invoke the - registered error handlers and fall back to returning the - exception as response. - - .. versionchanged:: 1.0.3 - ``RoutingException``, used internally for actions such as - slash redirects during routing, is not passed to error - handlers. - - .. versionchanged:: 1.0 - Exceptions are looked up by code *and* by MRO, so - ``HTTPException`` subclasses can be handled with a catch-all - handler for the base ``HTTPException``. - - .. versionadded:: 0.3 - """ - # Proxy exceptions don't have error codes. We want to always return - # those unchanged as errors - if e.code is None: - return e - - # RoutingExceptions are used internally to trigger routing - # actions, such as slash redirects raising RequestRedirect. They - # are not raised or handled in user code. - if isinstance(e, RoutingException): - return e - - handler = self._find_error_handler(e) - if handler is None: - return e - return self.ensure_sync(handler)(e) - - def trap_http_exception(self, e: Exception) -> bool: - """Checks if an HTTP exception should be trapped or not. By default - this will return ``False`` for all exceptions except for a bad request - key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It - also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. - - This is called for all HTTP exceptions raised by a view function. - If it returns ``True`` for any exception the error handler for this - exception is not called and it shows up as regular exception in the - traceback. This is helpful for debugging implicitly raised HTTP - exceptions. - - .. versionchanged:: 1.0 - Bad request errors are not trapped by default in debug mode. - - .. versionadded:: 0.8 - """ - if self.config["TRAP_HTTP_EXCEPTIONS"]: - return True - - trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] - - # if unset, trap key errors in debug mode - if ( - trap_bad_request is None - and self.debug - and isinstance(e, BadRequestKeyError) - ): - return True - - if trap_bad_request: - return isinstance(e, BadRequest) - - return False - - def handle_user_exception( - self, e: Exception - ) -> HTTPException | ft.ResponseReturnValue: - """This method is called whenever an exception occurs that - should be handled. A special case is :class:`~werkzeug - .exceptions.HTTPException` which is forwarded to the - :meth:`handle_http_exception` method. This function will either - return a response value or reraise the exception with the same - traceback. - - .. versionchanged:: 1.0 - Key errors raised from request data like ``form`` show the - bad key in debug mode rather than a generic bad request - message. - - .. 
versionadded:: 0.7 - """ - if isinstance(e, BadRequestKeyError) and ( - self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] - ): - e.show_exception = True - - if isinstance(e, HTTPException) and not self.trap_http_exception(e): - return self.handle_http_exception(e) - - handler = self._find_error_handler(e) - - if handler is None: - raise - - return self.ensure_sync(handler)(e) - - def handle_exception(self, e: Exception) -> Response: - """Handle an exception that did not have an error handler - associated with it, or that was raised from an error handler. - This always causes a 500 ``InternalServerError``. - - Always sends the :data:`got_request_exception` signal. - - If :data:`PROPAGATE_EXCEPTIONS` is ``True``, such as in debug - mode, the error will be re-raised so that the debugger can - display it. Otherwise, the original exception is logged, and - an :exc:`~werkzeug.exceptions.InternalServerError` is returned. - - If an error handler is registered for ``InternalServerError`` or - ``500``, it will be used. For consistency, the handler will - always receive the ``InternalServerError``. The original - unhandled exception is available as ``e.original_exception``. - - .. versionchanged:: 1.1.0 - Always passes the ``InternalServerError`` instance to the - handler, setting ``original_exception`` to the unhandled - error. - - .. versionchanged:: 1.1.0 - ``after_request`` functions and other finalization is done - even for the default 500 response when there is no handler. - - .. versionadded:: 0.3 - """ - exc_info = sys.exc_info() - got_request_exception.send(self, _async_wrapper=self.ensure_sync, exception=e) - propagate = self.config["PROPAGATE_EXCEPTIONS"] - - if propagate is None: - propagate = self.testing or self.debug - - if propagate: - # Re-raise if called with an active exception, otherwise - # raise the passed in exception. - if exc_info[1] is e: - raise - - raise e - - self.log_exception(exc_info) - server_error: InternalServerError | ft.ResponseReturnValue - server_error = InternalServerError(original_exception=e) - handler = self._find_error_handler(server_error) - - if handler is not None: - server_error = self.ensure_sync(handler)(server_error) - - return self.finalize_request(server_error, from_error_handler=True) - - def log_exception( - self, - exc_info: (tuple[type, BaseException, TracebackType] | tuple[None, None, None]), - ) -> None: - """Logs an exception. This is called by :meth:`handle_exception` - if debugging is disabled and right before the handler is called. - The default implementation logs the exception as error on the - :attr:`logger`. - - .. versionadded:: 0.8 - """ - self.logger.error( - f"Exception on {request.path} [{request.method}]", exc_info=exc_info - ) - - def raise_routing_exception(self, request: Request) -> t.NoReturn: - """Intercept routing exceptions and possibly do something else. - - In debug mode, intercept a routing redirect and replace it with - an error if the body will be discarded. - - With modern Werkzeug this shouldn't occur, since it now uses a - 308 status which tells the browser to resend the method and - body. - - .. versionchanged:: 2.1 - Don't intercept 307 and 308 redirects. 
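A hedged sketch of handlers that the lookup order described above would find, registered through the public ``errorhandler`` decorator (handler names are illustrative)::

    from flask import Flask
    from werkzeug.exceptions import HTTPException, InternalServerError

    app = Flask(__name__)

    @app.errorhandler(404)
    def not_found(e):
        return {"error": "not found"}, 404

    @app.errorhandler(InternalServerError)
    def internal_error(e):
        # handle_exception() attaches the unhandled error here.
        original = getattr(e, "original_exception", None)
        return {"error": "server error", "cause": repr(original)}, 500

    @app.errorhandler(HTTPException)
    def any_http_error(e):
        # Catch-all: specific codes and more derived classes win first.
        return {"error": e.name}, e.code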
- - :meta private: - :internal: - """ - if ( - not self.debug - or not isinstance(request.routing_exception, RequestRedirect) - or request.routing_exception.code in {307, 308} - or request.method in {"GET", "HEAD", "OPTIONS"} - ): - raise request.routing_exception # type: ignore - - from .debughelpers import FormDataRoutingRedirect - - raise FormDataRoutingRedirect(request) - - def dispatch_request(self) -> ft.ResponseReturnValue: - """Does the request dispatching. Matches the URL and returns the - return value of the view or error handler. This does not have to - be a response object. In order to convert the return value to a - proper response object, call :func:`make_response`. - - .. versionchanged:: 0.7 - This no longer does the exception handling, this code was - moved to the new :meth:`full_dispatch_request`. - """ - req = request_ctx.request - if req.routing_exception is not None: - self.raise_routing_exception(req) - rule: Rule = req.url_rule # type: ignore[assignment] - # if we provide automatic options for this URL and the - # request came with the OPTIONS method, reply automatically - if ( - getattr(rule, "provide_automatic_options", False) - and req.method == "OPTIONS" - ): - return self.make_default_options_response() - # otherwise dispatch to the handler for that endpoint - view_args: dict[str, t.Any] = req.view_args # type: ignore[assignment] - return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) - - def full_dispatch_request(self) -> Response: - """Dispatches the request and on top of that performs request - pre and postprocessing as well as HTTP exception catching and - error handling. - - .. versionadded:: 0.7 - """ - self._got_first_request = True - - try: - request_started.send(self, _async_wrapper=self.ensure_sync) - rv = self.preprocess_request() - if rv is None: - rv = self.dispatch_request() - except Exception as e: - rv = self.handle_user_exception(e) - return self.finalize_request(rv) - - def finalize_request( - self, - rv: ft.ResponseReturnValue | HTTPException, - from_error_handler: bool = False, - ) -> Response: - """Given the return value from a view function this finalizes - the request by converting it into a response and invoking the - postprocessing functions. This is invoked for both normal - request dispatching as well as error handlers. - - Because this means that it might be called as a result of a - failure a special safe mode is available which can be enabled - with the `from_error_handler` flag. If enabled, failures in - response processing will be logged and otherwise ignored. - - :internal: - """ - response = self.make_response(rv) - try: - response = self.process_response(response) - request_finished.send( - self, _async_wrapper=self.ensure_sync, response=response - ) - except Exception: - if not from_error_handler: - raise - self.logger.exception( - "Request finalizing failed with an error while handling an error" - ) - return response - - def make_default_options_response(self) -> Response: - """This method is called to create the default ``OPTIONS`` response. - This can be changed through subclassing to change the default - behavior of ``OPTIONS`` responses. - - .. 
versionadded:: 0.7 - """ - adapter = request_ctx.url_adapter - methods = adapter.allowed_methods() # type: ignore[union-attr] - rv = self.response_class() - rv.allow.update(methods) - return rv - - def should_ignore_error(self, error: BaseException | None) -> bool: - """This is called to figure out if an error should be ignored - or not as far as the teardown system is concerned. If this - function returns ``True`` then the teardown handlers will not be - passed the error. - - .. versionadded:: 0.10 - """ - return False - - def ensure_sync(self, func: t.Callable) -> t.Callable: - """Ensure that the function is synchronous for WSGI workers. - Plain ``def`` functions are returned as-is. ``async def`` - functions are wrapped to run and wait for the response. - - Override this method to change how the app runs async views. - - .. versionadded:: 2.0 - """ - if iscoroutinefunction(func): - return self.async_to_sync(func) - - return func - - def async_to_sync( - self, func: t.Callable[..., t.Coroutine] - ) -> t.Callable[..., t.Any]: - """Return a sync function that will run the coroutine function. - - .. code-block:: python - - result = app.async_to_sync(func)(*args, **kwargs) - - Override this method to change how the app converts async code - to be synchronously callable. - - .. versionadded:: 2.0 - """ - try: - from asgiref.sync import async_to_sync as asgiref_async_to_sync - except ImportError: - raise RuntimeError( - "Install Flask with the 'async' extra in order to use async views." - ) from None - - return asgiref_async_to_sync(func) - - def url_for( - self, - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, - ) -> str: - """Generate a URL to the given endpoint with the given values. - - This is called by :func:`flask.url_for`, and can be called - directly as well. - - An *endpoint* is the name of a URL rule, usually added with - :meth:`@app.route() `, and usually the same name as the - view function. A route defined in a :class:`~flask.Blueprint` - will prepend the blueprint's name separated by a ``.`` to the - endpoint. - - In some cases, such as email messages, you want URLs to include - the scheme and domain, like ``https://example.com/hello``. When - not in an active request, URLs will be external by default, but - this requires setting :data:`SERVER_NAME` so Flask knows what - domain to use. :data:`APPLICATION_ROOT` and - :data:`PREFERRED_URL_SCHEME` should also be configured as - needed. This config is only used when not in an active request. - - Functions can be decorated with :meth:`url_defaults` to modify - keyword arguments before the URL is built. - - If building fails for some reason, such as an unknown endpoint - or incorrect values, the app's :meth:`handle_url_build_error` - method is called. If that returns a string, that is returned, - otherwise a :exc:`~werkzeug.routing.BuildError` is raised. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it - is external. - :param _external: If given, prefer the URL to be internal - (False) or require it to be external (True). External URLs - include the scheme and domain. 
When not in an active - request, URLs are external by default. - :param values: Values to use for the variable parts of the URL - rule. Unknown keys are appended as query string arguments, - like ``?a=b&c=d``. - - .. versionadded:: 2.2 - Moved from ``flask.url_for``, which calls this method. - """ - req_ctx = _cv_request.get(None) - - if req_ctx is not None: - url_adapter = req_ctx.url_adapter - blueprint_name = req_ctx.request.blueprint - - # If the endpoint starts with "." and the request matches a - # blueprint, the endpoint is relative to the blueprint. - if endpoint[:1] == ".": - if blueprint_name is not None: - endpoint = f"{blueprint_name}{endpoint}" - else: - endpoint = endpoint[1:] - - # When in a request, generate a URL without scheme and - # domain by default, unless a scheme is given. - if _external is None: - _external = _scheme is not None - else: - app_ctx = _cv_app.get(None) - - # If called by helpers.url_for, an app context is active, - # use its url_adapter. Otherwise, app.url_for was called - # directly, build an adapter. - if app_ctx is not None: - url_adapter = app_ctx.url_adapter - else: - url_adapter = self.create_url_adapter(None) - - if url_adapter is None: - raise RuntimeError( - "Unable to build URLs outside an active request" - " without 'SERVER_NAME' configured. Also configure" - " 'APPLICATION_ROOT' and 'PREFERRED_URL_SCHEME' as" - " needed." - ) - - # When outside a request, generate a URL with scheme and - # domain by default. - if _external is None: - _external = True - - # It is an error to set _scheme when _external=False, in order - # to avoid accidental insecure URLs. - if _scheme is not None and not _external: - raise ValueError("When specifying '_scheme', '_external' must be True.") - - self.inject_url_defaults(endpoint, values) - - try: - rv = url_adapter.build( # type: ignore[union-attr] - endpoint, - values, - method=_method, - url_scheme=_scheme, - force_external=_external, - ) - except BuildError as error: - values.update( - _anchor=_anchor, _method=_method, _scheme=_scheme, _external=_external - ) - return self.handle_url_build_error(error, endpoint, values) - - if _anchor is not None: - _anchor = _url_quote(_anchor, safe="%!#$&'()*+,/:;=?@") - rv = f"{rv}#{_anchor}" - - return rv - - def redirect(self, location: str, code: int = 302) -> BaseResponse: - """Create a redirect response object. - - This is called by :func:`flask.redirect`, and can be called - directly as well. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - - .. versionadded:: 2.2 - Moved from ``flask.redirect``, which calls this method. - """ - return _wz_redirect(location, code=code, Response=self.response_class) - - def make_response(self, rv: ft.ResponseReturnValue) -> Response: - """Convert the return value from a view function to an instance of - :attr:`response_class`. - - :param rv: the return value from the view function. The view function - must return a response. Returning ``None``, or the view ending - without returning, is not allowed. The following types are allowed - for ``view_rv``: - - ``str`` - A response object is created with the string encoded to UTF-8 - as the body. - - ``bytes`` - A response object is created with the bytes as the body. - - ``dict`` - A dictionary that will be jsonify'd before being returned. - - ``list`` - A list that will be jsonify'd before being returned. - - ``generator`` or ``iterator`` - A generator that returns ``str`` or ``bytes`` to be - streamed as the response. 
- - ``tuple`` - Either ``(body, status, headers)``, ``(body, status)``, or - ``(body, headers)``, where ``body`` is any of the other types - allowed here, ``status`` is a string or an integer, and - ``headers`` is a dictionary or a list of ``(key, value)`` - tuples. If ``body`` is a :attr:`response_class` instance, - ``status`` overwrites the exiting value and ``headers`` are - extended. - - :attr:`response_class` - The object is returned unchanged. - - other :class:`~werkzeug.wrappers.Response` class - The object is coerced to :attr:`response_class`. - - :func:`callable` - The function is called as a WSGI application. The result is - used to create a response object. - - .. versionchanged:: 2.2 - A generator will be converted to a streaming response. - A list will be converted to a JSON response. - - .. versionchanged:: 1.1 - A dict will be converted to a JSON response. - - .. versionchanged:: 0.9 - Previously a tuple was interpreted as the arguments for the - response object. - """ - - status = headers = None - - # unpack tuple returns - if isinstance(rv, tuple): - len_rv = len(rv) - - # a 3-tuple is unpacked directly - if len_rv == 3: - rv, status, headers = rv # type: ignore[misc] - # decide if a 2-tuple has status or headers - elif len_rv == 2: - if isinstance(rv[1], (Headers, dict, tuple, list)): - rv, headers = rv - else: - rv, status = rv # type: ignore[assignment,misc] - # other sized tuples are not allowed - else: - raise TypeError( - "The view function did not return a valid response tuple." - " The tuple must have the form (body, status, headers)," - " (body, status), or (body, headers)." - ) - - # the body must not be None - if rv is None: - raise TypeError( - f"The view function for {request.endpoint!r} did not" - " return a valid response. The function either returned" - " None or ended without a return statement." - ) - - # make sure the body is an instance of the response class - if not isinstance(rv, self.response_class): - if isinstance(rv, (str, bytes, bytearray)) or isinstance(rv, _abc_Iterator): - # let the response class set the status and headers instead of - # waiting to do it manually, so that the class can handle any - # special logic - rv = self.response_class( - rv, - status=status, - headers=headers, # type: ignore[arg-type] - ) - status = headers = None - elif isinstance(rv, (dict, list)): - rv = self.json.response(rv) - elif isinstance(rv, BaseResponse) or callable(rv): - # evaluate a WSGI callable, or coerce a different response - # class to the correct type - try: - rv = self.response_class.force_type( - rv, request.environ # type: ignore[arg-type] - ) - except TypeError as e: - raise TypeError( - f"{e}\nThe view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it" - f" was a {type(rv).__name__}." - ).with_traceback(sys.exc_info()[2]) from None - else: - raise TypeError( - "The view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it was a" - f" {type(rv).__name__}." 
- ) - - rv = t.cast(Response, rv) - # prefer the status if it was provided - if status is not None: - if isinstance(status, (str, bytes, bytearray)): - rv.status = status - else: - rv.status_code = status - - # extend existing headers with provided headers - if headers: - rv.headers.update(headers) # type: ignore[arg-type] - - return rv - - def create_url_adapter(self, request: Request | None) -> MapAdapter | None: - """Creates a URL adapter for the given request. The URL adapter - is created at a point where the request context is not yet set - up so the request is passed explicitly. - - .. versionadded:: 0.6 - - .. versionchanged:: 0.9 - This can now also be called without a request object when the - URL adapter is created for the application context. - - .. versionchanged:: 1.0 - :data:`SERVER_NAME` no longer implicitly enables subdomain - matching. Use :attr:`subdomain_matching` instead. - """ - if request is not None: - # If subdomain matching is disabled (the default), use the - # default subdomain in all cases. This should be the default - # in Werkzeug but it currently does not have that feature. - if not self.subdomain_matching: - subdomain = self.url_map.default_subdomain or None - else: - subdomain = None - - return self.url_map.bind_to_environ( - request.environ, - server_name=self.config["SERVER_NAME"], - subdomain=subdomain, - ) - # We need at the very least the server name to be set for this - # to work. - if self.config["SERVER_NAME"] is not None: - return self.url_map.bind( - self.config["SERVER_NAME"], - script_name=self.config["APPLICATION_ROOT"], - url_scheme=self.config["PREFERRED_URL_SCHEME"], - ) - - return None - - def inject_url_defaults(self, endpoint: str, values: dict) -> None: - """Injects the URL defaults for the given endpoint directly into - the values dictionary passed. This is used internally and - automatically called on URL building. - - .. versionadded:: 0.7 - """ - names: t.Iterable[str | None] = (None,) - - # url_for may be called outside a request context, parse the - # passed endpoint instead of using request.blueprints. - if "." in endpoint: - names = chain( - names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) - ) - - for name in names: - if name in self.url_default_functions: - for func in self.url_default_functions[name]: - func(endpoint, values) - - def handle_url_build_error( - self, error: BuildError, endpoint: str, values: dict[str, t.Any] - ) -> str: - """Called by :meth:`.url_for` if a - :exc:`~werkzeug.routing.BuildError` was raised. If this returns - a value, it will be returned by ``url_for``, otherwise the error - will be re-raised. - - Each function in :attr:`url_build_error_handlers` is called with - ``error``, ``endpoint`` and ``values``. If a function returns - ``None`` or raises a ``BuildError``, it is skipped. Otherwise, - its return value is returned by ``url_for``. - - :param error: The active ``BuildError`` being handled. - :param endpoint: The endpoint being built. - :param values: The keyword arguments passed to ``url_for``. - """ - for handler in self.url_build_error_handlers: - try: - rv = handler(error, endpoint, values) - except BuildError as e: - # make error available outside except block - error = e - else: - if rv is not None: - return rv - - # Re-raise if called with an active exception, otherwise raise - # the passed in exception. - if error is sys.exc_info()[1]: - raise - - raise error - - def preprocess_request(self) -> ft.ResponseReturnValue | None: - """Called before the request is dispatched. 
Calls - :attr:`url_value_preprocessors` registered with the app and the - current blueprint (if any). Then calls :attr:`before_request_funcs` - registered with the app and the blueprint. - - If any :meth:`before_request` handler returns a non-None value, the - value is handled as if it was the return value from the view, and - further request handling is stopped. - """ - names = (None, *reversed(request.blueprints)) - - for name in names: - if name in self.url_value_preprocessors: - for url_func in self.url_value_preprocessors[name]: - url_func(request.endpoint, request.view_args) - - for name in names: - if name in self.before_request_funcs: - for before_func in self.before_request_funcs[name]: - rv = self.ensure_sync(before_func)() - - if rv is not None: - return rv - - return None - - def process_response(self, response: Response) -> Response: - """Can be overridden in order to modify the response object - before it's sent to the WSGI server. By default this will - call all the :meth:`after_request` decorated functions. - - .. versionchanged:: 0.5 - As of Flask 0.5 the functions registered for after request - execution are called in reverse order of registration. - - :param response: a :attr:`response_class` object. - :return: a new response object or the same, has to be an - instance of :attr:`response_class`. - """ - ctx = request_ctx._get_current_object() # type: ignore[attr-defined] - - for func in ctx._after_request_functions: - response = self.ensure_sync(func)(response) - - for name in chain(request.blueprints, (None,)): - if name in self.after_request_funcs: - for func in reversed(self.after_request_funcs[name]): - response = self.ensure_sync(func)(response) - - if not self.session_interface.is_null_session(ctx.session): - self.session_interface.save_session(self, ctx.session, response) - - return response - - def do_teardown_request( - self, exc: BaseException | None = _sentinel # type: ignore - ) -> None: - """Called after the request is dispatched and the response is - returned, right before the request context is popped. - - This calls all functions decorated with - :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` - if a blueprint handled the request. Finally, the - :data:`request_tearing_down` signal is sent. - - This is called by - :meth:`RequestContext.pop() `, - which may be delayed during testing to maintain access to - resources. - - :param exc: An unhandled exception raised while dispatching the - request. Detected from the current exception information if - not passed. Passed to each teardown function. - - .. versionchanged:: 0.9 - Added the ``exc`` argument. - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for name in chain(request.blueprints, (None,)): - if name in self.teardown_request_funcs: - for func in reversed(self.teardown_request_funcs[name]): - self.ensure_sync(func)(exc) - - request_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def do_teardown_appcontext( - self, exc: BaseException | None = _sentinel # type: ignore - ) -> None: - """Called right before the application context is popped. - - When handling a request, the application context is popped - after the request context. See :meth:`do_teardown_request`. - - This calls all functions decorated with - :meth:`teardown_appcontext`. Then the - :data:`appcontext_tearing_down` signal is sent. - - This is called by - :meth:`AppContext.pop() `. - - .. 
versionadded:: 0.9 - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for func in reversed(self.teardown_appcontext_funcs): - self.ensure_sync(func)(exc) - - appcontext_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def app_context(self) -> AppContext: - """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` - block to push the context, which will make :data:`current_app` - point at this application. - - An application context is automatically pushed by - :meth:`RequestContext.push() ` - when handling a request, and when running a CLI command. Use - this to manually create a context outside of these situations. - - :: - - with app.app_context(): - init_db() - - See :doc:`/appcontext`. - - .. versionadded:: 0.9 - """ - return AppContext(self) - - def request_context(self, environ: dict) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` representing a - WSGI environment. Use a ``with`` block to push the context, - which will make :data:`request` point at this request. - - See :doc:`/reqcontext`. - - Typically you should not call this from your own code. A request - context is automatically pushed by the :meth:`wsgi_app` when - handling a request. Use :meth:`test_request_context` to create - an environment and context instead of this method. - - :param environ: a WSGI environment - """ - return RequestContext(self, environ) - - def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` for a WSGI - environment created from the given values. This is mostly useful - during testing, where you may want to run a function that uses - request data without dispatching a full request. - - See :doc:`/reqcontext`. - - Use a ``with`` block to push the context, which will make - :data:`request` point at the request for the created - environment. :: - - with app.test_request_context(...): - generate_report() - - When using the shell, it may be easier to push and pop the - context manually to avoid indentation. :: - - ctx = app.test_request_context(...) - ctx.push() - ... - ctx.pop() - - Takes the same arguments as Werkzeug's - :class:`~werkzeug.test.EnvironBuilder`, with some defaults from - the application. See the linked Werkzeug docs for most of the - available arguments. Flask-specific behavior is listed here. - - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to - :data:`SERVER_NAME`. - :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. - :param data: The request body, either as a string or a dict of - form keys and values. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - from .testing import EnvironBuilder - - builder = EnvironBuilder(self, *args, **kwargs) - - try: - return self.request_context(builder.get_environ()) - finally: - builder.close() - - def wsgi_app(self, environ: dict, start_response: t.Callable) -> t.Any: - """The actual WSGI application. 
This is not implemented in - :meth:`__call__` so that middlewares can be applied without - losing a reference to the app object. Instead of doing this:: - - app = MyMiddleware(app) - - It's a better idea to do this instead:: - - app.wsgi_app = MyMiddleware(app.wsgi_app) - - Then you still have the original application object around and - can continue to call methods on it. - - .. versionchanged:: 0.7 - Teardown events for the request and app contexts are called - even if an unhandled error occurs. Other events may not be - called depending on when an error occurs during dispatch. - See :ref:`callbacks-and-errors`. - - :param environ: A WSGI environment. - :param start_response: A callable accepting a status code, - a list of headers, and an optional exception context to - start the response. - """ - ctx = self.request_context(environ) - error: BaseException | None = None - try: - try: - ctx.push() - response = self.full_dispatch_request() - except Exception as e: - error = e - response = self.handle_exception(e) - except: # noqa: B001 - error = sys.exc_info()[1] - raise - return response(environ, start_response) - finally: - if "werkzeug.debug.preserve_context" in environ: - environ["werkzeug.debug.preserve_context"](_cv_app.get()) - environ["werkzeug.debug.preserve_context"](_cv_request.get()) - - if error is not None and self.should_ignore_error(error): - error = None - - ctx.pop(error) - - def __call__(self, environ: dict, start_response: t.Callable) -> t.Any: - """The WSGI server calls the Flask application object as the - WSGI application. This calls :meth:`wsgi_app`, which can be - wrapped to apply middleware. - """ - return self.wsgi_app(environ, start_response) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/blueprints.py b/venv_flaskchat/lib/python3.11/site-packages/flask/blueprints.py deleted file mode 100644 index 0407f86..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/blueprints.py +++ /dev/null @@ -1,626 +0,0 @@ -from __future__ import annotations - -import os -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from . import typing as ft -from .scaffold import _endpoint_from_view_func -from .scaffold import _sentinel -from .scaffold import Scaffold -from .scaffold import setupmethod - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - -DeferredSetupFunction = t.Callable[["BlueprintSetupState"], t.Callable] -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) - - -class BlueprintSetupState: - """Temporary holder object for registering a blueprint with the - application. 
An instance of this class is created by the - :meth:`~flask.Blueprint.make_setup_state` method and later passed - to all register callback functions. - """ - - def __init__( - self, - blueprint: Blueprint, - app: Flask, - options: t.Any, - first_registration: bool, - ) -> None: - #: a reference to the current application - self.app = app - - #: a reference to the blueprint that created this setup state. - self.blueprint = blueprint - - #: a dictionary with all options that were passed to the - #: :meth:`~flask.Flask.register_blueprint` method. - self.options = options - - #: as blueprints can be registered multiple times with the - #: application and not everything wants to be registered - #: multiple times on it, this attribute can be used to figure - #: out if the blueprint was registered in the past already. - self.first_registration = first_registration - - subdomain = self.options.get("subdomain") - if subdomain is None: - subdomain = self.blueprint.subdomain - - #: The subdomain that the blueprint should be active for, ``None`` - #: otherwise. - self.subdomain = subdomain - - url_prefix = self.options.get("url_prefix") - if url_prefix is None: - url_prefix = self.blueprint.url_prefix - #: The prefix that should be used for all URLs defined on the - #: blueprint. - self.url_prefix = url_prefix - - self.name = self.options.get("name", blueprint.name) - self.name_prefix = self.options.get("name_prefix", "") - - #: A dictionary with URL defaults that is added to each and every - #: URL that was defined with the blueprint. - self.url_defaults = dict(self.blueprint.url_values_defaults) - self.url_defaults.update(self.options.get("url_defaults", ())) - - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: t.Callable | None = None, - **options: t.Any, - ) -> None: - """A helper method to register a rule (and optionally a view function) - to the application. The endpoint is automatically prefixed with the - blueprint's name. - """ - if self.url_prefix is not None: - if rule: - rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) - else: - rule = self.url_prefix - options.setdefault("subdomain", self.subdomain) - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - defaults = self.url_defaults - if "defaults" in options: - defaults = dict(defaults, **options.pop("defaults")) - - self.app.add_url_rule( - rule, - f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), - view_func, - defaults=defaults, - **options, - ) - - -class Blueprint(Scaffold): - """Represents a blueprint, a collection of routes and other - app-related functions that can be registered on a real application - later. - - A blueprint is an object that allows defining application functions - without requiring an application object ahead of time. It uses the - same decorators as :class:`~flask.Flask`, but defers the need for an - application by recording them for later registration. - - Decorating a function with a blueprint creates a deferred function - that is called with :class:`~flask.blueprints.BlueprintSetupState` - when the blueprint is registered on an application. - - See :doc:`/blueprints` for more information. - - :param name: The name of the blueprint. Will be prepended to each - endpoint name. - :param import_name: The name of the blueprint package, usually - ``__name__``. This helps locate the ``root_path`` for the - blueprint. - :param static_folder: A folder with static files that should be - served by the blueprint's static route. 
The path is relative to - the blueprint's root path. Blueprint static files are disabled - by default. - :param static_url_path: The url to serve static files from. - Defaults to ``static_folder``. If the blueprint does not have - a ``url_prefix``, the app's static route will take precedence, - and the blueprint's static files won't be accessible. - :param template_folder: A folder with templates that should be added - to the app's template search path. The path is relative to the - blueprint's root path. Blueprint templates are disabled by - default. Blueprint templates have a lower precedence than those - in the app's templates folder. - :param url_prefix: A path to prepend to all of the blueprint's URLs, - to make them distinct from the rest of the app's routes. - :param subdomain: A subdomain that blueprint routes will match on by - default. - :param url_defaults: A dict of default values that blueprint routes - will receive by default. - :param root_path: By default, the blueprint will automatically set - this based on ``import_name``. In certain situations this - automatic detection can fail, so the path can be specified - manually instead. - - .. versionchanged:: 1.1.0 - Blueprints have a ``cli`` group to register nested CLI commands. - The ``cli_group`` parameter controls the name of the group under - the ``flask`` command. - - .. versionadded:: 0.7 - """ - - _got_registered_once = False - - def __init__( - self, - name: str, - import_name: str, - static_folder: str | os.PathLike | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike | None = None, - url_prefix: str | None = None, - subdomain: str | None = None, - url_defaults: dict | None = None, - root_path: str | None = None, - cli_group: str | None = _sentinel, # type: ignore - ): - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if not name: - raise ValueError("'name' may not be empty.") - - if "." in name: - raise ValueError("'name' may not contain a dot '.' character.") - - self.name = name - self.url_prefix = url_prefix - self.subdomain = subdomain - self.deferred_functions: list[DeferredSetupFunction] = [] - - if url_defaults is None: - url_defaults = {} - - self.url_values_defaults = url_defaults - self.cli_group = cli_group - self._blueprints: list[tuple[Blueprint, dict]] = [] - - def _check_setup_finished(self, f_name: str) -> None: - if self._got_registered_once: - raise AssertionError( - f"The setup method '{f_name}' can no longer be called on the blueprint" - f" '{self.name}'. It has already been registered at least once, any" - " changes will not be applied consistently.\n" - "Make sure all imports, decorators, functions, etc. needed to set up" - " the blueprint are done before registering it." - ) - - @setupmethod - def record(self, func: t.Callable) -> None: - """Registers a function that is called when the blueprint is - registered on the application. This function is called with the - state as argument as returned by the :meth:`make_setup_state` - method. - """ - self.deferred_functions.append(func) - - @setupmethod - def record_once(self, func: t.Callable) -> None: - """Works like :meth:`record` but wraps the function in another - function that will ensure the function is only called once. If the - blueprint is registered a second time on the application, the - function passed is not called. 
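A minimal blueprint sketch exercising the constructor parameters and registration behaviour described above (the ``shop`` names and routes are illustrative)::

    from flask import Blueprint, Flask, url_for

    bp = Blueprint("shop", __name__, url_prefix="/shop")

    @bp.route("/items")
    def items():
        return ["apple", "pear"]  # lists are JSON-serialized by make_response

    app = Flask(__name__)
    app.register_blueprint(bp)
    # The same blueprint can be mounted again under a unique name:
    app.register_blueprint(bp, name="shop2", url_prefix="/outlet")

    with app.test_request_context():
        assert url_for("shop.items") == "/shop/items"
        assert url_for("shop2.items") == "/outlet/items"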
- """ - - def wrapper(state: BlueprintSetupState) -> None: - if state.first_registration: - func(state) - - self.record(update_wrapper(wrapper, func)) - - def make_setup_state( - self, app: Flask, options: dict, first_registration: bool = False - ) -> BlueprintSetupState: - """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` - object that is later passed to the register callback functions. - Subclasses can override this to return a subclass of the setup state. - """ - return BlueprintSetupState(self, app, options, first_registration) - - @setupmethod - def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on this blueprint. Keyword - arguments passed to this method will override the defaults set - on the blueprint. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 2.0 - """ - if blueprint is self: - raise ValueError("Cannot register a blueprint on itself") - self._blueprints.append((blueprint, options)) - - def register(self, app: Flask, options: dict) -> None: - """Called by :meth:`Flask.register_blueprint` to register all - views and callbacks registered on the blueprint with the - application. Creates a :class:`.BlueprintSetupState` and calls - each :meth:`record` callback with it. - - :param app: The application this blueprint is being registered - with. - :param options: Keyword arguments forwarded from - :meth:`~Flask.register_blueprint`. - - .. versionchanged:: 2.3 - Nested blueprints now correctly apply subdomains. - - .. versionchanged:: 2.1 - Registering the same blueprint with the same name multiple - times is an error. - - .. versionchanged:: 2.0.1 - Nested blueprints are registered with their dotted name. - This allows different blueprints with the same name to be - nested at different locations. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - """ - name_prefix = options.get("name_prefix", "") - self_name = options.get("name", self.name) - name = f"{name_prefix}.{self_name}".lstrip(".") - - if name in app.blueprints: - bp_desc = "this" if app.blueprints[name] is self else "a different" - existing_at = f" '{name}'" if self_name != name else "" - - raise ValueError( - f"The name '{self_name}' is already registered for" - f" {bp_desc} blueprint{existing_at}. Use 'name=' to" - f" provide a unique name." - ) - - first_bp_registration = not any(bp is self for bp in app.blueprints.values()) - first_name_registration = name not in app.blueprints - - app.blueprints[name] = self - self._got_registered_once = True - state = self.make_setup_state(app, options, first_bp_registration) - - if self.has_static_folder: - state.add_url_rule( - f"{self.static_url_path}/", - view_func=self.send_static_file, - endpoint="static", - ) - - # Merge blueprint data into parent. 
- if first_bp_registration or first_name_registration: - - def extend(bp_dict, parent_dict): - for key, values in bp_dict.items(): - key = name if key is None else f"{name}.{key}" - parent_dict[key].extend(values) - - for key, value in self.error_handler_spec.items(): - key = name if key is None else f"{name}.{key}" - value = defaultdict( - dict, - { - code: { - exc_class: func for exc_class, func in code_values.items() - } - for code, code_values in value.items() - }, - ) - app.error_handler_spec[key] = value - - for endpoint, func in self.view_functions.items(): - app.view_functions[endpoint] = func - - extend(self.before_request_funcs, app.before_request_funcs) - extend(self.after_request_funcs, app.after_request_funcs) - extend( - self.teardown_request_funcs, - app.teardown_request_funcs, - ) - extend(self.url_default_functions, app.url_default_functions) - extend(self.url_value_preprocessors, app.url_value_preprocessors) - extend(self.template_context_processors, app.template_context_processors) - - for deferred in self.deferred_functions: - deferred(state) - - cli_resolved_group = options.get("cli_group", self.cli_group) - - if self.cli.commands: - if cli_resolved_group is None: - app.cli.commands.update(self.cli.commands) - elif cli_resolved_group is _sentinel: - self.cli.name = name - app.cli.add_command(self.cli) - else: - self.cli.name = cli_resolved_group - app.cli.add_command(self.cli) - - for blueprint, bp_options in self._blueprints: - bp_options = bp_options.copy() - bp_url_prefix = bp_options.get("url_prefix") - bp_subdomain = bp_options.get("subdomain") - - if bp_subdomain is None: - bp_subdomain = blueprint.subdomain - - if state.subdomain is not None and bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain + "." + state.subdomain - elif bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain - elif state.subdomain is not None: - bp_options["subdomain"] = state.subdomain - - if bp_url_prefix is None: - bp_url_prefix = blueprint.url_prefix - - if state.url_prefix is not None and bp_url_prefix is not None: - bp_options["url_prefix"] = ( - state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") - ) - elif bp_url_prefix is not None: - bp_options["url_prefix"] = bp_url_prefix - elif state.url_prefix is not None: - bp_options["url_prefix"] = state.url_prefix - - bp_options["name_prefix"] = name - blueprint.register(app, bp_options) - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a URL rule with the blueprint. See :meth:`.Flask.add_url_rule` for - full documentation. - - The URL rule is prefixed with the blueprint's URL prefix. The endpoint name, - used with :func:`url_for`, is prefixed with the blueprint's name. - """ - if endpoint and "." in endpoint: - raise ValueError("'endpoint' may not contain a dot '.' character.") - - if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: - raise ValueError("'view_func' name may not contain a dot '.' character.") - - self.record( - lambda s: s.add_url_rule( - rule, - endpoint, - view_func, - provide_automatic_options=provide_automatic_options, - **options, - ) - ) - - @setupmethod - def app_template_filter( - self, name: str | None = None - ) -> t.Callable[[T_template_filter], T_template_filter]: - """Register a template filter, available in any template rendered by the - application. 
Equivalent to :meth:`.Flask.template_filter`. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f: T_template_filter) -> T_template_filter: - self.add_app_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_filter( - self, f: ft.TemplateFilterCallable, name: str | None = None - ) -> None: - """Register a template filter, available in any template rendered by the - application. Works like the :meth:`app_template_filter` decorator. Equivalent to - :meth:`.Flask.add_template_filter`. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.filters[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_test( - self, name: str | None = None - ) -> t.Callable[[T_template_test], T_template_test]: - """Register a template test, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_app_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a template test, available in any template rendered by the - application. Works like the :meth:`app_template_test` decorator. Equivalent to - :meth:`.Flask.add_template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.tests[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """Register a template global, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_app_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a template global, available in any template rendered by the - application. Works like the :meth:`app_template_global` decorator. Equivalent to - :meth:`.Flask.add_template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.globals[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def before_app_request(self, f: T_before_request) -> T_before_request: - """Like :meth:`before_request`, but before every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.before_request`. 
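# A hedged sketch of the app-wide template and request hooks documented here;
# the blueprint and filter names are illustrative assumptions.
from flask import Blueprint

bp = Blueprint("example", __name__)

@bp.app_template_filter("reverse")
def reverse_filter(s):
    # Usable in any template rendered by the app: {{ value|reverse }}
    return s[::-1]

@bp.before_app_request
def load_user():
    # Runs before every request the app handles, not only blueprint routes.
    ...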
- """ - self.record_once( - lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def after_app_request(self, f: T_after_request) -> T_after_request: - """Like :meth:`after_request`, but after every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.after_request`. - """ - self.record_once( - lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def teardown_app_request(self, f: T_teardown) -> T_teardown: - """Like :meth:`teardown_request`, but after every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.teardown_request`. - """ - self.record_once( - lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_context_processor( - self, f: T_template_context_processor - ) -> T_template_context_processor: - """Like :meth:`context_processor`, but for templates rendered by every view, not - only by the blueprint. Equivalent to :meth:`.Flask.context_processor`. - """ - self.record_once( - lambda s: s.app.template_context_processors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_errorhandler( - self, code: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Like :meth:`errorhandler`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.errorhandler`. - """ - - def decorator(f: T_error_handler) -> T_error_handler: - self.record_once(lambda s: s.app.errorhandler(code)(f)) - return f - - return decorator - - @setupmethod - def app_url_value_preprocessor( - self, f: T_url_value_preprocessor - ) -> T_url_value_preprocessor: - """Like :meth:`url_value_preprocessor`, but for every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.url_value_preprocessor`. - """ - self.record_once( - lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Like :meth:`url_defaults`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.url_defaults`. - """ - self.record_once( - lambda s: s.app.url_default_functions.setdefault(None, []).append(f) - ) - return f diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/cli.py b/venv_flaskchat/lib/python3.11/site-packages/flask/cli.py deleted file mode 100644 index f7e1f29..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/cli.py +++ /dev/null @@ -1,1067 +0,0 @@ -from __future__ import annotations - -import ast -import inspect -import os -import platform -import re -import sys -import traceback -import typing as t -from functools import update_wrapper -from operator import itemgetter - -import click -from click.core import ParameterSource -from werkzeug import run_simple -from werkzeug.serving import is_running_from_reloader -from werkzeug.utils import import_string - -from .globals import current_app -from .helpers import get_debug_flag -from .helpers import get_load_dotenv - -if t.TYPE_CHECKING: - from .app import Flask - - -class NoAppException(click.UsageError): - """Raised if an application cannot be found or loaded.""" - - -def find_best_app(module): - """Given a module instance this tries to find the best possible - application in the module or raises an exception. - """ - from . import Flask - - # Search for the most common names first. 
- for attr_name in ("app", "application"): - app = getattr(module, attr_name, None) - - if isinstance(app, Flask): - return app - - # Otherwise find the only object that is a Flask instance. - matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] - - if len(matches) == 1: - return matches[0] - elif len(matches) > 1: - raise NoAppException( - "Detected multiple Flask applications in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify the correct one." - ) - - # Search for app factory functions. - for attr_name in ("create_app", "make_app"): - app_factory = getattr(module, attr_name, None) - - if inspect.isfunction(app_factory): - try: - app = app_factory() - - if isinstance(app, Flask): - return app - except TypeError as e: - if not _called_with_wrong_args(app_factory): - raise - - raise NoAppException( - f"Detected factory '{attr_name}' in module '{module.__name__}'," - " but could not call it without arguments. Use" - f" '{module.__name__}:{attr_name}(args)'" - " to specify arguments." - ) from e - - raise NoAppException( - "Failed to find Flask application or factory in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify one." - ) - - -def _called_with_wrong_args(f): - """Check whether calling a function raised a ``TypeError`` because - the call failed or because something in the factory raised the - error. - - :param f: The function that was called. - :return: ``True`` if the call failed. - """ - tb = sys.exc_info()[2] - - try: - while tb is not None: - if tb.tb_frame.f_code is f.__code__: - # In the function, it was called successfully. - return False - - tb = tb.tb_next - - # Didn't reach the function. - return True - finally: - # Delete tb to break a circular reference. - # https://docs.python.org/2/library/sys.html#sys.exc_info - del tb - - -def find_app_by_string(module, app_name): - """Check if the given string is a variable name or a function. Call - a function to get the app instance, or return the variable directly. - """ - from . import Flask - - # Parse app_name as a single expression to determine if it's a valid - # attribute name or function call. - try: - expr = ast.parse(app_name.strip(), mode="eval").body - except SyntaxError: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) from None - - if isinstance(expr, ast.Name): - name = expr.id - args = [] - kwargs = {} - elif isinstance(expr, ast.Call): - # Ensure the function name is an attribute name only. - if not isinstance(expr.func, ast.Name): - raise NoAppException( - f"Function reference must be a simple name: {app_name!r}." - ) - - name = expr.func.id - - # Parse the positional and keyword arguments as literals. - try: - args = [ast.literal_eval(arg) for arg in expr.args] - kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expr.keywords} - except ValueError: - # literal_eval gives cryptic error messages, show a generic - # message with the full expression instead. - raise NoAppException( - f"Failed to parse arguments as literal values: {app_name!r}." - ) from None - else: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) - - try: - attr = getattr(module, name) - except AttributeError as e: - raise NoAppException( - f"Failed to find attribute {name!r} in {module.__name__!r}." - ) from e - - # If the attribute is a function, call it with any args and kwargs - # to get the real application. 
- if inspect.isfunction(attr): - try: - app = attr(*args, **kwargs) - except TypeError as e: - if not _called_with_wrong_args(attr): - raise - - raise NoAppException( - f"The factory {app_name!r} in module" - f" {module.__name__!r} could not be called with the" - " specified arguments." - ) from e - else: - app = attr - - if isinstance(app, Flask): - return app - - raise NoAppException( - "A valid Flask application was not obtained from" - f" '{module.__name__}:{app_name}'." - ) - - -def prepare_import(path): - """Given a filename this will try to calculate the python path, add it - to the search path and return the actual module name that is expected. - """ - path = os.path.realpath(path) - - fname, ext = os.path.splitext(path) - if ext == ".py": - path = fname - - if os.path.basename(path) == "__init__": - path = os.path.dirname(path) - - module_name = [] - - # move up until outside package structure (no __init__.py) - while True: - path, name = os.path.split(path) - module_name.append(name) - - if not os.path.exists(os.path.join(path, "__init__.py")): - break - - if sys.path[0] != path: - sys.path.insert(0, path) - - return ".".join(module_name[::-1]) - - -def locate_app(module_name, app_name, raise_if_not_found=True): - try: - __import__(module_name) - except ImportError: - # Reraise the ImportError if it occurred within the imported module. - # Determine this by checking whether the trace has a depth > 1. - if sys.exc_info()[2].tb_next: - raise NoAppException( - f"While importing {module_name!r}, an ImportError was" - f" raised:\n\n{traceback.format_exc()}" - ) from None - elif raise_if_not_found: - raise NoAppException(f"Could not import {module_name!r}.") from None - else: - return - - module = sys.modules[module_name] - - if app_name is None: - return find_best_app(module) - else: - return find_app_by_string(module, app_name) - - -def get_version(ctx, param, value): - if not value or ctx.resilient_parsing: - return - - import werkzeug - from . import __version__ - - click.echo( - f"Python {platform.python_version()}\n" - f"Flask {__version__}\n" - f"Werkzeug {werkzeug.__version__}", - color=ctx.color, - ) - ctx.exit() - - -version_option = click.Option( - ["--version"], - help="Show the Flask version.", - expose_value=False, - callback=get_version, - is_flag=True, - is_eager=True, -) - - -class ScriptInfo: - """Helper object to deal with Flask applications. This is usually not - necessary to interface with as it's used internally in the dispatching - to click. In future versions of Flask this object will most likely play - a bigger role. Typically it's created automatically by the - :class:`FlaskGroup` but you can also manually create it and pass it - onwards as click object. - """ - - def __init__( - self, - app_import_path: str | None = None, - create_app: t.Callable[..., Flask] | None = None, - set_debug_flag: bool = True, - ) -> None: - #: Optionally the import path for the Flask application. - self.app_import_path = app_import_path - #: Optionally a function that is passed the script info to create - #: the instance of the application. - self.create_app = create_app - #: A dictionary with arbitrary data that can be associated with - #: this script info. - self.data: dict[t.Any, t.Any] = {} - self.set_debug_flag = set_debug_flag - self._loaded_app: Flask | None = None - - def load_app(self) -> Flask: - """Loads the Flask app (if not yet loaded) and returns it. Calling - this multiple times will just result in the already loaded app to - be returned. 
- """ - if self._loaded_app is not None: - return self._loaded_app - - if self.create_app is not None: - app = self.create_app() - else: - if self.app_import_path: - path, name = ( - re.split(r":(?![\\/])", self.app_import_path, 1) + [None] - )[:2] - import_name = prepare_import(path) - app = locate_app(import_name, name) - else: - for path in ("wsgi.py", "app.py"): - import_name = prepare_import(path) - app = locate_app(import_name, None, raise_if_not_found=False) - - if app: - break - - if not app: - raise NoAppException( - "Could not locate a Flask application. Use the" - " 'flask --app' option, 'FLASK_APP' environment" - " variable, or a 'wsgi.py' or 'app.py' file in the" - " current directory." - ) - - if self.set_debug_flag: - # Update the app's debug flag through the descriptor so that - # other values repopulate as well. - app.debug = get_debug_flag() - - self._loaded_app = app - return app - - -pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) - - -def with_appcontext(f): - """Wraps a callback so that it's guaranteed to be executed with the - script's application context. - - Custom commands (and their options) registered under ``app.cli`` or - ``blueprint.cli`` will always have an app context available, this - decorator is not required in that case. - - .. versionchanged:: 2.2 - The app context is active for subcommands as well as the - decorated callback. The app context is always available to - ``app.cli`` command and parameter callbacks. - """ - - @click.pass_context - def decorator(__ctx, *args, **kwargs): - if not current_app: - app = __ctx.ensure_object(ScriptInfo).load_app() - __ctx.with_resource(app.app_context()) - - return __ctx.invoke(f, *args, **kwargs) - - return update_wrapper(decorator, f) - - -class AppGroup(click.Group): - """This works similar to a regular click :class:`~click.Group` but it - changes the behavior of the :meth:`command` decorator so that it - automatically wraps the functions in :func:`with_appcontext`. - - Not to be confused with :class:`FlaskGroup`. - """ - - def command(self, *args, **kwargs): - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` - unless it's disabled by passing ``with_appcontext=False``. - """ - wrap_for_ctx = kwargs.pop("with_appcontext", True) - - def decorator(f): - if wrap_for_ctx: - f = with_appcontext(f) - return click.Group.command(self, *args, **kwargs)(f) - - return decorator - - def group(self, *args, **kwargs): - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it defaults the group class to - :class:`AppGroup`. - """ - kwargs.setdefault("cls", AppGroup) - return click.Group.group(self, *args, **kwargs) - - -def _set_app(ctx: click.Context, param: click.Option, value: str | None) -> str | None: - if value is None: - return None - - info = ctx.ensure_object(ScriptInfo) - info.app_import_path = value - return value - - -# This option is eager so the app will be available if --help is given. -# --help is also eager, so --app must be before it in the param list. -# no_args_is_help bypasses eager processing, so this option must be -# processed manually in that case to ensure FLASK_APP gets picked up. -_app_option = click.Option( - ["-A", "--app"], - metavar="IMPORT", - help=( - "The Flask application or factory function to load, in the form 'module:name'." - " Module can be a dotted import or file path. 
Name is not required if it is" - " 'app', 'application', 'create_app', or 'make_app', and can be 'name(args)' to" - " pass arguments." - ), - is_eager=True, - expose_value=False, - callback=_set_app, -) - - -def _set_debug(ctx: click.Context, param: click.Option, value: bool) -> bool | None: - # If the flag isn't provided, it will default to False. Don't use - # that, let debug be set by env in that case. - source = ctx.get_parameter_source(param.name) # type: ignore[arg-type] - - if source is not None and source in ( - ParameterSource.DEFAULT, - ParameterSource.DEFAULT_MAP, - ): - return None - - # Set with env var instead of ScriptInfo.load so that it can be - # accessed early during a factory function. - os.environ["FLASK_DEBUG"] = "1" if value else "0" - return value - - -_debug_option = click.Option( - ["--debug/--no-debug"], - help="Set debug mode.", - expose_value=False, - callback=_set_debug, -) - - -def _env_file_callback( - ctx: click.Context, param: click.Option, value: str | None -) -> str | None: - if value is None: - return None - - import importlib - - try: - importlib.import_module("dotenv") - except ImportError: - raise click.BadParameter( - "python-dotenv must be installed to load an env file.", - ctx=ctx, - param=param, - ) from None - - # Don't check FLASK_SKIP_DOTENV, that only disables automatically - # loading .env and .flaskenv files. - load_dotenv(value) - return value - - -# This option is eager so env vars are loaded as early as possible to be -# used by other options. -_env_file_option = click.Option( - ["-e", "--env-file"], - type=click.Path(exists=True, dir_okay=False), - help="Load environment variables from this file. python-dotenv must be installed.", - is_eager=True, - expose_value=False, - callback=_env_file_callback, -) - - -class FlaskGroup(AppGroup): - """Special subclass of the :class:`AppGroup` group that supports - loading more commands from the configured Flask app. Normally a - developer does not have to interface with this class but there are - some very advanced use cases for which it makes sense to create an - instance of this. see :ref:`custom-scripts`. - - :param add_default_commands: if this is True then the default run and - shell commands will be added. - :param add_version_option: adds the ``--version`` option. - :param create_app: an optional callback that is passed the script info and - returns the loaded app. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param set_debug_flag: Set the app's debug flag. - - .. versionchanged:: 2.2 - Added the ``-A/--app``, ``--debug/--no-debug``, ``-e/--env-file`` options. - - .. versionchanged:: 2.2 - An app context is pushed when running ``app.cli`` commands, so - ``@with_appcontext`` is no longer required for those commands. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment variables - from :file:`.env` and :file:`.flaskenv` files. - """ - - def __init__( - self, - add_default_commands: bool = True, - create_app: t.Callable[..., Flask] | None = None, - add_version_option: bool = True, - load_dotenv: bool = True, - set_debug_flag: bool = True, - **extra: t.Any, - ) -> None: - params = list(extra.pop("params", None) or ()) - # Processing is done with option callbacks instead of a group - # callback. This allows users to make a custom group callback - # without losing the behavior. 
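# A minimal sketch of the "custom scripts" use case referenced in the
# FlaskGroup docstring; the factory and group names are illustrative.
import click
from flask import Flask
from flask.cli import FlaskGroup

def create_app():
    return Flask(__name__)

@click.group(cls=FlaskGroup, create_app=create_app)
def cli():
    """Management script for the hypothetical application."""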
--env-file must come first so - # that it is eagerly evaluated before --app. - params.extend((_env_file_option, _app_option, _debug_option)) - - if add_version_option: - params.append(version_option) - - if "context_settings" not in extra: - extra["context_settings"] = {} - - extra["context_settings"].setdefault("auto_envvar_prefix", "FLASK") - - super().__init__(params=params, **extra) - - self.create_app = create_app - self.load_dotenv = load_dotenv - self.set_debug_flag = set_debug_flag - - if add_default_commands: - self.add_command(run_command) - self.add_command(shell_command) - self.add_command(routes_command) - - self._loaded_plugin_commands = False - - def _load_plugin_commands(self): - if self._loaded_plugin_commands: - return - - if sys.version_info >= (3, 10): - from importlib import metadata - else: - # Use a backport on Python < 3.10. We technically have - # importlib.metadata on 3.8+, but the API changed in 3.10, - # so use the backport for consistency. - import importlib_metadata as metadata - - for ep in metadata.entry_points(group="flask.commands"): - self.add_command(ep.load(), ep.name) - - self._loaded_plugin_commands = True - - def get_command(self, ctx, name): - self._load_plugin_commands() - # Look up built-in and plugin commands, which should be - # available even if the app fails to load. - rv = super().get_command(ctx, name) - - if rv is not None: - return rv - - info = ctx.ensure_object(ScriptInfo) - - # Look up commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - app = info.load_app() - except NoAppException as e: - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - return None - - # Push an app context for the loaded app unless it is already - # active somehow. This makes the context available to parameter - # and command callbacks without needing @with_appcontext. - if not current_app or current_app._get_current_object() is not app: - ctx.with_resource(app.app_context()) - - return app.cli.get_command(ctx, name) - - def list_commands(self, ctx): - self._load_plugin_commands() - # Start with the built-in and plugin commands. - rv = set(super().list_commands(ctx)) - info = ctx.ensure_object(ScriptInfo) - - # Add commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - rv.update(info.load_app().cli.list_commands(ctx)) - except NoAppException as e: - # When an app couldn't be loaded, show the error message - # without the traceback. - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - except Exception: - # When any other errors occurred during loading, show the - # full traceback. - click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") - - return sorted(rv) - - def make_context( - self, - info_name: str | None, - args: list[str], - parent: click.Context | None = None, - **extra: t.Any, - ) -> click.Context: - # Set a flag to tell app.run to become a no-op. If app.run was - # not in a __name__ == __main__ guard, it would start the server - # when importing, blocking whatever command is being called. - os.environ["FLASK_RUN_FROM_CLI"] = "true" - - # Attempt to load .env and .flask env files. The --env-file - # option can cause another file to be loaded. 
- if get_load_dotenv(self.load_dotenv): - load_dotenv() - - if "obj" not in extra and "obj" not in self.context_settings: - extra["obj"] = ScriptInfo( - create_app=self.create_app, set_debug_flag=self.set_debug_flag - ) - - return super().make_context(info_name, args, parent=parent, **extra) - - def parse_args(self, ctx: click.Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help: - # Attempt to load --env-file and --app early in case they - # were given as env vars. Otherwise no_args_is_help will not - # see commands from app.cli. - _env_file_option.handle_parse_result(ctx, {}, []) - _app_option.handle_parse_result(ctx, {}, []) - - return super().parse_args(ctx, args) - - -def _path_is_ancestor(path, other): - """Take ``other`` and remove the length of ``path`` from it. Then join it - to ``path``. If it is the original value, ``path`` is an ancestor of - ``other``.""" - return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other - - -def load_dotenv(path: str | os.PathLike | None = None) -> bool: - """Load "dotenv" files in order of precedence to set environment variables. - - If an env var is already set it is not overwritten, so earlier files in the - list are preferred over later files. - - This is a no-op if `python-dotenv`_ is not installed. - - .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme - - :param path: Load the file at this location instead of searching. - :return: ``True`` if a file was loaded. - - .. versionchanged:: 2.0 - The current directory is not changed to the location of the - loaded file. - - .. versionchanged:: 2.0 - When loading the env files, set the default encoding to UTF-8. - - .. versionchanged:: 1.1.0 - Returns ``False`` when python-dotenv is not installed, or when - the given path isn't a file. - - .. versionadded:: 1.0 - """ - try: - import dotenv - except ImportError: - if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): - click.secho( - " * Tip: There are .env or .flaskenv files present." - ' Do "pip install python-dotenv" to use them.', - fg="yellow", - err=True, - ) - - return False - - # Always return after attempting to load a given path, don't load - # the default files. - if path is not None: - if os.path.isfile(path): - return dotenv.load_dotenv(path, encoding="utf-8") - - return False - - loaded = False - - for name in (".env", ".flaskenv"): - path = dotenv.find_dotenv(name, usecwd=True) - - if not path: - continue - - dotenv.load_dotenv(path, encoding="utf-8") - loaded = True - - return loaded # True if at least one file was located and loaded. - - -def show_server_banner(debug, app_import_path): - """Show extra startup messages the first time the server is run, - ignoring the reloader. - """ - if is_running_from_reloader(): - return - - if app_import_path is not None: - click.echo(f" * Serving Flask app '{app_import_path}'") - - if debug is not None: - click.echo(f" * Debug mode: {'on' if debug else 'off'}") - - -class CertParamType(click.ParamType): - """Click option type for the ``--cert`` option. Allows either an - existing file, the string ``'adhoc'``, or an import for a - :class:`~ssl.SSLContext` object. 
- """ - - name = "path" - - def __init__(self): - self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) - - def convert(self, value, param, ctx): - try: - import ssl - except ImportError: - raise click.BadParameter( - 'Using "--cert" requires Python to be compiled with SSL support.', - ctx, - param, - ) from None - - try: - return self.path_type(value, param, ctx) - except click.BadParameter: - value = click.STRING(value, param, ctx).lower() - - if value == "adhoc": - try: - import cryptography # noqa: F401 - except ImportError: - raise click.BadParameter( - "Using ad-hoc certificates requires the cryptography library.", - ctx, - param, - ) from None - - return value - - obj = import_string(value, silent=True) - - if isinstance(obj, ssl.SSLContext): - return obj - - raise - - -def _validate_key(ctx, param, value): - """The ``--key`` option must be specified when ``--cert`` is a file. - Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. - """ - cert = ctx.params.get("cert") - is_adhoc = cert == "adhoc" - - try: - import ssl - except ImportError: - is_context = False - else: - is_context = isinstance(cert, ssl.SSLContext) - - if value is not None: - if is_adhoc: - raise click.BadParameter( - 'When "--cert" is "adhoc", "--key" is not used.', ctx, param - ) - - if is_context: - raise click.BadParameter( - 'When "--cert" is an SSLContext object, "--key is not used.', ctx, param - ) - - if not cert: - raise click.BadParameter('"--cert" must also be specified.', ctx, param) - - ctx.params["cert"] = cert, value - - else: - if cert and not (is_adhoc or is_context): - raise click.BadParameter('Required when using "--cert".', ctx, param) - - return value - - -class SeparatedPathType(click.Path): - """Click option type that accepts a list of values separated by the - OS's path separator (``:``, ``;`` on Windows). Each value is - validated as a :class:`click.Path` type. - """ - - def convert(self, value, param, ctx): - items = self.split_envvar_value(value) - super_convert = super().convert - return [super_convert(item, param, ctx) for item in items] - - -@click.command("run", short_help="Run a development server.") -@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") -@click.option("--port", "-p", default=5000, help="The port to bind to.") -@click.option( - "--cert", - type=CertParamType(), - help="Specify a certificate file to use HTTPS.", - is_eager=True, -) -@click.option( - "--key", - type=click.Path(exists=True, dir_okay=False, resolve_path=True), - callback=_validate_key, - expose_value=False, - help="The key file to use when specifying a certificate.", -) -@click.option( - "--reload/--no-reload", - default=None, - help="Enable or disable the reloader. By default the reloader " - "is active if debug is enabled.", -) -@click.option( - "--debugger/--no-debugger", - default=None, - help="Enable or disable the debugger. By default the debugger " - "is active if debug is enabled.", -) -@click.option( - "--with-threads/--without-threads", - default=True, - help="Enable or disable multithreading.", -) -@click.option( - "--extra-files", - default=None, - type=SeparatedPathType(), - help=( - "Extra files that trigger a reload on change. Multiple paths" - f" are separated by {os.path.pathsep!r}." - ), -) -@click.option( - "--exclude-patterns", - default=None, - type=SeparatedPathType(), - help=( - "Files matching these fnmatch patterns will not trigger a reload" - " on change. 
Multiple patterns are separated by" - f" {os.path.pathsep!r}." - ), -) -@pass_script_info -def run_command( - info, - host, - port, - reload, - debugger, - with_threads, - cert, - extra_files, - exclude_patterns, -): - """Run a local development server. - - This server is for development purposes only. It does not provide - the stability, security, or performance of production WSGI servers. - - The reloader and debugger are enabled by default with the '--debug' - option. - """ - try: - app = info.load_app() - except Exception as e: - if is_running_from_reloader(): - # When reloading, print out the error immediately, but raise - # it later so the debugger or server can handle it. - traceback.print_exc() - err = e - - def app(environ, start_response): - raise err from None - - else: - # When not reloading, raise the error immediately so the - # command fails. - raise e from None - - debug = get_debug_flag() - - if reload is None: - reload = debug - - if debugger is None: - debugger = debug - - show_server_banner(debug, info.app_import_path) - - run_simple( - host, - port, - app, - use_reloader=reload, - use_debugger=debugger, - threaded=with_threads, - ssl_context=cert, - extra_files=extra_files, - exclude_patterns=exclude_patterns, - ) - - -run_command.params.insert(0, _debug_option) - - -@click.command("shell", short_help="Run a shell in the app context.") -@with_appcontext -def shell_command() -> None: - """Run an interactive Python shell in the context of a given - Flask application. The application will populate the default - namespace of this shell according to its configuration. - - This is useful for executing small snippets of management code - without having to manually configure the application. - """ - import code - - banner = ( - f"Python {sys.version} on {sys.platform}\n" - f"App: {current_app.import_name}\n" - f"Instance: {current_app.instance_path}" - ) - ctx: dict = {} - - # Support the regular Python interpreter startup script if someone - # is using it. - startup = os.environ.get("PYTHONSTARTUP") - if startup and os.path.isfile(startup): - with open(startup) as f: - eval(compile(f.read(), startup, "exec"), ctx) - - ctx.update(current_app.make_shell_context()) - - # Site, customize, or startup script can set a hook to call when - # entering interactive mode. The default one sets up readline with - # tab and history completion. - interactive_hook = getattr(sys, "__interactivehook__", None) - - if interactive_hook is not None: - try: - import readline - from rlcompleter import Completer - except ImportError: - pass - else: - # rlcompleter uses __main__.__dict__ by default, which is - # flask.__main__. Use the shell context instead. - readline.set_completer(Completer(ctx).complete) - - interactive_hook() - - code.interact(banner=banner, local=ctx) - - -@click.command("routes", short_help="Show the routes for the app.") -@click.option( - "--sort", - "-s", - type=click.Choice(("endpoint", "methods", "domain", "rule", "match")), - default="endpoint", - help=( - "Method to sort routes by. 'match' is the order that Flask will match routes" - " when dispatching a request." 
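# The `flask shell` command above fills its namespace from
# current_app.make_shell_context(). A hedged sketch of adding extra names;
# ``db`` and ``User`` are hypothetical objects from application code.
from flask import Flask

app = Flask(__name__)

@app.shell_context_processor
def make_shell_context():
    return {"db": db, "User": User}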
- ), -) -@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") -@with_appcontext -def routes_command(sort: str, all_methods: bool) -> None: - """Show all registered routes with endpoints and methods.""" - rules = list(current_app.url_map.iter_rules()) - - if not rules: - click.echo("No routes were registered.") - return - - ignored_methods = set() if all_methods else {"HEAD", "OPTIONS"} - host_matching = current_app.url_map.host_matching - has_domain = any(rule.host if host_matching else rule.subdomain for rule in rules) - rows = [] - - for rule in rules: - row = [ - rule.endpoint, - ", ".join(sorted((rule.methods or set()) - ignored_methods)), - ] - - if has_domain: - row.append((rule.host if host_matching else rule.subdomain) or "") - - row.append(rule.rule) - rows.append(row) - - headers = ["Endpoint", "Methods"] - sorts = ["endpoint", "methods"] - - if has_domain: - headers.append("Host" if host_matching else "Subdomain") - sorts.append("domain") - - headers.append("Rule") - sorts.append("rule") - - try: - rows.sort(key=itemgetter(sorts.index(sort))) - except ValueError: - pass - - rows.insert(0, headers) - widths = [max(len(row[i]) for row in rows) for i in range(len(headers))] - rows.insert(1, ["-" * w for w in widths]) - template = " ".join(f"{{{i}:<{w}}}" for i, w in enumerate(widths)) - - for row in rows: - click.echo(template.format(*row)) - - -cli = FlaskGroup( - name="flask", - help="""\ -A general utility script for Flask applications. - -An application to load must be given with the '--app' option, -'FLASK_APP' environment variable, or with a 'wsgi.py' or 'app.py' file -in the current directory. -""", -) - - -def main() -> None: - cli.main() - - -if __name__ == "__main__": - main() diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/config.py b/venv_flaskchat/lib/python3.11/site-packages/flask/config.py deleted file mode 100644 index a73dd78..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/config.py +++ /dev/null @@ -1,345 +0,0 @@ -from __future__ import annotations - -import errno -import json -import os -import types -import typing as t - -from werkzeug.utils import import_string - - -class ConfigAttribute: - """Makes an attribute forward to the config""" - - def __init__(self, name: str, get_converter: t.Callable | None = None) -> None: - self.__name__ = name - self.get_converter = get_converter - - def __get__(self, obj: t.Any, owner: t.Any = None) -> t.Any: - if obj is None: - return self - rv = obj.config[self.__name__] - if self.get_converter is not None: - rv = self.get_converter(rv) - return rv - - def __set__(self, obj: t.Any, value: t.Any) -> None: - obj.config[self.__name__] = value - - -class Config(dict): - """Works exactly like a dict but provides ways to fill it from files - or special dictionaries. There are two common patterns to populate the - config. - - Either you can fill the config from a config file:: - - app.config.from_pyfile('yourconfig.cfg') - - Or alternatively you can define the configuration options in the - module that calls :meth:`from_object` or provide an import path to - a module that should be loaded. It is also possible to tell it to - use the same module and with that provide the configuration values - just before the call:: - - DEBUG = True - SECRET_KEY = 'development key' - app.config.from_object(__name__) - - In both cases (loading from any Python file or loading from modules), - only uppercase keys are added to the config. 
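# A sketch of the uppercase-only loading described above; the config class,
# its values, and the application instance are assumptions for illustration.
from flask import Flask

app = Flask(__name__)

class DefaultConfig:
    DEBUG = False
    SECRET_KEY = "dev"           # uppercase attributes are copied
    internal_note = "ignored"    # lowercase attributes are skipped

app.config.from_object(DefaultConfig)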
This makes it possible to use - lowercase values in the config file for temporary values that are not added - to the config or to define the config keys in the same file that implements - the application. - - Probably the most interesting way to load configurations is from an - environment variable pointing to a file:: - - app.config.from_envvar('YOURAPPLICATION_SETTINGS') - - In this case before launching the application you have to set this - environment variable to the file you want to use. On Linux and OS X - use the export statement:: - - export YOURAPPLICATION_SETTINGS='/path/to/config/file' - - On windows use `set` instead. - - :param root_path: path to which files are read relative from. When the - config object is created by the application, this is - the application's :attr:`~flask.Flask.root_path`. - :param defaults: an optional dictionary of default values - """ - - def __init__(self, root_path: str, defaults: dict | None = None) -> None: - super().__init__(defaults or {}) - self.root_path = root_path - - def from_envvar(self, variable_name: str, silent: bool = False) -> bool: - """Loads a configuration from an environment variable pointing to - a configuration file. This is basically just a shortcut with nicer - error messages for this line of code:: - - app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) - - :param variable_name: name of the environment variable - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - """ - rv = os.environ.get(variable_name) - if not rv: - if silent: - return False - raise RuntimeError( - f"The environment variable {variable_name!r} is not set" - " and as such configuration could not be loaded. Set" - " this variable and make it point to a configuration" - " file" - ) - return self.from_pyfile(rv, silent=silent) - - def from_prefixed_env( - self, prefix: str = "FLASK", *, loads: t.Callable[[str], t.Any] = json.loads - ) -> bool: - """Load any environment variables that start with ``FLASK_``, - dropping the prefix from the env key for the config key. Values - are passed through a loading function to attempt to convert them - to more specific types than strings. - - Keys are loaded in :func:`sorted` order. - - The default loading function attempts to parse values as any - valid JSON type, including dicts and lists. - - Specific items in nested dicts can be set by separating the - keys with double underscores (``__``). If an intermediate key - doesn't exist, it will be initialized to an empty dict. - - :param prefix: Load env vars that start with this prefix, - separated with an underscore (``_``). - :param loads: Pass each string value to this function and use - the returned value as the config value. If any error is - raised it is ignored and the value remains a string. The - default is :func:`json.loads`. - - .. versionadded:: 2.1 - """ - prefix = f"{prefix}_" - len_prefix = len(prefix) - - for key in sorted(os.environ): - if not key.startswith(prefix): - continue - - value = os.environ[key] - - try: - value = loads(value) - except Exception: - # Keep the value as a string if loading failed. - pass - - # Change to key.removeprefix(prefix) on Python >= 3.9. - key = key[len_prefix:] - - if "__" not in key: - # A non-nested key, set directly. - self[key] = value - continue - - # Traverse nested dictionaries with keys separated by "__". 
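# A sketch of the prefix and nesting rules implemented here, assuming these
# environment variables were exported before startup (values are examples)
# and that ``app`` is an existing Flask instance:
#   FLASK_SECRET_KEY=dev
#   FLASK_MAIL_PORT=25
#   FLASK_SQLALCHEMY__ENGINE_OPTIONS__ECHO=true
from flask import Flask

app = Flask(__name__)
app.config.from_prefixed_env()
app.config["SECRET_KEY"]                             # "dev"
app.config["MAIL_PORT"]                              # 25, parsed as JSON
app.config["SQLALCHEMY"]["ENGINE_OPTIONS"]["ECHO"]   # True, nested via "__"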
- current = self - *parts, tail = key.split("__") - - for part in parts: - # If an intermediate dict does not exist, create it. - if part not in current: - current[part] = {} - - current = current[part] - - current[tail] = value - - return True - - def from_pyfile(self, filename: str, silent: bool = False) -> bool: - """Updates the values in the config from a Python file. This function - behaves as if the file was imported as module with the - :meth:`from_object` function. - - :param filename: the filename of the config. This can either be an - absolute filename or a filename relative to the - root path. - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - - .. versionadded:: 0.7 - `silent` parameter. - """ - filename = os.path.join(self.root_path, filename) - d = types.ModuleType("config") - d.__file__ = filename - try: - with open(filename, mode="rb") as config_file: - exec(compile(config_file.read(), filename, "exec"), d.__dict__) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): - return False - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - self.from_object(d) - return True - - def from_object(self, obj: object | str) -> None: - """Updates the values from the given object. An object can be of one - of the following two types: - - - a string: in this case the object with that name will be imported - - an actual object reference: that object is used directly - - Objects are usually either modules or classes. :meth:`from_object` - loads only the uppercase attributes of the module/class. A ``dict`` - object will not work with :meth:`from_object` because the keys of a - ``dict`` are not attributes of the ``dict`` class. - - Example of module-based configuration:: - - app.config.from_object('yourapplication.default_config') - from yourapplication import default_config - app.config.from_object(default_config) - - Nothing is done to the object before loading. If the object is a - class and has ``@property`` attributes, it needs to be - instantiated before being passed to this method. - - You should not use this function to load the actual configuration but - rather configuration defaults. The actual config should be loaded - with :meth:`from_pyfile` and ideally from a location not within the - package because the package might be installed system wide. - - See :ref:`config-dev-prod` for an example of class-based configuration - using :meth:`from_object`. - - :param obj: an import name or object - """ - if isinstance(obj, str): - obj = import_string(obj) - for key in dir(obj): - if key.isupper(): - self[key] = getattr(obj, key) - - def from_file( - self, - filename: str, - load: t.Callable[[t.IO[t.Any]], t.Mapping], - silent: bool = False, - text: bool = True, - ) -> bool: - """Update the values in the config from a file that is loaded - using the ``load`` parameter. The loaded data is passed to the - :meth:`from_mapping` method. - - .. code-block:: python - - import json - app.config.from_file("config.json", load=json.load) - - import tomllib - app.config.from_file("config.toml", load=tomllib.load, text=False) - - :param filename: The path to the data file. This can be an - absolute path or relative to the config root path. - :param load: A callable that takes a file handle and returns a - mapping of loaded data from the file. - :type load: ``Callable[[Reader], Mapping]`` where ``Reader`` - implements a ``read`` method. 
- :param silent: Ignore the file if it doesn't exist. - :param text: Open the file in text or binary mode. - :return: ``True`` if the file was loaded successfully. - - .. versionchanged:: 2.3 - The ``text`` parameter was added. - - .. versionadded:: 2.0 - """ - filename = os.path.join(self.root_path, filename) - - try: - with open(filename, "r" if text else "rb") as f: - obj = load(f) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR): - return False - - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - - return self.from_mapping(obj) - - def from_mapping( - self, mapping: t.Mapping[str, t.Any] | None = None, **kwargs: t.Any - ) -> bool: - """Updates the config like :meth:`update` ignoring items with - non-upper keys. - - :return: Always returns ``True``. - - .. versionadded:: 0.11 - """ - mappings: dict[str, t.Any] = {} - if mapping is not None: - mappings.update(mapping) - mappings.update(kwargs) - for key, value in mappings.items(): - if key.isupper(): - self[key] = value - return True - - def get_namespace( - self, namespace: str, lowercase: bool = True, trim_namespace: bool = True - ) -> dict[str, t.Any]: - """Returns a dictionary containing a subset of configuration options - that match the specified namespace/prefix. Example usage:: - - app.config['IMAGE_STORE_TYPE'] = 'fs' - app.config['IMAGE_STORE_PATH'] = '/var/app/images' - app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' - image_store_config = app.config.get_namespace('IMAGE_STORE_') - - The resulting dictionary `image_store_config` would look like:: - - { - 'type': 'fs', - 'path': '/var/app/images', - 'base_url': 'http://img.website.com' - } - - This is often useful when configuration options map directly to - keyword arguments in functions or class constructors. - - :param namespace: a configuration namespace - :param lowercase: a flag indicating if the keys of the resulting - dictionary should be lowercase - :param trim_namespace: a flag indicating if the keys of the resulting - dictionary should not include the namespace - - .. versionadded:: 0.11 - """ - rv = {} - for k, v in self.items(): - if not k.startswith(namespace): - continue - if trim_namespace: - key = k[len(namespace) :] - else: - key = k - if lowercase: - key = key.lower() - rv[key] = v - return rv - - def __repr__(self) -> str: - return f"<{type(self).__name__} {dict.__repr__(self)}>" diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/ctx.py b/venv_flaskchat/lib/python3.11/site-packages/flask/ctx.py deleted file mode 100644 index b37e4e0..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/ctx.py +++ /dev/null @@ -1,440 +0,0 @@ -from __future__ import annotations - -import contextvars -import sys -import typing as t -from functools import update_wrapper -from types import TracebackType - -from werkzeug.exceptions import HTTPException - -from . import typing as ft -from .globals import _cv_app -from .globals import _cv_request -from .signals import appcontext_popped -from .signals import appcontext_pushed - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .sessions import SessionMixin - from .wrappers import Request - - -# a singleton sentinel value for parameter defaults -_sentinel = object() - - -class _AppCtxGlobals: - """A plain object. Used as a namespace for storing data during an - application context. - - Creating an app context automatically creates this object, which is - made available as the :data:`g` proxy. - - .. 
describe:: 'key' in g - - Check whether an attribute is present. - - .. versionadded:: 0.10 - - .. describe:: iter(g) - - Return an iterator over the attribute names. - - .. versionadded:: 0.10 - """ - - # Define attr methods to let mypy know this is a namespace object - # that has arbitrary attributes. - - def __getattr__(self, name: str) -> t.Any: - try: - return self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def __setattr__(self, name: str, value: t.Any) -> None: - self.__dict__[name] = value - - def __delattr__(self, name: str) -> None: - try: - del self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def get(self, name: str, default: t.Any | None = None) -> t.Any: - """Get an attribute by name, or a default value. Like - :meth:`dict.get`. - - :param name: Name of attribute to get. - :param default: Value to return if the attribute is not present. - - .. versionadded:: 0.10 - """ - return self.__dict__.get(name, default) - - def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: - """Get and remove an attribute by name. Like :meth:`dict.pop`. - - :param name: Name of attribute to pop. - :param default: Value to return if the attribute is not present, - instead of raising a ``KeyError``. - - .. versionadded:: 0.11 - """ - if default is _sentinel: - return self.__dict__.pop(name) - else: - return self.__dict__.pop(name, default) - - def setdefault(self, name: str, default: t.Any = None) -> t.Any: - """Get the value of an attribute if it is present, otherwise - set and return a default value. Like :meth:`dict.setdefault`. - - :param name: Name of attribute to get. - :param default: Value to set and return if the attribute is not - present. - - .. versionadded:: 0.11 - """ - return self.__dict__.setdefault(name, default) - - def __contains__(self, item: str) -> bool: - return item in self.__dict__ - - def __iter__(self) -> t.Iterator[str]: - return iter(self.__dict__) - - def __repr__(self) -> str: - ctx = _cv_app.get(None) - if ctx is not None: - return f"" - return object.__repr__(self) - - -def after_this_request(f: ft.AfterRequestCallable) -> ft.AfterRequestCallable: - """Executes a function after this request. This is useful to modify - response objects. The function is passed the response object and has - to return the same or a new one. - - Example:: - - @app.route('/') - def index(): - @after_this_request - def add_header(response): - response.headers['X-Foo'] = 'Parachute' - return response - return 'Hello World!' - - This is more useful if a function other than the view function wants to - modify a response. For instance think of a decorator that wants to add - some headers without converting the return value into a response object. - - .. versionadded:: 0.9 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'after_this_request' can only be used when a request" - " context is active, such as in a view function." - ) - - ctx._after_request_functions.append(f) - return f - - -def copy_current_request_context(f: t.Callable) -> t.Callable: - """A helper function that decorates a function to retain the current - request context. This is useful when working with greenlets. The moment - the function is decorated a copy of the request context is created and - then pushed when the function is called. The current session is also - included in the copied request context. 
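# A common pattern built on the _AppCtxGlobals namespace documented above;
# ``connect_to_database`` is a hypothetical helper, not part of Flask.
from flask import g

def get_db():
    if "db" not in g:                  # 'key' in g, as described above
        g.db = connect_to_database()
    return g.db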
- - Example:: - - import gevent - from flask import copy_current_request_context - - @app.route('/') - def index(): - @copy_current_request_context - def do_some_work(): - # do some work here, it can access flask.request or - # flask.session like you would otherwise in the view function. - ... - gevent.spawn(do_some_work) - return 'Regular response' - - .. versionadded:: 0.10 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'copy_current_request_context' can only be used when a" - " request context is active, such as in a view function." - ) - - ctx = ctx.copy() - - def wrapper(*args, **kwargs): - with ctx: - return ctx.app.ensure_sync(f)(*args, **kwargs) - - return update_wrapper(wrapper, f) - - -def has_request_context() -> bool: - """If you have code that wants to test if a request context is there or - not this function can be used. For instance, you may want to take advantage - of request information if the request object is available, but fail - silently if it is unavailable. - - :: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and has_request_context(): - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - Alternatively you can also just test any of the context bound objects - (such as :class:`request` or :class:`g`) for truthness:: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and request: - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - .. versionadded:: 0.7 - """ - return _cv_request.get(None) is not None - - -def has_app_context() -> bool: - """Works like :func:`has_request_context` but for the application - context. You can also just do a boolean check on the - :data:`current_app` object instead. - - .. versionadded:: 0.9 - """ - return _cv_app.get(None) is not None - - -class AppContext: - """The app context contains application-specific information. An app - context is created and pushed at the beginning of each request if - one is not already active. An app context is also pushed when - running CLI commands. - """ - - def __init__(self, app: Flask) -> None: - self.app = app - self.url_adapter = app.create_url_adapter(None) - self.g: _AppCtxGlobals = app.app_ctx_globals_class() - self._cv_tokens: list[contextvars.Token] = [] - - def push(self) -> None: - """Binds the app context to the current context.""" - self._cv_tokens.append(_cv_app.set(self)) - appcontext_pushed.send(self.app, _async_wrapper=self.app.ensure_sync) - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the app context.""" - try: - if len(self._cv_tokens) == 1: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_appcontext(exc) - finally: - ctx = _cv_app.get() - _cv_app.reset(self._cv_tokens.pop()) - - if ctx is not self: - raise AssertionError( - f"Popped wrong app context. ({ctx!r} instead of {self!r})" - ) - - appcontext_popped.send(self.app, _async_wrapper=self.app.ensure_sync) - - def __enter__(self) -> AppContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - -class RequestContext: - """The request context contains per-request information. The Flask - app creates and pushes it at the beginning of the request, then pops - it at the end of the request. 
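# A sketch of pushing the two contexts manually, e.g. in scripts or tests;
# the route and stored value are illustrative.
from flask import Flask, g, request

app = Flask(__name__)

with app.app_context():                 # current_app and g become available
    g.setdefault("counter", 0)

with app.test_request_context("/hello", method="POST"):
    assert request.path == "/hello"     # request and session available as well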
It will create the URL adapter and - request object for the WSGI environment provided. - - Do not attempt to use this class directly, instead use - :meth:`~flask.Flask.test_request_context` and - :meth:`~flask.Flask.request_context` to create this object. - - When the request context is popped, it will evaluate all the - functions registered on the application for teardown execution - (:meth:`~flask.Flask.teardown_request`). - - The request context is automatically popped at the end of the - request. When using the interactive debugger, the context will be - restored so ``request`` is still accessible. Similarly, the test - client can preserve the context after the request ends. However, - teardown functions may already have closed some resources such as - database connections. - """ - - def __init__( - self, - app: Flask, - environ: dict, - request: Request | None = None, - session: SessionMixin | None = None, - ) -> None: - self.app = app - if request is None: - request = app.request_class(environ) - request.json_module = app.json - self.request: Request = request - self.url_adapter = None - try: - self.url_adapter = app.create_url_adapter(self.request) - except HTTPException as e: - self.request.routing_exception = e - self.flashes: list[tuple[str, str]] | None = None - self.session: SessionMixin | None = session - # Functions that should be executed after the request on the response - # object. These will be called before the regular "after_request" - # functions. - self._after_request_functions: list[ft.AfterRequestCallable] = [] - - self._cv_tokens: list[tuple[contextvars.Token, AppContext | None]] = [] - - def copy(self) -> RequestContext: - """Creates a copy of this request context with the same request object. - This can be used to move a request context to a different greenlet. - Because the actual request object is the same this cannot be used to - move a request context to a different thread unless access to the - request object is locked. - - .. versionadded:: 0.10 - - .. versionchanged:: 1.1 - The current session object is used instead of reloading the original - data. This prevents `flask.session` pointing to an out-of-date object. - """ - return self.__class__( - self.app, - environ=self.request.environ, - request=self.request, - session=self.session, - ) - - def match_request(self) -> None: - """Can be overridden by a subclass to hook into the matching - of the request. - """ - try: - result = self.url_adapter.match(return_rule=True) # type: ignore - self.request.url_rule, self.request.view_args = result # type: ignore - except HTTPException as e: - self.request.routing_exception = e - - def push(self) -> None: - # Before we push the request context we have to ensure that there - # is an application context. - app_ctx = _cv_app.get(None) - - if app_ctx is None or app_ctx.app is not self.app: - app_ctx = self.app.app_context() - app_ctx.push() - else: - app_ctx = None - - self._cv_tokens.append((_cv_request.set(self), app_ctx)) - - # Open the session at the moment that the request context is available. - # This allows a custom open_session method to use the request context. - # Only open a new session if this is the first time the request was - # pushed, otherwise stream_with_context loses the session. 
- if self.session is None: - session_interface = self.app.session_interface - self.session = session_interface.open_session(self.app, self.request) - - if self.session is None: - self.session = session_interface.make_null_session(self.app) - - # Match the request URL after loading the session, so that the - # session is available in custom URL converters. - if self.url_adapter is not None: - self.match_request() - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the request context and unbinds it by doing that. This will - also trigger the execution of functions registered by the - :meth:`~flask.Flask.teardown_request` decorator. - - .. versionchanged:: 0.9 - Added the `exc` argument. - """ - clear_request = len(self._cv_tokens) == 1 - - try: - if clear_request: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_request(exc) - - request_close = getattr(self.request, "close", None) - if request_close is not None: - request_close() - finally: - ctx = _cv_request.get() - token, app_ctx = self._cv_tokens.pop() - _cv_request.reset(token) - - # get rid of circular dependencies at the end of the request - # so that we don't require the GC to be active. - if clear_request: - ctx.request.environ["werkzeug.request"] = None - - if app_ctx is not None: - app_ctx.pop(exc) - - if ctx is not self: - raise AssertionError( - f"Popped wrong request context. ({ctx!r} instead of {self!r})" - ) - - def __enter__(self) -> RequestContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - def __repr__(self) -> str: - return ( - f"<{type(self).__name__} {self.request.url!r}" - f" [{self.request.method}] of {self.app.name}>" - ) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/debughelpers.py b/venv_flaskchat/lib/python3.11/site-packages/flask/debughelpers.py deleted file mode 100644 index 6061441..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/debughelpers.py +++ /dev/null @@ -1,160 +0,0 @@ -from __future__ import annotations - -import typing as t - -from .app import Flask -from .blueprints import Blueprint -from .globals import request_ctx - - -class UnexpectedUnicodeError(AssertionError, UnicodeError): - """Raised in places where we want some better error reporting for - unexpected unicode or binary data. - """ - - -class DebugFilesKeyError(KeyError, AssertionError): - """Raised from request.files during debugging. The idea is that it can - provide a better error message than just a generic KeyError/BadRequest. - """ - - def __init__(self, request, key): - form_matches = request.form.getlist(key) - buf = [ - f"You tried to access the file {key!r} in the request.files" - " dictionary but it does not exist. The mimetype for the" - f" request is {request.mimetype!r} instead of" - " 'multipart/form-data' which means that no file contents" - " were transmitted. To fix this error you should provide" - ' enctype="multipart/form-data" in your form.' - ] - if form_matches: - names = ", ".join(repr(x) for x in form_matches) - buf.append( - "\n\nThe browser instead transmitted some file names. " - f"This was submitted: {names}" - ) - self.msg = "".join(buf) - - def __str__(self): - return self.msg - - -class FormDataRoutingRedirect(AssertionError): - """This exception is raised in debug mode if a routing redirect - would cause the browser to drop the method or body. 
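A usage sketch for the request-context lifecycle shown above, using ``Flask.test_request_context()`` (the supported entry point named in the class docstring) instead of constructing ``RequestContext`` by hand; the path, method, and form field are illustrative::

    from flask import Flask, request

    app = Flask(__name__)

    # test_request_context() builds a RequestContext from a fake WSGI environ.
    # Entering it calls push() (which also pushes an app context if none is
    # active); leaving it calls pop(), running teardown_request functions.
    with app.test_request_context("/hello", method="POST", data={"name": "demo"}):
        assert request.method == "POST"
        assert request.path == "/hello"
        assert request.form["name"] == "demo"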
This happens - when method is not GET, HEAD or OPTIONS and the status code is not - 307 or 308. - """ - - def __init__(self, request): - exc = request.routing_exception - buf = [ - f"A request was sent to '{request.url}', but routing issued" - f" a redirect to the canonical URL '{exc.new_url}'." - ] - - if f"{request.base_url}/" == exc.new_url.partition("?")[0]: - buf.append( - " The URL was defined with a trailing slash. Flask" - " will redirect to the URL with a trailing slash if it" - " was accessed without one." - ) - - buf.append( - " Send requests to the canonical URL, or use 307 or 308 for" - " routing redirects. Otherwise, browsers will drop form" - " data.\n\n" - "This exception is only raised in debug mode." - ) - super().__init__("".join(buf)) - - -def attach_enctype_error_multidict(request): - """Patch ``request.files.__getitem__`` to raise a descriptive error - about ``enctype=multipart/form-data``. - - :param request: The request to patch. - :meta private: - """ - oldcls = request.files.__class__ - - class newcls(oldcls): - def __getitem__(self, key): - try: - return super().__getitem__(key) - except KeyError as e: - if key not in request.form: - raise - - raise DebugFilesKeyError(request, key).with_traceback( - e.__traceback__ - ) from None - - newcls.__name__ = oldcls.__name__ - newcls.__module__ = oldcls.__module__ - request.files.__class__ = newcls - - -def _dump_loader_info(loader) -> t.Generator: - yield f"class: {type(loader).__module__}.{type(loader).__name__}" - for key, value in sorted(loader.__dict__.items()): - if key.startswith("_"): - continue - if isinstance(value, (tuple, list)): - if not all(isinstance(x, str) for x in value): - continue - yield f"{key}:" - for item in value: - yield f" - {item}" - continue - elif not isinstance(value, (str, int, float, bool)): - continue - yield f"{key}: {value!r}" - - -def explain_template_loading_attempts(app: Flask, template, attempts) -> None: - """This should help developers understand what failed""" - info = [f"Locating template {template!r}:"] - total_found = 0 - blueprint = None - if request_ctx and request_ctx.request.blueprint is not None: - blueprint = request_ctx.request.blueprint - - for idx, (loader, srcobj, triple) in enumerate(attempts): - if isinstance(srcobj, Flask): - src_info = f"application {srcobj.import_name!r}" - elif isinstance(srcobj, Blueprint): - src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" - else: - src_info = repr(srcobj) - - info.append(f"{idx + 1:5}: trying loader of {src_info}") - - for line in _dump_loader_info(loader): - info.append(f" {line}") - - if triple is None: - detail = "no match" - else: - detail = f"found ({triple[1] or ''!r})" - total_found += 1 - info.append(f" -> {detail}") - - seems_fishy = False - if total_found == 0: - info.append("Error: the template could not be found.") - seems_fishy = True - elif total_found > 1: - info.append("Warning: multiple loaders returned a match for the template.") - seems_fishy = True - - if blueprint is not None and seems_fishy: - info.append( - " The template was looked up from an endpoint that belongs" - f" to the blueprint {blueprint!r}." 
- ) - info.append(" Maybe you did not place a template in the right folder?") - info.append(" See https://flask.palletsprojects.com/blueprints/#templates") - - app.logger.info("\n".join(info)) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/globals.py b/venv_flaskchat/lib/python3.11/site-packages/flask/globals.py deleted file mode 100644 index e9cd4ac..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/globals.py +++ /dev/null @@ -1,96 +0,0 @@ -from __future__ import annotations - -import typing as t -from contextvars import ContextVar - -from werkzeug.local import LocalProxy - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .ctx import _AppCtxGlobals - from .ctx import AppContext - from .ctx import RequestContext - from .sessions import SessionMixin - from .wrappers import Request - - -class _FakeStack: - def __init__(self, name: str, cv: ContextVar[t.Any]) -> None: - self.name = name - self.cv = cv - - @property - def top(self) -> t.Any | None: - import warnings - - warnings.warn( - f"'_{self.name}_ctx_stack' is deprecated and will be removed in Flask 2.4." - f" Use 'g' to store data, or '{self.name}_ctx' to access the current" - " context.", - DeprecationWarning, - stacklevel=2, - ) - return self.cv.get(None) - - -_no_app_msg = """\ -Working outside of application context. - -This typically means that you attempted to use functionality that needed -the current application. To solve this, set up an application context -with app.app_context(). See the documentation for more information.\ -""" -_cv_app: ContextVar[AppContext] = ContextVar("flask.app_ctx") -__app_ctx_stack = _FakeStack("app", _cv_app) -app_ctx: AppContext = LocalProxy( # type: ignore[assignment] - _cv_app, unbound_message=_no_app_msg -) -current_app: Flask = LocalProxy( # type: ignore[assignment] - _cv_app, "app", unbound_message=_no_app_msg -) -g: _AppCtxGlobals = LocalProxy( # type: ignore[assignment] - _cv_app, "g", unbound_message=_no_app_msg -) - -_no_req_msg = """\ -Working outside of request context. - -This typically means that you attempted to use functionality that needed -an active HTTP request. 
Consult the documentation on testing for -information about how to avoid this problem.\ -""" -_cv_request: ContextVar[RequestContext] = ContextVar("flask.request_ctx") -__request_ctx_stack = _FakeStack("request", _cv_request) -request_ctx: RequestContext = LocalProxy( # type: ignore[assignment] - _cv_request, unbound_message=_no_req_msg -) -request: Request = LocalProxy( # type: ignore[assignment] - _cv_request, "request", unbound_message=_no_req_msg -) -session: SessionMixin = LocalProxy( # type: ignore[assignment] - _cv_request, "session", unbound_message=_no_req_msg -) - - -def __getattr__(name: str) -> t.Any: - if name == "_app_ctx_stack": - import warnings - - warnings.warn( - "'_app_ctx_stack' is deprecated and will be removed in Flask 2.4.", - DeprecationWarning, - stacklevel=2, - ) - return __app_ctx_stack - - if name == "_request_ctx_stack": - import warnings - - warnings.warn( - "'_request_ctx_stack' is deprecated and will be removed in Flask 2.4.", - DeprecationWarning, - stacklevel=2, - ) - return __request_ctx_stack - - raise AttributeError(name) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/helpers.py b/venv_flaskchat/lib/python3.11/site-packages/flask/helpers.py deleted file mode 100644 index 61a0f81..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/helpers.py +++ /dev/null @@ -1,693 +0,0 @@ -from __future__ import annotations - -import os -import pkgutil -import socket -import sys -import typing as t -import warnings -from datetime import datetime -from functools import lru_cache -from functools import update_wrapper -from threading import RLock - -import werkzeug.utils -from werkzeug.exceptions import abort as _wz_abort -from werkzeug.utils import redirect as _wz_redirect - -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .globals import request_ctx -from .globals import session -from .signals import message_flashed - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.wrappers import Response as BaseResponse - from .wrappers import Response - - -def get_debug_flag() -> bool: - """Get whether debug mode should be enabled for the app, indicated by the - :envvar:`FLASK_DEBUG` environment variable. The default is ``False``. - """ - val = os.environ.get("FLASK_DEBUG") - return bool(val and val.lower() not in {"0", "false", "no"}) - - -def get_load_dotenv(default: bool = True) -> bool: - """Get whether the user has disabled loading default dotenv files by - setting :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load - the files. - - :param default: What to return if the env var isn't set. - """ - val = os.environ.get("FLASK_SKIP_DOTENV") - - if not val: - return default - - return val.lower() in ("0", "false", "no") - - -def stream_with_context( - generator_or_function: ( - t.Iterator[t.AnyStr] | t.Callable[..., t.Iterator[t.AnyStr]] - ) -) -> t.Iterator[t.AnyStr]: - """Request contexts disappear when the response is started on the server. - This is done for efficiency reasons and to make it less likely to encounter - memory leaks with badly written WSGI middlewares. The downside is that if - you are using streamed responses, the generator cannot access request bound - information any more. - - This function however can help you keep the context around for longer:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - @stream_with_context - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' 
- return Response(generate()) - - Alternatively it can also be used around a specific generator:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(stream_with_context(generate())) - - .. versionadded:: 0.9 - """ - try: - gen = iter(generator_or_function) # type: ignore - except TypeError: - - def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: - gen = generator_or_function(*args, **kwargs) # type: ignore - return stream_with_context(gen) - - return update_wrapper(decorator, generator_or_function) # type: ignore - - def generator() -> t.Generator: - ctx = _cv_request.get(None) - if ctx is None: - raise RuntimeError( - "'stream_with_context' can only be used when a request" - " context is active, such as in a view function." - ) - with ctx: - # Dummy sentinel. Has to be inside the context block or we're - # not actually keeping the context around. - yield None - - # The try/finally is here so that if someone passes a WSGI level - # iterator in we're still running the cleanup logic. Generators - # don't need that because they are closed on their destruction - # automatically. - try: - yield from gen - finally: - if hasattr(gen, "close"): - gen.close() - - # The trick is to start the generator. Then the code execution runs until - # the first dummy None is yielded at which point the context was already - # pushed. This item is discarded. Then when the iteration continues the - # real generator is executed. - wrapped_g = generator() - next(wrapped_g) - return wrapped_g - - -def make_response(*args: t.Any) -> Response: - """Sometimes it is necessary to set additional headers in a view. Because - views do not have to return response objects but can return a value that - is converted into a response object by Flask itself, it becomes tricky to - add headers to it. This function can be called instead of using a return - and you will get a response object which you can use to attach headers. - - If view looked like this and you want to add a new header:: - - def index(): - return render_template('index.html', foo=42) - - You can now do something like this:: - - def index(): - response = make_response(render_template('index.html', foo=42)) - response.headers['X-Parachutes'] = 'parachutes are cool' - return response - - This function accepts the very same arguments you can return from a - view function. This for example creates a response with a 404 error - code:: - - response = make_response(render_template('not_found.html'), 404) - - The other use case of this function is to force the return value of a - view function into a response which is helpful with view - decorators:: - - response = make_response(view_function()) - response.headers['X-Parachutes'] = 'parachutes are cool' - - Internally this function does the following things: - - - if no arguments are passed, it creates a new response argument - - if one argument is passed, :meth:`flask.Flask.make_response` - is invoked with it. - - if more than one argument is passed, the arguments are passed - to the :meth:`flask.Flask.make_response` function as tuple. - - .. 
versionadded:: 0.6 - """ - if not args: - return current_app.response_class() - if len(args) == 1: - args = args[0] - return current_app.make_response(args) # type: ignore - - -def url_for( - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, -) -> str: - """Generate a URL to the given endpoint with the given values. - - This requires an active request or application context, and calls - :meth:`current_app.url_for() `. See that method - for full documentation. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it is - external. - :param _external: If given, prefer the URL to be internal (False) or - require it to be external (True). External URLs include the - scheme and domain. When not in an active request, URLs are - external by default. - :param values: Values to use for the variable parts of the URL rule. - Unknown keys are appended as query string arguments, like - ``?a=b&c=d``. - - .. versionchanged:: 2.2 - Calls ``current_app.url_for``, allowing an app to override the - behavior. - - .. versionchanged:: 0.10 - The ``_scheme`` parameter was added. - - .. versionchanged:: 0.9 - The ``_anchor`` and ``_method`` parameters were added. - - .. versionchanged:: 0.9 - Calls ``app.handle_url_build_error`` on build errors. - """ - return current_app.url_for( - endpoint, - _anchor=_anchor, - _method=_method, - _scheme=_scheme, - _external=_external, - **values, - ) - - -def redirect( - location: str, code: int = 302, Response: type[BaseResponse] | None = None -) -> BaseResponse: - """Create a redirect response object. - - If :data:`~flask.current_app` is available, it will use its - :meth:`~flask.Flask.redirect` method, otherwise it will use - :func:`werkzeug.utils.redirect`. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - :param Response: The response class to use. Not used when - ``current_app`` is active, which uses ``app.response_class``. - - .. versionadded:: 2.2 - Calls ``current_app.redirect`` if available instead of always - using Werkzeug's default ``redirect``. - """ - if current_app: - return current_app.redirect(location, code=code) - - return _wz_redirect(location, code=code, Response=Response) - - -def abort(code: int | BaseResponse, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - """Raise an :exc:`~werkzeug.exceptions.HTTPException` for the given - status code. - - If :data:`~flask.current_app` is available, it will call its - :attr:`~flask.Flask.aborter` object, otherwise it will use - :func:`werkzeug.exceptions.abort`. - - :param code: The status code for the exception, which must be - registered in ``app.aborter``. - :param args: Passed to the exception. - :param kwargs: Passed to the exception. - - .. versionadded:: 2.2 - Calls ``current_app.aborter`` if available instead of always - using Werkzeug's default ``abort``. - """ - if current_app: - current_app.aborter(code, *args, **kwargs) - - _wz_abort(code, *args, **kwargs) - - -def get_template_attribute(template_name: str, attribute: str) -> t.Any: - """Loads a macro (or variable) a template exports. 
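A hedged sketch tying ``url_for``, ``redirect``, and ``abort`` together; the routes, endpoint names, and values are illustrative only::

    from flask import Flask, abort, redirect, url_for

    app = Flask(__name__)

    @app.route("/user/<name>")
    def profile(name):
        if name == "missing":
            abort(404)          # raises NotFound through app.aborter
        return f"Hello {name}"

    @app.route("/me")
    def me():
        # Build the URL for the 'profile' endpoint, then redirect to it.
        return redirect(url_for("profile", name="demo", _anchor="top"))

    with app.test_request_context("/"):
        print(url_for("profile", name="demo", _external=True))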
This can be used to - invoke a macro from within Python code. If you for example have a - template named :file:`_cider.html` with the following contents: - - .. sourcecode:: html+jinja - - {% macro hello(name) %}Hello {{ name }}!{% endmacro %} - - You can access this from Python code like this:: - - hello = get_template_attribute('_cider.html', 'hello') - return hello('World') - - .. versionadded:: 0.2 - - :param template_name: the name of the template - :param attribute: the name of the variable of macro to access - """ - return getattr(current_app.jinja_env.get_template(template_name).module, attribute) - - -def flash(message: str, category: str = "message") -> None: - """Flashes a message to the next request. In order to remove the - flashed message from the session and to display it to the user, - the template has to call :func:`get_flashed_messages`. - - .. versionchanged:: 0.3 - `category` parameter added. - - :param message: the message to be flashed. - :param category: the category for the message. The following values - are recommended: ``'message'`` for any kind of message, - ``'error'`` for errors, ``'info'`` for information - messages and ``'warning'`` for warnings. However any - kind of string can be used as category. - """ - # Original implementation: - # - # session.setdefault('_flashes', []).append((category, message)) - # - # This assumed that changes made to mutable structures in the session are - # always in sync with the session object, which is not true for session - # implementations that use external storage for keeping their keys/values. - flashes = session.get("_flashes", []) - flashes.append((category, message)) - session["_flashes"] = flashes - app = current_app._get_current_object() # type: ignore - message_flashed.send( - app, - _async_wrapper=app.ensure_sync, - message=message, - category=category, - ) - - -def get_flashed_messages( - with_categories: bool = False, category_filter: t.Iterable[str] = () -) -> list[str] | list[tuple[str, str]]: - """Pulls all flashed messages from the session and returns them. - Further calls in the same request to the function will return - the same messages. By default just the messages are returned, - but when `with_categories` is set to ``True``, the return value will - be a list of tuples in the form ``(category, message)`` instead. - - Filter the flashed messages to one or more categories by providing those - categories in `category_filter`. This allows rendering categories in - separate html blocks. The `with_categories` and `category_filter` - arguments are distinct: - - * `with_categories` controls whether categories are returned with message - text (``True`` gives a tuple, where ``False`` gives just the message text). - * `category_filter` filters the messages down to only those matching the - provided categories. - - See :doc:`/patterns/flashing` for examples. - - .. versionchanged:: 0.3 - `with_categories` parameter added. - - .. versionchanged:: 0.9 - `category_filter` parameter added. - - :param with_categories: set to ``True`` to also receive categories. - :param category_filter: filter of categories to limit return values. Only - categories in the list will be returned. 
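A small sketch of flashing and pulling messages as described above; it runs inside a test request context so the session is available, and the secret key and messages are illustrative::

    from flask import Flask, flash, get_flashed_messages

    app = Flask(__name__)
    app.secret_key = "dev"      # flashed messages are stored in the session

    with app.test_request_context("/"):
        flash("Profile saved", "info")
        flash("Disk almost full", "warning")
        # with_categories=True returns (category, message) tuples.
        print(get_flashed_messages(with_categories=True))
        # Later calls reuse the messages already pulled in this request;
        # category_filter narrows them to the given categories.
        print(get_flashed_messages(category_filter=["warning"]))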
- """ - flashes = request_ctx.flashes - if flashes is None: - flashes = session.pop("_flashes") if "_flashes" in session else [] - request_ctx.flashes = flashes - if category_filter: - flashes = list(filter(lambda f: f[0] in category_filter, flashes)) - if not with_categories: - return [x[1] for x in flashes] - return flashes - - -def _prepare_send_file_kwargs(**kwargs: t.Any) -> dict[str, t.Any]: - if kwargs.get("max_age") is None: - kwargs["max_age"] = current_app.get_send_file_max_age - - kwargs.update( - environ=request.environ, - use_x_sendfile=current_app.config["USE_X_SENDFILE"], - response_class=current_app.response_class, - _root_path=current_app.root_path, # type: ignore - ) - return kwargs - - -def send_file( - path_or_file: os.PathLike | str | t.BinaryIO, - mimetype: str | None = None, - as_attachment: bool = False, - download_name: str | None = None, - conditional: bool = True, - etag: bool | str = True, - last_modified: datetime | int | float | None = None, - max_age: None | (int | t.Callable[[str | None], int | None]) = None, -) -> Response: - """Send the contents of a file to the client. - - The first argument can be a file path or a file-like object. Paths - are preferred in most cases because Werkzeug can manage the file and - get extra information from the path. Passing a file-like object - requires that the file is opened in binary mode, and is mostly - useful when building a file in memory with :class:`io.BytesIO`. - - Never pass file paths provided by a user. The path is assumed to be - trusted, so a user could craft a path to access a file you didn't - intend. Use :func:`send_from_directory` to safely serve - user-requested paths from within a directory. - - If the WSGI server sets a ``file_wrapper`` in ``environ``, it is - used, otherwise Werkzeug's built-in wrapper is used. Alternatively, - if the HTTP server supports ``X-Sendfile``, configuring Flask with - ``USE_X_SENDFILE = True`` will tell the server to send the given - path, which is much more efficient than reading it in Python. - - :param path_or_file: The path to the file to send, relative to the - current working directory if a relative path is given. - Alternatively, a file-like object opened in binary mode. Make - sure the file pointer is seeked to the start of the data. - :param mimetype: The MIME type to send for the file. If not - provided, it will try to detect it from the file name. - :param as_attachment: Indicate to a browser that it should offer to - save the file instead of displaying it. - :param download_name: The default name browsers will use when saving - the file. Defaults to the passed file name. - :param conditional: Enable conditional and range responses based on - request headers. Requires passing a file path and ``environ``. - :param etag: Calculate an ETag for the file, which requires passing - a file path. Can also be a string to use instead. - :param last_modified: The last modified time to send for the file, - in seconds. If not provided, it will try to detect it from the - file path. - :param max_age: How long the client should cache the file, in - seconds. If set, ``Cache-Control`` will be ``public``, otherwise - it will be ``no-cache`` to prefer conditional caching. - - .. versionchanged:: 2.0 - ``download_name`` replaces the ``attachment_filename`` - parameter. If ``as_attachment=False``, it is passed with - ``Content-Disposition: inline`` instead. - - .. versionchanged:: 2.0 - ``max_age`` replaces the ``cache_timeout`` parameter. 
- ``conditional`` is enabled and ``max_age`` is not set by - default. - - .. versionchanged:: 2.0 - ``etag`` replaces the ``add_etags`` parameter. It can be a - string to use instead of generating one. - - .. versionchanged:: 2.0 - Passing a file-like object that inherits from - :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather - than sending an empty file. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionchanged:: 1.1 - ``filename`` may be a :class:`~os.PathLike` object. - - .. versionchanged:: 1.1 - Passing a :class:`~io.BytesIO` object supports range requests. - - .. versionchanged:: 1.0.3 - Filenames are encoded with ASCII instead of Latin-1 for broader - compatibility with WSGI servers. - - .. versionchanged:: 1.0 - UTF-8 filenames as specified in :rfc:`2231` are supported. - - .. versionchanged:: 0.12 - The filename is no longer automatically inferred from file - objects. If you want to use automatic MIME and etag support, - pass a filename via ``filename_or_fp`` or - ``attachment_filename``. - - .. versionchanged:: 0.12 - ``attachment_filename`` is preferred over ``filename`` for MIME - detection. - - .. versionchanged:: 0.9 - ``cache_timeout`` defaults to - :meth:`Flask.get_send_file_max_age`. - - .. versionchanged:: 0.7 - MIME guessing and etag support for file-like objects was - deprecated because it was unreliable. Pass a filename if you are - able to, otherwise attach an etag yourself. - - .. versionchanged:: 0.5 - The ``add_etags``, ``cache_timeout`` and ``conditional`` - parameters were added. The default behavior is to add etags. - - .. versionadded:: 0.2 - """ - return werkzeug.utils.send_file( # type: ignore[return-value] - **_prepare_send_file_kwargs( - path_or_file=path_or_file, - environ=request.environ, - mimetype=mimetype, - as_attachment=as_attachment, - download_name=download_name, - conditional=conditional, - etag=etag, - last_modified=last_modified, - max_age=max_age, - ) - ) - - -def send_from_directory( - directory: os.PathLike | str, - path: os.PathLike | str, - **kwargs: t.Any, -) -> Response: - """Send a file from within a directory using :func:`send_file`. - - .. code-block:: python - - @app.route("/uploads/") - def download_file(name): - return send_from_directory( - app.config['UPLOAD_FOLDER'], name, as_attachment=True - ) - - This is a secure way to serve files from a folder, such as static - files or uploads. Uses :func:`~werkzeug.security.safe_join` to - ensure the path coming from the client is not maliciously crafted to - point outside the specified directory. - - If the final path does not point to an existing regular file, - raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. - - :param directory: The directory that ``path`` must be located under, - relative to the current application's root path. - :param path: The path to the file to send, relative to - ``directory``. - :param kwargs: Arguments to pass to :func:`send_file`. - - .. versionchanged:: 2.0 - ``path`` replaces the ``filename`` parameter. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionadded:: 0.5 - """ - return werkzeug.utils.send_from_directory( # type: ignore[return-value] - directory, path, **_prepare_send_file_kwargs(**kwargs) - ) - - -def get_root_path(import_name: str) -> str: - """Find the root path of a package, or the path that contains a - module. 
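A minimal sketch of ``send_file`` with an in-memory file, using only the parameters documented above; the route, CSV content, and filename are illustrative::

    import io

    from flask import Flask, send_file

    app = Flask(__name__)

    @app.route("/report")
    def report():
        # File-like objects must be opened in binary mode. No path is given,
        # so supply the mimetype and download_name explicitly.
        buf = io.BytesIO(b"col_a,col_b\n1,2\n")
        return send_file(
            buf,
            mimetype="text/csv",
            as_attachment=True,
            download_name="report.csv",
        )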
If it cannot be found, returns the current working - directory. - - Not to be confused with the value returned by :func:`find_package`. - - :meta private: - """ - # Module already imported and has a file attribute. Use that first. - mod = sys.modules.get(import_name) - - if mod is not None and hasattr(mod, "__file__") and mod.__file__ is not None: - return os.path.dirname(os.path.abspath(mod.__file__)) - - # Next attempt: check the loader. - loader = pkgutil.get_loader(import_name) - - # Loader does not exist or we're referring to an unloaded main - # module or a main module without path (interactive sessions), go - # with the current working directory. - if loader is None or import_name == "__main__": - return os.getcwd() - - if hasattr(loader, "get_filename"): - filepath = loader.get_filename(import_name) - else: - # Fall back to imports. - __import__(import_name) - mod = sys.modules[import_name] - filepath = getattr(mod, "__file__", None) - - # If we don't have a file path it might be because it is a - # namespace package. In this case pick the root path from the - # first module that is contained in the package. - if filepath is None: - raise RuntimeError( - "No root path can be found for the provided module" - f" {import_name!r}. This can happen because the module" - " came from an import hook that does not provide file" - " name information or because it's a namespace package." - " In this case the root path needs to be explicitly" - " provided." - ) - - # filepath is import_name.py for a module, or __init__.py for a package. - return os.path.dirname(os.path.abspath(filepath)) - - -class locked_cached_property(werkzeug.utils.cached_property): - """A :func:`property` that is only evaluated once. Like - :class:`werkzeug.utils.cached_property` except access uses a lock - for thread safety. - - .. deprecated:: 2.3 - Will be removed in Flask 2.4. Use a lock inside the decorated function if - locking is needed. - - .. versionchanged:: 2.0 - Inherits from Werkzeug's ``cached_property`` (and ``property``). - """ - - def __init__( - self, - fget: t.Callable[[t.Any], t.Any], - name: str | None = None, - doc: str | None = None, - ) -> None: - import warnings - - warnings.warn( - "'locked_cached_property' is deprecated and will be removed in Flask 2.4." - " Use a lock inside the decorated function if locking is needed.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(fget, name=name, doc=doc) - self.lock = RLock() - - def __get__(self, obj: object, type: type = None) -> t.Any: # type: ignore - if obj is None: - return self - - with self.lock: - return super().__get__(obj, type=type) - - def __set__(self, obj: object, value: t.Any) -> None: - with self.lock: - super().__set__(obj, value) - - def __delete__(self, obj: object) -> None: - with self.lock: - super().__delete__(obj) - - -def is_ip(value: str) -> bool: - """Determine if the given string is an IP address. - - :param value: value to check - :type value: str - - :return: True if string is an IP address - :rtype: bool - - .. deprecated:: 2.3 - Will be removed in Flask 2.4. - """ - warnings.warn( - "The 'is_ip' function is deprecated and will be removed in Flask 2.4.", - DeprecationWarning, - stacklevel=2, - ) - - for family in (socket.AF_INET, socket.AF_INET6): - try: - socket.inet_pton(family, value) - except OSError: - pass - else: - return True - - return False - - -@lru_cache(maxsize=None) -def _split_blueprint_path(name: str) -> list[str]: - out: list[str] = [name] - - if "." 
in name: - out.extend(_split_blueprint_path(name.rpartition(".")[0])) - - return out diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/json/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/flask/json/__init__.py deleted file mode 100644 index f15296f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/json/__init__.py +++ /dev/null @@ -1,170 +0,0 @@ -from __future__ import annotations - -import json as _json -import typing as t - -from ..globals import current_app -from .provider import _default - -if t.TYPE_CHECKING: # pragma: no cover - from ..wrappers import Response - - -def dumps(obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dumps() ` - method, otherwise it will use :func:`json.dumps`. - - :param obj: The data to serialize. - :param kwargs: Arguments passed to the ``dumps`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dumps``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - if current_app: - return current_app.json.dumps(obj, **kwargs) - - kwargs.setdefault("default", _default) - return _json.dumps(obj, **kwargs) - - -def dump(obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dump() ` - method, otherwise it will use :func:`json.dump`. - - :param obj: The data to serialize. - :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: Arguments passed to the ``dump`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dump``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - Writing to a binary file, and the ``encoding`` argument, will be - removed in Flask 2.1. - """ - if current_app: - current_app.json.dump(obj, fp, **kwargs) - else: - kwargs.setdefault("default", _default) - _json.dump(obj, fp, **kwargs) - - -def loads(s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.loads() ` - method, otherwise it will use :func:`json.loads`. - - :param s: Text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``loads`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.loads``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The data must be a - string or UTF-8 bytes. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - if current_app: - return current_app.json.loads(s, **kwargs) - - return _json.loads(s, **kwargs) - - -def load(fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. 
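A short sketch of the module-level ``dumps``/``loads`` helpers above, run inside an app context so they delegate to ``app.json``; the sample values are illustrative::

    import datetime
    import decimal
    import uuid

    from flask import Flask, json

    app = Flask(__name__)

    data = {
        "when": datetime.date(2023, 1, 2),   # becomes an RFC 822 date string
        "id": uuid.uuid4(),                  # becomes str(uuid)
        "price": decimal.Decimal("9.99"),    # becomes a string
    }

    with app.app_context():
        s = json.dumps(data)    # uses the app's JSON provider settings
        print(s)
        print(json.loads(s))    # values come back as plain strings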
- - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.load() ` - method, otherwise it will use :func:`json.load`. - - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``load`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.load``, allowing an app to override - the behavior. - - .. versionchanged:: 2.2 - The ``app`` parameter will be removed in Flask 2.3. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The file must be text - mode, or binary mode with UTF-8 bytes. - """ - if current_app: - return current_app.json.load(fp, **kwargs) - - return _json.load(fp, **kwargs) - - -def jsonify(*args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. A dict or list returned from a view will be converted to a - JSON response automatically without needing to call this. - - This requires an active request or application context, and calls - :meth:`app.json.response() `. - - In debug mode, the output is formatted with indentation to make it - easier to read. This may also be controlled by the provider. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - - .. versionchanged:: 2.2 - Calls ``current_app.json.response``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 0.11 - Added support for serializing top-level arrays. This was a - security risk in ancient browsers. See :ref:`security-json`. - - .. 
versionadded:: 0.2 - """ - return current_app.json.response(*args, **kwargs) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/json/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/json/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index a1f4b20..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/json/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/json/__pycache__/provider.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/json/__pycache__/provider.cpython-311.pyc deleted file mode 100644 index 4152901..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/json/__pycache__/provider.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/json/__pycache__/tag.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask/json/__pycache__/tag.cpython-311.pyc deleted file mode 100644 index 92bae20..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask/json/__pycache__/tag.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/json/provider.py b/venv_flaskchat/lib/python3.11/site-packages/flask/json/provider.py deleted file mode 100644 index 0edd3d5..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/json/provider.py +++ /dev/null @@ -1,216 +0,0 @@ -from __future__ import annotations - -import dataclasses -import decimal -import json -import typing as t -import uuid -import weakref -from datetime import date - -from werkzeug.http import http_date - -if t.TYPE_CHECKING: # pragma: no cover - from ..app import Flask - from ..wrappers import Response - - -class JSONProvider: - """A standard set of JSON operations for an application. Subclasses - of this can be used to customize JSON behavior or use different - JSON libraries. - - To implement a provider for a specific library, subclass this base - class and implement at least :meth:`dumps` and :meth:`loads`. All - other methods have default implementations. - - To use a different provider, either subclass ``Flask`` and set - :attr:`~flask.Flask.json_provider_class` to a provider class, or set - :attr:`app.json ` to an instance of the class. - - :param app: An application instance. This will be stored as a - :class:`weakref.proxy` on the :attr:`_app` attribute. - - .. versionadded:: 2.2 - """ - - def __init__(self, app: Flask) -> None: - self._app = weakref.proxy(app) - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - :param obj: The data to serialize. - :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def dump(self, obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - :param obj: The data to serialize. - :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: May be passed to the underlying JSON library. - """ - fp.write(self.dumps(obj, **kwargs)) - - def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - :param s: Text or UTF-8 bytes. - :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def load(self, fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. 
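A hedged sketch of the subclassing route described in the class docstring above; the provider name and its set-to-list handling are invented for illustration, and only ``dumps``/``loads`` are overridden, as the docstring requires::

    import json
    import typing as t

    from flask import Flask
    from flask.json.provider import JSONProvider

    class SetListProvider(JSONProvider):
        """Stdlib json, with sets encoded as sorted lists (illustrative)."""

        def dumps(self, obj: t.Any, **kwargs: t.Any) -> str:
            def default(o: t.Any) -> t.Any:
                if isinstance(o, set):
                    return sorted(o)
                raise TypeError(f"not JSON serializable: {type(o).__name__}")

            kwargs.setdefault("default", default)
            return json.dumps(obj, **kwargs)

        def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any:
            return json.loads(s, **kwargs)

    class MyFlask(Flask):
        # Used to build app.json when the application object is created.
        json_provider_class = SetListProvider

    app = MyFlask(__name__)
    print(app.json.dumps({"tags": {"b", "a"}}))   # {"tags": ["a", "b"]}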
- - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: May be passed to the underlying JSON library. - """ - return self.loads(fp.read(), **kwargs) - - def _prepare_response_obj( - self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] - ) -> t.Any: - if args and kwargs: - raise TypeError("app.json.response() takes either args or kwargs, not both") - - if not args and not kwargs: - return None - - if len(args) == 1: - return args[0] - - return args or kwargs - - def response(self, *args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. - - The :func:`~flask.json.jsonify` function calls this method for - the current application. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - """ - obj = self._prepare_response_obj(args, kwargs) - return self._app.response_class(self.dumps(obj), mimetype="application/json") - - -def _default(o: t.Any) -> t.Any: - if isinstance(o, date): - return http_date(o) - - if isinstance(o, (decimal.Decimal, uuid.UUID)): - return str(o) - - if dataclasses and dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - - if hasattr(o, "__html__"): - return str(o.__html__()) - - raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable") - - -class DefaultJSONProvider(JSONProvider): - """Provide JSON operations using Python's built-in :mod:`json` - library. Serializes the following additional data types: - - - :class:`datetime.datetime` and :class:`datetime.date` are - serialized to :rfc:`822` strings. This is the same as the HTTP - date format. - - :class:`uuid.UUID` is serialized to a string. - - :class:`dataclasses.dataclass` is passed to - :func:`dataclasses.asdict`. - - :class:`~markupsafe.Markup` (or any object with a ``__html__`` - method) will call the ``__html__`` method to get a string. - """ - - default: t.Callable[[t.Any], t.Any] = staticmethod( - _default - ) # type: ignore[assignment] - """Apply this function to any object that :meth:`json.dumps` does - not know how to serialize. It should return a valid JSON type or - raise a ``TypeError``. - """ - - ensure_ascii = True - """Replace non-ASCII characters with escape sequences. This may be - more compatible with some clients, but can be disabled for better - performance and size. - """ - - sort_keys = True - """Sort the keys in any serialized dicts. This may be useful for - some caching situations, but can be disabled for better performance. - When enabled, keys must all be strings, they are not converted - before sorting. - """ - - compact: bool | None = None - """If ``True``, or ``None`` out of debug mode, the :meth:`response` - output will not add indentation, newlines, or spaces. If ``False``, - or ``None`` in debug mode, it will use a non-compact representation. - """ - - mimetype = "application/json" - """The mimetype set in :meth:`response`.""" - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON to a string. - - Keyword arguments are passed to :func:`json.dumps`. Sets some - parameter defaults from the :attr:`default`, - :attr:`ensure_ascii`, and :attr:`sort_keys` attributes. - - :param obj: The data to serialize. - :param kwargs: Passed to :func:`json.dumps`. 
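The attributes documented above can be overridden per application on ``app.json`` (a ``DefaultJSONProvider`` instance); a small sketch with an illustrative route::

    from flask import Flask, jsonify

    app = Flask(__name__)

    app.json.ensure_ascii = False   # keep UTF-8 text instead of \uXXXX escapes
    app.json.sort_keys = False      # preserve dict insertion order
    app.json.compact = True         # never indent, even in debug mode

    @app.route("/greet")
    def greet():
        return jsonify(message="héllo", order=["b", "a"])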
- """ - kwargs.setdefault("default", self.default) - kwargs.setdefault("ensure_ascii", self.ensure_ascii) - kwargs.setdefault("sort_keys", self.sort_keys) - return json.dumps(obj, **kwargs) - - def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON from a string or bytes. - - :param s: Text or UTF-8 bytes. - :param kwargs: Passed to :func:`json.loads`. - """ - return json.loads(s, **kwargs) - - def response(self, *args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with it. The response mimetype - will be "application/json" and can be changed with - :attr:`mimetype`. - - If :attr:`compact` is ``False`` or debug mode is enabled, the - output will be formatted to be easier to read. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - """ - obj = self._prepare_response_obj(args, kwargs) - dump_args: dict[str, t.Any] = {} - - if (self.compact is None and self._app.debug) or self.compact is False: - dump_args.setdefault("indent", 2) - else: - dump_args.setdefault("separators", (",", ":")) - - return self._app.response_class( - f"{self.dumps(obj, **dump_args)}\n", mimetype=self.mimetype - ) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/json/tag.py b/venv_flaskchat/lib/python3.11/site-packages/flask/json/tag.py deleted file mode 100644 index 91cc441..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/json/tag.py +++ /dev/null @@ -1,314 +0,0 @@ -""" -Tagged JSON -~~~~~~~~~~~ - -A compact representation for lossless serialization of non-standard JSON -types. :class:`~flask.sessions.SecureCookieSessionInterface` uses this -to serialize the session data, but it may be useful in other places. It -can be extended to support other types. - -.. autoclass:: TaggedJSONSerializer - :members: - -.. autoclass:: JSONTag - :members: - -Let's see an example that adds support for -:class:`~collections.OrderedDict`. Dicts don't have an order in JSON, so -to handle this we will dump the items as a list of ``[key, value]`` -pairs. Subclass :class:`JSONTag` and give it the new key ``' od'`` to -identify the type. The session serializer processes dicts first, so -insert the new tag at the front of the order since ``OrderedDict`` must -be processed before ``dict``. - -.. code-block:: python - - from flask.json.tag import JSONTag - - class TagOrderedDict(JSONTag): - __slots__ = ('serializer',) - key = ' od' - - def check(self, value): - return isinstance(value, OrderedDict) - - def to_json(self, value): - return [[k, self.serializer.tag(v)] for k, v in iteritems(value)] - - def to_python(self, value): - return OrderedDict(value) - - app.session_interface.serializer.register(TagOrderedDict, index=0) -""" -from __future__ import annotations - -import typing as t -from base64 import b64decode -from base64 import b64encode -from datetime import datetime -from uuid import UUID - -from markupsafe import Markup -from werkzeug.http import http_date -from werkzeug.http import parse_date - -from ..json import dumps -from ..json import loads - - -class JSONTag: - """Base class for defining type tags for :class:`TaggedJSONSerializer`.""" - - __slots__ = ("serializer",) - - #: The tag to mark the serialized object with. 
If ``None``, this tag is - #: only used as an intermediate step during tagging. - key: str | None = None - - def __init__(self, serializer: TaggedJSONSerializer) -> None: - """Create a tagger for the given serializer.""" - self.serializer = serializer - - def check(self, value: t.Any) -> bool: - """Check if the given value should be tagged by this tag.""" - raise NotImplementedError - - def to_json(self, value: t.Any) -> t.Any: - """Convert the Python object to an object that is a valid JSON type. - The tag will be added later.""" - raise NotImplementedError - - def to_python(self, value: t.Any) -> t.Any: - """Convert the JSON representation back to the correct type. The tag - will already be removed.""" - raise NotImplementedError - - def tag(self, value: t.Any) -> t.Any: - """Convert the value to a valid JSON type and add the tag structure - around it.""" - return {self.key: self.to_json(value)} - - -class TagDict(JSONTag): - """Tag for 1-item dicts whose only key matches a registered tag. - - Internally, the dict key is suffixed with `__`, and the suffix is removed - when deserializing. - """ - - __slots__ = () - key = " di" - - def check(self, value: t.Any) -> bool: - return ( - isinstance(value, dict) - and len(value) == 1 - and next(iter(value)) in self.serializer.tags - ) - - def to_json(self, value: t.Any) -> t.Any: - key = next(iter(value)) - return {f"{key}__": self.serializer.tag(value[key])} - - def to_python(self, value: t.Any) -> t.Any: - key = next(iter(value)) - return {key[:-2]: value[key]} - - -class PassDict(JSONTag): - __slots__ = () - - def check(self, value: t.Any) -> bool: - return isinstance(value, dict) - - def to_json(self, value: t.Any) -> t.Any: - # JSON objects may only have string keys, so don't bother tagging the - # key here. - return {k: self.serializer.tag(v) for k, v in value.items()} - - tag = to_json - - -class TagTuple(JSONTag): - __slots__ = () - key = " t" - - def check(self, value: t.Any) -> bool: - return isinstance(value, tuple) - - def to_json(self, value: t.Any) -> t.Any: - return [self.serializer.tag(item) for item in value] - - def to_python(self, value: t.Any) -> t.Any: - return tuple(value) - - -class PassList(JSONTag): - __slots__ = () - - def check(self, value: t.Any) -> bool: - return isinstance(value, list) - - def to_json(self, value: t.Any) -> t.Any: - return [self.serializer.tag(item) for item in value] - - tag = to_json - - -class TagBytes(JSONTag): - __slots__ = () - key = " b" - - def check(self, value: t.Any) -> bool: - return isinstance(value, bytes) - - def to_json(self, value: t.Any) -> t.Any: - return b64encode(value).decode("ascii") - - def to_python(self, value: t.Any) -> t.Any: - return b64decode(value) - - -class TagMarkup(JSONTag): - """Serialize anything matching the :class:`~markupsafe.Markup` API by - having a ``__html__`` method to the result of that method. 
Always - deserializes to an instance of :class:`~markupsafe.Markup`.""" - - __slots__ = () - key = " m" - - def check(self, value: t.Any) -> bool: - return callable(getattr(value, "__html__", None)) - - def to_json(self, value: t.Any) -> t.Any: - return str(value.__html__()) - - def to_python(self, value: t.Any) -> t.Any: - return Markup(value) - - -class TagUUID(JSONTag): - __slots__ = () - key = " u" - - def check(self, value: t.Any) -> bool: - return isinstance(value, UUID) - - def to_json(self, value: t.Any) -> t.Any: - return value.hex - - def to_python(self, value: t.Any) -> t.Any: - return UUID(value) - - -class TagDateTime(JSONTag): - __slots__ = () - key = " d" - - def check(self, value: t.Any) -> bool: - return isinstance(value, datetime) - - def to_json(self, value: t.Any) -> t.Any: - return http_date(value) - - def to_python(self, value: t.Any) -> t.Any: - return parse_date(value) - - -class TaggedJSONSerializer: - """Serializer that uses a tag system to compactly represent objects that - are not JSON types. Passed as the intermediate serializer to - :class:`itsdangerous.Serializer`. - - The following extra types are supported: - - * :class:`dict` - * :class:`tuple` - * :class:`bytes` - * :class:`~markupsafe.Markup` - * :class:`~uuid.UUID` - * :class:`~datetime.datetime` - """ - - __slots__ = ("tags", "order") - - #: Tag classes to bind when creating the serializer. Other tags can be - #: added later using :meth:`~register`. - default_tags = [ - TagDict, - PassDict, - TagTuple, - PassList, - TagBytes, - TagMarkup, - TagUUID, - TagDateTime, - ] - - def __init__(self) -> None: - self.tags: dict[str, JSONTag] = {} - self.order: list[JSONTag] = [] - - for cls in self.default_tags: - self.register(cls) - - def register( - self, - tag_class: type[JSONTag], - force: bool = False, - index: int | None = None, - ) -> None: - """Register a new tag with this serializer. - - :param tag_class: tag class to register. Will be instantiated with this - serializer instance. - :param force: overwrite an existing tag. If false (default), a - :exc:`KeyError` is raised. - :param index: index to insert the new tag in the tag order. Useful when - the new tag is a special case of an existing tag. If ``None`` - (default), the tag is appended to the end of the order. - - :raise KeyError: if the tag key is already registered and ``force`` is - not true. 
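A round-trip sketch of the serializer documented above, run outside an application context so the plain :mod:`json` fallback is used; the sample values are illustrative::

    from datetime import datetime, timezone
    from uuid import uuid4

    from flask.json.tag import TaggedJSONSerializer

    serializer = TaggedJSONSerializer()

    value = {
        "pair": (1, 2),          # tuple -> list under the ' t' tag
        "blob": b"\x00\x01",     # bytes -> base64 text under ' b'
        "id": uuid4(),           # UUID -> hex string under ' u'
        "ts": datetime(2023, 1, 2, tzinfo=timezone.utc),  # -> HTTP date under ' d'
    }

    s = serializer.dumps(value)      # compact JSON with tag markers
    restored = serializer.loads(s)   # original Python types come back
    assert restored["pair"] == (1, 2)
    assert restored["blob"] == b"\x00\x01"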
- """ - tag = tag_class(self) - key = tag.key - - if key is not None: - if not force and key in self.tags: - raise KeyError(f"Tag '{key}' is already registered.") - - self.tags[key] = tag - - if index is None: - self.order.append(tag) - else: - self.order.insert(index, tag) - - def tag(self, value: t.Any) -> dict[str, t.Any]: - """Convert a value to a tagged representation if necessary.""" - for tag in self.order: - if tag.check(value): - return tag.tag(value) - - return value - - def untag(self, value: dict[str, t.Any]) -> t.Any: - """Convert a tagged representation back to the original type.""" - if len(value) != 1: - return value - - key = next(iter(value)) - - if key not in self.tags: - return value - - return self.tags[key].to_python(value[key]) - - def dumps(self, value: t.Any) -> str: - """Tag the value and dump it to a compact JSON string.""" - return dumps(self.tag(value), separators=(",", ":")) - - def loads(self, value: str) -> t.Any: - """Load data from a JSON string and deserialized any tagged objects.""" - return loads(value, object_hook=self.untag) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/logging.py b/venv_flaskchat/lib/python3.11/site-packages/flask/logging.py deleted file mode 100644 index 99f6be8..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/logging.py +++ /dev/null @@ -1,76 +0,0 @@ -from __future__ import annotations - -import logging -import sys -import typing as t - -from werkzeug.local import LocalProxy - -from .globals import request - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - - -@LocalProxy -def wsgi_errors_stream() -> t.TextIO: - """Find the most appropriate error stream for the application. If a request - is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``. - - If you configure your own :class:`logging.StreamHandler`, you may want to - use this for the stream. If you are using file or dict configuration and - can't import this directly, you can refer to it as - ``ext://flask.logging.wsgi_errors_stream``. - """ - return request.environ["wsgi.errors"] if request else sys.stderr - - -def has_level_handler(logger: logging.Logger) -> bool: - """Check if there is a handler in the logging chain that will handle the - given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`. - """ - level = logger.getEffectiveLevel() - current = logger - - while current: - if any(handler.level <= level for handler in current.handlers): - return True - - if not current.propagate: - break - - current = current.parent # type: ignore - - return False - - -#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format -#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``. -default_handler = logging.StreamHandler(wsgi_errors_stream) # type: ignore -default_handler.setFormatter( - logging.Formatter("[%(asctime)s] %(levelname)s in %(module)s: %(message)s") -) - - -def create_logger(app: Flask) -> logging.Logger: - """Get the Flask app's logger and configure it if needed. - - The logger name will be the same as - :attr:`app.import_name `. - - When :attr:`~flask.Flask.debug` is enabled, set the logger level to - :data:`logging.DEBUG` if it is not set. - - If there is no handler for the logger's effective level, add a - :class:`~logging.StreamHandler` for - :func:`~flask.logging.wsgi_errors_stream` with a basic format. 
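The ``ext://flask.logging.wsgi_errors_stream`` reference mentioned just above is how dictionary-based logging configuration reuses this stream. A sketch (handler and formatter names are arbitrary), run before the app is created so create_logger() finds an existing handler at the effective level:

    from logging.config import dictConfig

    dictConfig({
        "version": 1,
        "formatters": {
            "default": {
                "format": "[%(asctime)s] %(levelname)s in %(module)s: %(message)s",
            }
        },
        "handlers": {
            "wsgi": {
                "class": "logging.StreamHandler",
                "stream": "ext://flask.logging.wsgi_errors_stream",
                "formatter": "default",
            }
        },
        "root": {"level": "INFO", "handlers": ["wsgi"]},
    })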
- """ - logger = logging.getLogger(app.name) - - if app.debug and not logger.level: - logger.setLevel(logging.DEBUG) - - if not has_level_handler(logger): - logger.addHandler(default_handler) - - return logger diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/py.typed b/venv_flaskchat/lib/python3.11/site-packages/flask/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/scaffold.py b/venv_flaskchat/lib/python3.11/site-packages/flask/scaffold.py deleted file mode 100644 index 6af6906..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/scaffold.py +++ /dev/null @@ -1,923 +0,0 @@ -from __future__ import annotations - -import importlib.util -import os -import pathlib -import pkgutil -import sys -import typing as t -from collections import defaultdict -from datetime import timedelta -from functools import update_wrapper - -from jinja2 import FileSystemLoader -from werkzeug.exceptions import default_exceptions -from werkzeug.exceptions import HTTPException -from werkzeug.utils import cached_property - -from . import typing as ft -from .cli import AppGroup -from .globals import current_app -from .helpers import get_root_path -from .helpers import send_from_directory -from .templating import _default_template_ctx_processor - -if t.TYPE_CHECKING: # pragma: no cover - from .wrappers import Response - -# a singleton sentinel value for parameter defaults -_sentinel = object() - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) -T_route = t.TypeVar("T_route", bound=ft.RouteCallable) - - -def setupmethod(f: F) -> F: - f_name = f.__name__ - - def wrapper_func(self, *args: t.Any, **kwargs: t.Any) -> t.Any: - self._check_setup_finished(f_name) - return f(self, *args, **kwargs) - - return t.cast(F, update_wrapper(wrapper_func, f)) - - -class Scaffold: - """Common behavior shared between :class:`~flask.Flask` and - :class:`~flask.blueprints.Blueprint`. - - :param import_name: The import name of the module where this object - is defined. Usually :attr:`__name__` should be used. - :param static_folder: Path to a folder of static files to serve. - If this is set, a static route will be added. - :param static_url_path: URL prefix for the static route. - :param template_folder: Path to a folder containing template files. - for rendering. If this is set, a Jinja loader will be added. - :param root_path: The path that static, template, and resource files - are relative to. Typically not set, it is discovered based on - the ``import_name``. - - .. 
versionadded:: 2.0 - """ - - name: str - _static_folder: str | None = None - _static_url_path: str | None = None - - def __init__( - self, - import_name: str, - static_folder: str | os.PathLike | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike | None = None, - root_path: str | None = None, - ): - #: The name of the package or module that this object belongs - #: to. Do not change this once it is set by the constructor. - self.import_name = import_name - - self.static_folder = static_folder # type: ignore - self.static_url_path = static_url_path - - #: The path to the templates folder, relative to - #: :attr:`root_path`, to add to the template loader. ``None`` if - #: templates should not be added. - self.template_folder = template_folder - - if root_path is None: - root_path = get_root_path(self.import_name) - - #: Absolute path to the package on the filesystem. Used to look - #: up resources contained in the package. - self.root_path = root_path - - #: The Click command group for registering CLI commands for this - #: object. The commands are available from the ``flask`` command - #: once the application has been discovered and blueprints have - #: been registered. - self.cli = AppGroup() - - #: A dictionary mapping endpoint names to view functions. - #: - #: To register a view function, use the :meth:`route` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.view_functions: dict[str, t.Callable] = {} - - #: A data structure of registered error handlers, in the format - #: ``{scope: {code: {class: handler}}}``. The ``scope`` key is - #: the name of a blueprint the handlers are active for, or - #: ``None`` for all requests. The ``code`` key is the HTTP - #: status code for ``HTTPException``, or ``None`` for - #: other exceptions. The innermost dictionary maps exception - #: classes to handler functions. - #: - #: To register an error handler, use the :meth:`errorhandler` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.error_handler_spec: dict[ - ft.AppOrBlueprintKey, - dict[int | None, dict[type[Exception], ft.ErrorHandlerCallable]], - ] = defaultdict(lambda: defaultdict(dict)) - - #: A data structure of functions to call at the beginning of - #: each request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`before_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.before_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.BeforeRequestCallable] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`after_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. 
- self.after_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.AfterRequestCallable] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request even if an exception is raised, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`teardown_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.teardown_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.TeardownCallable] - ] = defaultdict(list) - - #: A data structure of functions to call to pass extra context - #: values when rendering templates, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`context_processor` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.template_context_processors: dict[ - ft.AppOrBlueprintKey, list[ft.TemplateContextProcessorCallable] - ] = defaultdict(list, {None: [_default_template_ctx_processor]}) - - #: A data structure of functions to call to modify the keyword - #: arguments passed to the view function, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the - #: :meth:`url_value_preprocessor` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.url_value_preprocessors: dict[ - ft.AppOrBlueprintKey, - list[ft.URLValuePreprocessorCallable], - ] = defaultdict(list) - - #: A data structure of functions to call to modify the keyword - #: arguments when generating URLs, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`url_defaults` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.url_default_functions: dict[ - ft.AppOrBlueprintKey, list[ft.URLDefaultCallable] - ] = defaultdict(list) - - def __repr__(self) -> str: - return f"<{type(self).__name__} {self.name!r}>" - - def _check_setup_finished(self, f_name: str) -> None: - raise NotImplementedError - - @property - def static_folder(self) -> str | None: - """The absolute path to the configured static folder. ``None`` - if no static folder is set. - """ - if self._static_folder is not None: - return os.path.join(self.root_path, self._static_folder) - else: - return None - - @static_folder.setter - def static_folder(self, value: str | os.PathLike | None) -> None: - if value is not None: - value = os.fspath(value).rstrip(r"\/") - - self._static_folder = value - - @property - def has_static_folder(self) -> bool: - """``True`` if :attr:`static_folder` is set. - - .. versionadded:: 0.5 - """ - return self.static_folder is not None - - @property - def static_url_path(self) -> str | None: - """The URL prefix that the static route will be accessible from. - - If it was not configured during init, it is derived from - :attr:`static_folder`. 
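To make the derivation described above concrete, a quick sketch (the folder names are made up):

    from flask import Flask

    app = Flask(__name__, static_folder="assets")
    assert app.static_url_path == "/assets"    # derived from the folder basename

    app = Flask(__name__, static_folder="assets", static_url_path="/files")
    assert app.static_url_path == "/files"     # explicit value wins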
- """ - if self._static_url_path is not None: - return self._static_url_path - - if self.static_folder is not None: - basename = os.path.basename(self.static_folder) - return f"/{basename}".rstrip("/") - - return None - - @static_url_path.setter - def static_url_path(self, value: str | None) -> None: - if value is not None: - value = value.rstrip("/") - - self._static_url_path = value - - def get_send_file_max_age(self, filename: str | None) -> int | None: - """Used by :func:`send_file` to determine the ``max_age`` cache - value for a given file path if it wasn't passed. - - By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from - the configuration of :data:`~flask.current_app`. This defaults - to ``None``, which tells the browser to use conditional requests - instead of a timed cache, which is usually preferable. - - .. versionchanged:: 2.0 - The default configuration is ``None`` instead of 12 hours. - - .. versionadded:: 0.9 - """ - value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] - - if value is None: - return None - - if isinstance(value, timedelta): - return int(value.total_seconds()) - - return value - - def send_static_file(self, filename: str) -> Response: - """The view function used to serve files from - :attr:`static_folder`. A route is automatically registered for - this view at :attr:`static_url_path` if :attr:`static_folder` is - set. - - .. versionadded:: 0.5 - """ - if not self.has_static_folder: - raise RuntimeError("'static_folder' must be set to serve static_files.") - - # send_file only knows to call get_send_file_max_age on the app, - # call it here so it works for blueprints too. - max_age = self.get_send_file_max_age(filename) - return send_from_directory( - t.cast(str, self.static_folder), filename, max_age=max_age - ) - - @cached_property - def jinja_loader(self) -> FileSystemLoader | None: - """The Jinja loader for this object's templates. By default this - is a class :class:`jinja2.loaders.FileSystemLoader` to - :attr:`template_folder` if it is set. - - .. versionadded:: 0.5 - """ - if self.template_folder is not None: - return FileSystemLoader(os.path.join(self.root_path, self.template_folder)) - else: - return None - - def open_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: - """Open a resource file relative to :attr:`root_path` for - reading. - - For example, if the file ``schema.sql`` is next to the file - ``app.py`` where the ``Flask`` app is defined, it can be opened - with: - - .. code-block:: python - - with app.open_resource("schema.sql") as f: - conn.executescript(f.read()) - - :param resource: Path to the resource relative to - :attr:`root_path`. - :param mode: Open the file in this mode. Only reading is - supported, valid values are "r" (or "rt") and "rb". - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading.") - - return open(os.path.join(self.root_path, resource), mode) - - def _method_route( - self, - method: str, - rule: str, - options: dict, - ) -> t.Callable[[T_route], T_route]: - if "methods" in options: - raise TypeError("Use the 'route' decorator to use the 'methods' argument.") - - return self.route(rule, methods=[method], **options) - - @setupmethod - def get(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["GET"]``. - - .. 
versionadded:: 2.0 - """ - return self._method_route("GET", rule, options) - - @setupmethod - def post(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["POST"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("POST", rule, options) - - @setupmethod - def put(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PUT"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("PUT", rule, options) - - @setupmethod - def delete(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["DELETE"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("DELETE", rule, options) - - @setupmethod - def patch(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PATCH"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("PATCH", rule, options) - - @setupmethod - def route(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Decorate a view function to register it with the given URL - rule and options. Calls :meth:`add_url_rule`, which has more - details about the implementation. - - .. code-block:: python - - @app.route("/") - def index(): - return "Hello, World!" - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` and - ``OPTIONS`` are added automatically. - - :param rule: The URL rule string. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. - """ - - def decorator(f: T_route) -> T_route: - endpoint = options.pop("endpoint", None) - self.add_url_rule(rule, endpoint, f, **options) - return f - - return decorator - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a rule for routing incoming requests and building - URLs. The :meth:`route` decorator is a shortcut to call this - with the ``view_func`` argument. These are equivalent: - - .. code-block:: python - - @app.route("/") - def index(): - ... - - .. code-block:: python - - def index(): - ... - - app.add_url_rule("/", view_func=index) - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. An error - will be raised if a function has already been registered for the - endpoint. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` is - always added automatically, and ``OPTIONS`` is added - automatically by default. - - ``view_func`` does not necessarily need to be passed, but if the - rule should participate in routing an endpoint name must be - associated with a view function at some point with the - :meth:`endpoint` decorator. - - .. code-block:: python - - app.add_url_rule("/", endpoint="index") - - @app.endpoint("index") - def index(): - ... - - If ``view_func`` has a ``required_methods`` attribute, those - methods are added to the passed and automatic methods. If it - has a ``provide_automatic_methods`` attribute, it is used as the - default if the parameter is not passed. - - :param rule: The URL rule string. 
- :param endpoint: The endpoint name to associate with the rule - and view function. Used when routing and building URLs. - Defaults to ``view_func.__name__``. - :param view_func: The view function to associate with the - endpoint name. - :param provide_automatic_options: Add the ``OPTIONS`` method and - respond to ``OPTIONS`` requests automatically. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. - """ - raise NotImplementedError - - @setupmethod - def endpoint(self, endpoint: str) -> t.Callable[[F], F]: - """Decorate a view function to register it for the given - endpoint. Used if a rule is added without a ``view_func`` with - :meth:`add_url_rule`. - - .. code-block:: python - - app.add_url_rule("/ex", endpoint="example") - - @app.endpoint("example") - def example(): - ... - - :param endpoint: The endpoint name to associate with the view - function. - """ - - def decorator(f: F) -> F: - self.view_functions[endpoint] = f - return f - - return decorator - - @setupmethod - def before_request(self, f: T_before_request) -> T_before_request: - """Register a function to run before each request. - - For example, this can be used to open a database connection, or - to load the logged in user from the session. - - .. code-block:: python - - @app.before_request - def load_user(): - if "user_id" in session: - g.user = db.session.get(session["user_id"]) - - The function will be called without any arguments. If it returns - a non-``None`` value, the value is handled as if it was the - return value from the view, and further request handling is - stopped. - - This is available on both app and blueprint objects. When used on an app, this - executes before every request. When used on a blueprint, this executes before - every request that the blueprint handles. To register with a blueprint and - execute before every request, use :meth:`.Blueprint.before_app_request`. - """ - self.before_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def after_request(self, f: T_after_request) -> T_after_request: - """Register a function to run after each request to this object. - - The function is called with the response object, and must return - a response object. This allows the functions to modify or - replace the response before it is sent. - - If a function raises an exception, any remaining - ``after_request`` functions will not be called. Therefore, this - should not be used for actions that must execute, such as to - close resources. Use :meth:`teardown_request` for that. - - This is available on both app and blueprint objects. When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.after_app_request`. - """ - self.after_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def teardown_request(self, f: T_teardown) -> T_teardown: - """Register a function to be called when the request context is - popped. Typically this happens at the end of each request, but - contexts may be pushed manually as well during testing. - - .. code-block:: python - - with app.test_request_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the request context is - made inactive. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. 
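Together, ``before_request``, ``after_request`` and the ``teardown_request`` hook described next wrap every request. A small sketch registering all three (the timing header is illustrative):

    import time

    from flask import Flask, g

    app = Flask(__name__)

    @app.before_request
    def start_timer():
        # Runs before the view; a non-None return value would end the request here.
        g.start = time.perf_counter()

    @app.after_request
    def add_timing_header(response):
        # Receives and must return a response object.
        response.headers["X-Elapsed"] = f"{time.perf_counter() - g.start:.4f}"
        return response

    @app.teardown_request
    def cleanup(exc):
        # Always runs when the request context is popped; exc is the
        # unhandled exception, if any, and the return value is ignored.
        pass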
If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. - - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - This is available on both app and blueprint objects. When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.teardown_app_request`. - """ - self.teardown_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def context_processor( - self, - f: T_template_context_processor, - ) -> T_template_context_processor: - """Registers a template context processor function. These functions run before - rendering a template. The keys of the returned dict are added as variables - available in the template. - - This is available on both app and blueprint objects. When used on an app, this - is called for every rendered template. When used on a blueprint, this is called - for templates rendered from the blueprint's views. To register with a blueprint - and affect every template, use :meth:`.Blueprint.app_context_processor`. - """ - self.template_context_processors[None].append(f) - return f - - @setupmethod - def url_value_preprocessor( - self, - f: T_url_value_preprocessor, - ) -> T_url_value_preprocessor: - """Register a URL value preprocessor function for all view - functions in the application. These functions will be called before the - :meth:`before_request` functions. - - The function can modify the values captured from the matched url before - they are passed to the view. For example, this can be used to pop a - common language code value and place it in ``g`` rather than pass it to - every view. - - The function is passed the endpoint name and values dict. The return - value is ignored. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_value_preprocessor`. - """ - self.url_value_preprocessors[None].append(f) - return f - - @setupmethod - def url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Callback function for URL defaults for all view functions of the - application. It's called with the endpoint and values and should - update the values passed in place. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_defaults`. - """ - self.url_default_functions[None].append(f) - return f - - @setupmethod - def errorhandler( - self, code_or_exception: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Register a function to handle errors by code or exception class. - - A decorator that is used to register a function given an - error code. 
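The language-code pattern the ``url_value_preprocessor`` docstring alludes to looks roughly like this in practice (a sketch; the ``<lang_code>`` rule and the ``en`` fallback are illustrative):

    from flask import Flask, g

    app = Flask(__name__)

    @app.url_value_preprocessor
    def pull_lang_code(endpoint, values):
        # Pop the captured value so views do not need a lang_code argument.
        g.lang_code = (values or {}).pop("lang_code", None)

    @app.url_defaults
    def add_lang_code(endpoint, values):
        # Re-inject the value when building URLs with url_for().
        values.setdefault("lang_code", getattr(g, "lang_code", "en"))

    @app.context_processor
    def inject_lang():
        # Extra variables available in every rendered template.
        return {"lang_code": getattr(g, "lang_code", "en")}

    @app.route("/<lang_code>/about")
    def about():
        return f"about ({g.lang_code})"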
Example:: - - @app.errorhandler(404) - def page_not_found(error): - return 'This page does not exist', 404 - - You can also register handlers for arbitrary exceptions:: - - @app.errorhandler(DatabaseError) - def special_exception_handler(error): - return 'Database connection failed', 500 - - This is available on both app and blueprint objects. When used on an app, this - can handle errors from every request. When used on a blueprint, this can handle - errors from requests that the blueprint handles. To register with a blueprint - and affect every request, use :meth:`.Blueprint.app_errorhandler`. - - .. versionadded:: 0.7 - Use :meth:`register_error_handler` instead of modifying - :attr:`error_handler_spec` directly, for application wide error - handlers. - - .. versionadded:: 0.7 - One can now additionally also register custom exception types - that do not necessarily have to be a subclass of the - :class:`~werkzeug.exceptions.HTTPException` class. - - :param code_or_exception: the code as integer for the handler, or - an arbitrary exception - """ - - def decorator(f: T_error_handler) -> T_error_handler: - self.register_error_handler(code_or_exception, f) - return f - - return decorator - - @setupmethod - def register_error_handler( - self, - code_or_exception: type[Exception] | int, - f: ft.ErrorHandlerCallable, - ) -> None: - """Alternative error attach function to the :meth:`errorhandler` - decorator that is more straightforward to use for non decorator - usage. - - .. versionadded:: 0.7 - """ - exc_class, code = self._get_exc_class_and_code(code_or_exception) - self.error_handler_spec[None][code][exc_class] = f - - @staticmethod - def _get_exc_class_and_code( - exc_class_or_code: type[Exception] | int, - ) -> tuple[type[Exception], int | None]: - """Get the exception class being handled. For HTTP status codes - or ``HTTPException`` subclasses, return both the exception and - status code. - - :param exc_class_or_code: Any exception class, or an HTTP status - code as an integer. - """ - exc_class: type[Exception] - - if isinstance(exc_class_or_code, int): - try: - exc_class = default_exceptions[exc_class_or_code] - except KeyError: - raise ValueError( - f"'{exc_class_or_code}' is not a recognized HTTP" - " error code. Use a subclass of HTTPException with" - " that code instead." - ) from None - else: - exc_class = exc_class_or_code - - if isinstance(exc_class, Exception): - raise TypeError( - f"{exc_class!r} is an instance, not a class. Handlers" - " can only be registered for Exception classes or HTTP" - " error codes." - ) - - if not issubclass(exc_class, Exception): - raise ValueError( - f"'{exc_class.__name__}' is not a subclass of Exception." - " Handlers can only be registered for Exception classes" - " or HTTP error codes." - ) - - if issubclass(exc_class, HTTPException): - return exc_class, exc_class.code - else: - return exc_class, None - - -def _endpoint_from_view_func(view_func: t.Callable) -> str: - """Internal helper that returns the default endpoint for a given - function. This always is the function name. - """ - assert view_func is not None, "expected view func if endpoint is not provided." - return view_func.__name__ - - -def _matching_loader_thinks_module_is_package(loader, mod_name): - """Attempt to figure out if the given name is a package or a module. - - :param: loader: The loader that handled the name. - :param mod_name: The name of the package or module. - """ - # Use loader.is_package if it's available. 
- if hasattr(loader, "is_package"): - return loader.is_package(mod_name) - - cls = type(loader) - - # NamespaceLoader doesn't implement is_package, but all names it - # loads must be packages. - if cls.__module__ == "_frozen_importlib" and cls.__name__ == "NamespaceLoader": - return True - - # Otherwise we need to fail with an error that explains what went - # wrong. - raise AttributeError( - f"'{cls.__name__}.is_package()' must be implemented for PEP 302" - f" import hooks." - ) - - -def _path_is_relative_to(path: pathlib.PurePath, base: str) -> bool: - # Path.is_relative_to doesn't exist until Python 3.9 - try: - path.relative_to(base) - return True - except ValueError: - return False - - -def _find_package_path(import_name): - """Find the path that contains the package or module.""" - root_mod_name, _, _ = import_name.partition(".") - - try: - root_spec = importlib.util.find_spec(root_mod_name) - - if root_spec is None: - raise ValueError("not found") - # ImportError: the machinery told us it does not exist - # ValueError: - # - the module name was invalid - # - the module name is __main__ - # - *we* raised `ValueError` due to `root_spec` being `None` - except (ImportError, ValueError): - pass # handled below - else: - # namespace package - if root_spec.origin in {"namespace", None}: - package_spec = importlib.util.find_spec(import_name) - if package_spec is not None and package_spec.submodule_search_locations: - # Pick the path in the namespace that contains the submodule. - package_path = pathlib.Path( - os.path.commonpath(package_spec.submodule_search_locations) - ) - search_locations = ( - location - for location in root_spec.submodule_search_locations - if _path_is_relative_to(package_path, location) - ) - else: - # Pick the first path. - search_locations = iter(root_spec.submodule_search_locations) - return os.path.dirname(next(search_locations)) - # a package (with __init__.py) - elif root_spec.submodule_search_locations: - return os.path.dirname(os.path.dirname(root_spec.origin)) - # just a normal module - else: - return os.path.dirname(root_spec.origin) - - # we were unable to find the `package_path` using PEP 451 loaders - loader = pkgutil.get_loader(root_mod_name) - - if loader is None or root_mod_name == "__main__": - # import name is not found, or interactive/main module - return os.getcwd() - - if hasattr(loader, "get_filename"): - filename = loader.get_filename(root_mod_name) - elif hasattr(loader, "archive"): - # zipimporter's loader.archive points to the .egg or .zip file. - filename = loader.archive - else: - # At least one loader is missing both get_filename and archive: - # Google App Engine's HardenedModulesHook, use __file__. - filename = importlib.import_module(root_mod_name).__file__ - - package_path = os.path.abspath(os.path.dirname(filename)) - - # If the imported name is a package, filename is currently pointing - # to the root of the package, need to get the current directory. - if _matching_loader_thinks_module_is_package(loader, root_mod_name): - package_path = os.path.dirname(package_path) - - return package_path - - -def find_package(import_name: str): - """Find the prefix that a package is installed under, and the path - that it would be imported from. - - The prefix is the directory containing the standard directory - hierarchy (lib, bin, etc.). If the package is not installed to the - system (:attr:`sys.prefix`) or a virtualenv (``site-packages``), - ``None`` is returned. 
- - The path is the entry in :attr:`sys.path` that contains the package - for import. If the package is not installed, it's assumed that the - package was imported from the current working directory. - """ - package_path = _find_package_path(import_name) - py_prefix = os.path.abspath(sys.prefix) - - # installed to the system - if _path_is_relative_to(pathlib.PurePath(package_path), py_prefix): - return py_prefix, package_path - - site_parent, site_folder = os.path.split(package_path) - - # installed to a virtualenv - if site_folder.lower() == "site-packages": - parent, folder = os.path.split(site_parent) - - # Windows (prefix/lib/site-packages) - if folder.lower() == "lib": - return parent, package_path - - # Unix (prefix/lib/pythonX.Y/site-packages) - if os.path.basename(parent).lower() == "lib": - return os.path.dirname(parent), package_path - - # something else (prefix/site-packages) - return site_parent, package_path - - # not installed - return None, package_path diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/sessions.py b/venv_flaskchat/lib/python3.11/site-packages/flask/sessions.py deleted file mode 100644 index e5650d6..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/sessions.py +++ /dev/null @@ -1,367 +0,0 @@ -from __future__ import annotations - -import hashlib -import typing as t -from collections.abc import MutableMapping -from datetime import datetime -from datetime import timezone - -from itsdangerous import BadSignature -from itsdangerous import URLSafeTimedSerializer -from werkzeug.datastructures import CallbackDict - -from .json.tag import TaggedJSONSerializer - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .wrappers import Request, Response - - -class SessionMixin(MutableMapping): - """Expands a basic dictionary with session attributes.""" - - @property - def permanent(self) -> bool: - """This reflects the ``'_permanent'`` key in the dict.""" - return self.get("_permanent", False) - - @permanent.setter - def permanent(self, value: bool) -> None: - self["_permanent"] = bool(value) - - #: Some implementations can detect whether a session is newly - #: created, but that is not guaranteed. Use with caution. The mixin - # default is hard-coded ``False``. - new = False - - #: Some implementations can detect changes to the session and set - #: this when that happens. The mixin default is hard coded to - #: ``True``. - modified = True - - #: Some implementations can detect when session data is read or - #: written and set this when that happens. The mixin default is hard - #: coded to ``True``. - accessed = True - - -class SecureCookieSession(CallbackDict, SessionMixin): - """Base class for sessions based on signed cookies. - - This session backend will set the :attr:`modified` and - :attr:`accessed` attributes. It cannot reliably track whether a - session is new (vs. empty), so :attr:`new` remains hard coded to - ``False``. - """ - - #: When data is changed, this is set to ``True``. Only the session - #: dictionary itself is tracked; if the session contains mutable - #: data (for example a nested dict) then this must be set to - #: ``True`` manually when modifying that data. The session cookie - #: will only be written to the response if this is ``True``. - modified = False - - #: When data is read or written, this is set to ``True``. Used by - # :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie`` - #: header, which allows caching proxies to cache different pages for - #: different users. 
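The ``modified`` caveat above is a common pitfall: in-place changes to nested structures are not seen by the callback dict, so the cookie may silently not be rewritten. A short sketch:

    from flask import Flask, session

    app = Flask(__name__)
    app.secret_key = "dev"   # placeholder secret for the sketch

    @app.route("/bump")
    def bump():
        counters = session.setdefault("counters", {})
        counters["hits"] = counters.get("hits", 0) + 1
        # Only the outer dict is tracked; flag the nested change explicitly.
        session.modified = True
        return str(counters["hits"])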
- accessed = False - - def __init__(self, initial: t.Any = None) -> None: - def on_update(self) -> None: - self.modified = True - self.accessed = True - - super().__init__(initial, on_update) - - def __getitem__(self, key: str) -> t.Any: - self.accessed = True - return super().__getitem__(key) - - def get(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().get(key, default) - - def setdefault(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().setdefault(key, default) - - -class NullSession(SecureCookieSession): - """Class used to generate nicer error messages if sessions are not - available. Will still allow read-only access to the empty session - but fail on setting. - """ - - def _fail(self, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - raise RuntimeError( - "The session is unavailable because no secret " - "key was set. Set the secret_key on the " - "application to something unique and secret." - ) - - __setitem__ = __delitem__ = clear = pop = popitem = update = setdefault = _fail # type: ignore # noqa: B950 - del _fail - - -class SessionInterface: - """The basic interface you have to implement in order to replace the - default session interface which uses werkzeug's securecookie - implementation. The only methods you have to implement are - :meth:`open_session` and :meth:`save_session`, the others have - useful defaults which you don't need to change. - - The session object returned by the :meth:`open_session` method has to - provide a dictionary like interface plus the properties and methods - from the :class:`SessionMixin`. We recommend just subclassing a dict - and adding that mixin:: - - class Session(dict, SessionMixin): - pass - - If :meth:`open_session` returns ``None`` Flask will call into - :meth:`make_null_session` to create a session that acts as replacement - if the session support cannot work because some requirement is not - fulfilled. The default :class:`NullSession` class that is created - will complain that the secret key was not set. - - To replace the session interface on an application all you have to do - is to assign :attr:`flask.Flask.session_interface`:: - - app = Flask(__name__) - app.session_interface = MySessionInterface() - - Multiple requests with the same session may be sent and handled - concurrently. When implementing a new session interface, consider - whether reads or writes to the backing store must be synchronized. - There is no guarantee on the order in which the session for each - request is opened or saved, it will occur in the order that requests - begin and end processing. - - .. versionadded:: 0.8 - """ - - #: :meth:`make_null_session` will look here for the class that should - #: be created when a null session is requested. Likewise the - #: :meth:`is_null_session` method will perform a typecheck against - #: this type. - null_session_class = NullSession - - #: A flag that indicates if the session interface is pickle based. - #: This can be used by Flask extensions to make a decision in regards - #: to how to deal with the session object. - #: - #: .. versionadded:: 0.10 - pickle_based = False - - def make_null_session(self, app: Flask) -> NullSession: - """Creates a null session which acts as a replacement object if the - real session support could not be loaded due to a configuration - error. 
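A skeleton of the replacement interface described above might look like the following. This is only a sketch: the in-memory store and class names are invented, and a real backend would also need expiry and locking.

    import uuid

    from flask import Flask, Request, Response
    from flask.sessions import SessionInterface, SessionMixin

    class MemorySession(dict, SessionMixin):
        pass

    class MemorySessionInterface(SessionInterface):
        store: dict[str, dict] = {}

        def open_session(self, app: Flask, request: Request) -> MemorySession:
            sid = request.cookies.get(self.get_cookie_name(app)) or uuid.uuid4().hex
            session = MemorySession(self.store.get(sid, {}))
            session.sid = sid
            return session

        def save_session(self, app: Flask, session, response: Response) -> None:
            self.store[session.sid] = dict(session)
            response.set_cookie(
                self.get_cookie_name(app),
                session.sid,
                path=self.get_cookie_path(app),
                httponly=self.get_cookie_httponly(app),
            )

    app = Flask(__name__)
    app.session_interface = MemorySessionInterface()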
This mainly aids the user experience because the job of the - null session is to still support lookup without complaining but - modifications are answered with a helpful error message of what - failed. - - This creates an instance of :attr:`null_session_class` by default. - """ - return self.null_session_class() - - def is_null_session(self, obj: object) -> bool: - """Checks if a given object is a null session. Null sessions are - not asked to be saved. - - This checks if the object is an instance of :attr:`null_session_class` - by default. - """ - return isinstance(obj, self.null_session_class) - - def get_cookie_name(self, app: Flask) -> str: - """The name of the session cookie. Uses``app.config["SESSION_COOKIE_NAME"]``.""" - return app.config["SESSION_COOKIE_NAME"] - - def get_cookie_domain(self, app: Flask) -> str | None: - """The value of the ``Domain`` parameter on the session cookie. If not set, - browsers will only send the cookie to the exact domain it was set from. - Otherwise, they will send it to any subdomain of the given value as well. - - Uses the :data:`SESSION_COOKIE_DOMAIN` config. - - .. versionchanged:: 2.3 - Not set by default, does not fall back to ``SERVER_NAME``. - """ - rv = app.config["SESSION_COOKIE_DOMAIN"] - return rv if rv else None - - def get_cookie_path(self, app: Flask) -> str: - """Returns the path for which the cookie should be valid. The - default implementation uses the value from the ``SESSION_COOKIE_PATH`` - config var if it's set, and falls back to ``APPLICATION_ROOT`` or - uses ``/`` if it's ``None``. - """ - return app.config["SESSION_COOKIE_PATH"] or app.config["APPLICATION_ROOT"] - - def get_cookie_httponly(self, app: Flask) -> bool: - """Returns True if the session cookie should be httponly. This - currently just returns the value of the ``SESSION_COOKIE_HTTPONLY`` - config var. - """ - return app.config["SESSION_COOKIE_HTTPONLY"] - - def get_cookie_secure(self, app: Flask) -> bool: - """Returns True if the cookie should be secure. This currently - just returns the value of the ``SESSION_COOKIE_SECURE`` setting. - """ - return app.config["SESSION_COOKIE_SECURE"] - - def get_cookie_samesite(self, app: Flask) -> str: - """Return ``'Strict'`` or ``'Lax'`` if the cookie should use the - ``SameSite`` attribute. This currently just returns the value of - the :data:`SESSION_COOKIE_SAMESITE` setting. - """ - return app.config["SESSION_COOKIE_SAMESITE"] - - def get_expiration_time(self, app: Flask, session: SessionMixin) -> datetime | None: - """A helper method that returns an expiration date for the session - or ``None`` if the session is linked to the browser session. The - default implementation returns now + the permanent session - lifetime configured on the application. - """ - if session.permanent: - return datetime.now(timezone.utc) + app.permanent_session_lifetime - return None - - def should_set_cookie(self, app: Flask, session: SessionMixin) -> bool: - """Used by session backends to determine if a ``Set-Cookie`` header - should be set for this session cookie for this response. If the session - has been modified, the cookie is set. If the session is permanent and - the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is - always set. - - This check is usually skipped if the session was deleted. - - .. 
versionadded:: 0.11 - """ - - return session.modified or ( - session.permanent and app.config["SESSION_REFRESH_EACH_REQUEST"] - ) - - def open_session(self, app: Flask, request: Request) -> SessionMixin | None: - """This is called at the beginning of each request, after - pushing the request context, before matching the URL. - - This must return an object which implements a dictionary-like - interface as well as the :class:`SessionMixin` interface. - - This will return ``None`` to indicate that loading failed in - some way that is not immediately an error. The request - context will fall back to using :meth:`make_null_session` - in this case. - """ - raise NotImplementedError() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - """This is called at the end of each request, after generating - a response, before removing the request context. It is skipped - if :meth:`is_null_session` returns ``True``. - """ - raise NotImplementedError() - - -session_json_serializer = TaggedJSONSerializer() - - -class SecureCookieSessionInterface(SessionInterface): - """The default session interface that stores sessions in signed cookies - through the :mod:`itsdangerous` module. - """ - - #: the salt that should be applied on top of the secret key for the - #: signing of cookie based sessions. - salt = "cookie-session" - #: the hash function to use for the signature. The default is sha1 - digest_method = staticmethod(hashlib.sha1) - #: the name of the itsdangerous supported key derivation. The default - #: is hmac. - key_derivation = "hmac" - #: A python serializer for the payload. The default is a compact - #: JSON derived serializer with support for some extra Python types - #: such as datetime objects or tuples. - serializer = session_json_serializer - session_class = SecureCookieSession - - def get_signing_serializer(self, app: Flask) -> URLSafeTimedSerializer | None: - if not app.secret_key: - return None - signer_kwargs = dict( - key_derivation=self.key_derivation, digest_method=self.digest_method - ) - return URLSafeTimedSerializer( - app.secret_key, - salt=self.salt, - serializer=self.serializer, - signer_kwargs=signer_kwargs, - ) - - def open_session(self, app: Flask, request: Request) -> SecureCookieSession | None: - s = self.get_signing_serializer(app) - if s is None: - return None - val = request.cookies.get(self.get_cookie_name(app)) - if not val: - return self.session_class() - max_age = int(app.permanent_session_lifetime.total_seconds()) - try: - data = s.loads(val, max_age=max_age) - return self.session_class(data) - except BadSignature: - return self.session_class() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - name = self.get_cookie_name(app) - domain = self.get_cookie_domain(app) - path = self.get_cookie_path(app) - secure = self.get_cookie_secure(app) - samesite = self.get_cookie_samesite(app) - httponly = self.get_cookie_httponly(app) - - # Add a "Vary: Cookie" header if the session was accessed at all. - if session.accessed: - response.vary.add("Cookie") - - # If the session is modified to be empty, remove the cookie. - # If the session is empty, return without setting the cookie. 
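Putting get_signing_serializer() above together with the tag serializer from earlier in this diff, the session cookie is just tagged JSON signed by itsdangerous, and the same construction can be reproduced outside a request (a sketch; the secret key is a placeholder):

    import hashlib

    from flask.sessions import session_json_serializer
    from itsdangerous import URLSafeTimedSerializer

    signer = URLSafeTimedSerializer(
        "dev-secret-key",            # stands in for app.secret_key
        salt="cookie-session",
        serializer=session_json_serializer,
        signer_kwargs={"key_derivation": "hmac", "digest_method": hashlib.sha1},
    )

    cookie_value = signer.dumps({"user_id": 42})   # what ends up in the cookie
    print(signer.loads(cookie_value))              # {'user_id': 42}; BadSignature if tampered with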
- if not session: - if session.modified: - response.delete_cookie( - name, - domain=domain, - path=path, - secure=secure, - samesite=samesite, - httponly=httponly, - ) - response.vary.add("Cookie") - - return - - if not self.should_set_cookie(app, session): - return - - expires = self.get_expiration_time(app, session) - val = self.get_signing_serializer(app).dumps(dict(session)) # type: ignore - response.set_cookie( - name, - val, # type: ignore - expires=expires, - httponly=httponly, - domain=domain, - path=path, - secure=secure, - samesite=samesite, - ) - response.vary.add("Cookie") diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/signals.py b/venv_flaskchat/lib/python3.11/site-packages/flask/signals.py deleted file mode 100644 index d79f21f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/signals.py +++ /dev/null @@ -1,33 +0,0 @@ -from __future__ import annotations - -import typing as t -import warnings - -from blinker import Namespace - -# This namespace is only for signals provided by Flask itself. -_signals = Namespace() - -template_rendered = _signals.signal("template-rendered") -before_render_template = _signals.signal("before-render-template") -request_started = _signals.signal("request-started") -request_finished = _signals.signal("request-finished") -request_tearing_down = _signals.signal("request-tearing-down") -got_request_exception = _signals.signal("got-request-exception") -appcontext_tearing_down = _signals.signal("appcontext-tearing-down") -appcontext_pushed = _signals.signal("appcontext-pushed") -appcontext_popped = _signals.signal("appcontext-popped") -message_flashed = _signals.signal("message-flashed") - - -def __getattr__(name: str) -> t.Any: - if name == "signals_available": - warnings.warn( - "The 'signals_available' attribute is deprecated and will be removed in" - " Flask 2.4. Signals are always available.", - DeprecationWarning, - stacklevel=2, - ) - return True - - raise AttributeError(name) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/templating.py b/venv_flaskchat/lib/python3.11/site-packages/flask/templating.py deleted file mode 100644 index 769108f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/templating.py +++ /dev/null @@ -1,220 +0,0 @@ -from __future__ import annotations - -import typing as t - -from jinja2 import BaseLoader -from jinja2 import Environment as BaseEnvironment -from jinja2 import Template -from jinja2 import TemplateNotFound - -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .helpers import stream_with_context -from .signals import before_render_template -from .signals import template_rendered - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .scaffold import Scaffold - - -def _default_template_ctx_processor() -> dict[str, t.Any]: - """Default template context processor. Injects `request`, - `session` and `g`. - """ - appctx = _cv_app.get(None) - reqctx = _cv_request.get(None) - rv: dict[str, t.Any] = {} - if appctx is not None: - rv["g"] = appctx.g - if reqctx is not None: - rv["request"] = reqctx.request - rv["session"] = reqctx.session - return rv - - -class Environment(BaseEnvironment): - """Works like a regular Jinja2 environment but has some additional - knowledge of how Flask's blueprint works so that it can prepend the - name of the blueprint to referenced templates if necessary. 
- """ - - def __init__(self, app: Flask, **options: t.Any) -> None: - if "loader" not in options: - options["loader"] = app.create_global_jinja_loader() - BaseEnvironment.__init__(self, **options) - self.app = app - - -class DispatchingJinjaLoader(BaseLoader): - """A loader that looks for templates in the application and all - the blueprint folders. - """ - - def __init__(self, app: Flask) -> None: - self.app = app - - def get_source( # type: ignore - self, environment: Environment, template: str - ) -> tuple[str, str | None, t.Callable | None]: - if self.app.config["EXPLAIN_TEMPLATE_LOADING"]: - return self._get_source_explained(environment, template) - return self._get_source_fast(environment, template) - - def _get_source_explained( - self, environment: Environment, template: str - ) -> tuple[str, str | None, t.Callable | None]: - attempts = [] - rv: tuple[str, str | None, t.Callable[[], bool] | None] | None - trv: None | (tuple[str, str | None, t.Callable[[], bool] | None]) = None - - for srcobj, loader in self._iter_loaders(template): - try: - rv = loader.get_source(environment, template) - if trv is None: - trv = rv - except TemplateNotFound: - rv = None - attempts.append((loader, srcobj, rv)) - - from .debughelpers import explain_template_loading_attempts - - explain_template_loading_attempts(self.app, template, attempts) - - if trv is not None: - return trv - raise TemplateNotFound(template) - - def _get_source_fast( - self, environment: Environment, template: str - ) -> tuple[str, str | None, t.Callable | None]: - for _srcobj, loader in self._iter_loaders(template): - try: - return loader.get_source(environment, template) - except TemplateNotFound: - continue - raise TemplateNotFound(template) - - def _iter_loaders( - self, template: str - ) -> t.Generator[tuple[Scaffold, BaseLoader], None, None]: - loader = self.app.jinja_loader - if loader is not None: - yield self.app, loader - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - yield blueprint, loader - - def list_templates(self) -> list[str]: - result = set() - loader = self.app.jinja_loader - if loader is not None: - result.update(loader.list_templates()) - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - for template in loader.list_templates(): - result.add(template) - - return list(result) - - -def _render(app: Flask, template: Template, context: dict[str, t.Any]) -> str: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - rv = template.render(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - return rv - - -def render_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> str: - """Render a template by name with the given context. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _render(app, template, context) - - -def render_template_string(source: str, **context: t.Any) -> str: - """Render a template from the given source string with the given - context. 
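A self-contained use of the rendering helper documented here (the template string is made up; ``render_template`` works the same way but loads templates from disk via the dispatching loader above):

    from flask import Flask, render_template_string

    app = Flask(__name__)

    @app.route("/hello/<name>")
    def hello(name):
        # Goes through the same _render() path as render_template, so
        # context processors run and the template signals are sent.
        return render_template_string("Hello, {{ name }}!", name=name)

    with app.test_request_context("/hello/world"):
        print(hello("world"))   # Hello, world!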
- - :param source: The source code of the template to render. - :param context: The variables to make available in the template. - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _render(app, template, context) - - -def _stream( - app: Flask, template: Template, context: dict[str, t.Any] -) -> t.Iterator[str]: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - def generate() -> t.Iterator[str]: - yield from template.generate(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - rv = generate() - - # If a request context is active, keep it while generating. - if request: - rv = stream_with_context(rv) - - return rv - - -def stream_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> t.Iterator[str]: - """Render a template by name with the given context as a stream. - This returns an iterator of strings, which can be used as a - streaming response from a view. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _stream(app, template, context) - - -def stream_template_string(source: str, **context: t.Any) -> t.Iterator[str]: - """Render a template from the given source string with the given - context as a stream. This returns an iterator of strings, which can - be used as a streaming response from a view. - - :param source: The source code of the template to render. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _stream(app, template, context) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/testing.py b/venv_flaskchat/lib/python3.11/site-packages/flask/testing.py deleted file mode 100644 index 773f152..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/testing.py +++ /dev/null @@ -1,282 +0,0 @@ -from __future__ import annotations - -import typing as t -from contextlib import contextmanager -from contextlib import ExitStack -from copy import copy -from types import TracebackType -from urllib.parse import urlsplit - -import werkzeug.test -from click.testing import CliRunner -from werkzeug.test import Client -from werkzeug.wrappers import Request as BaseRequest - -from .cli import ScriptInfo -from .sessions import SessionMixin - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.test import TestResponse - - from .app import Flask - - -class EnvironBuilder(werkzeug.test.EnvironBuilder): - """An :class:`~werkzeug.test.EnvironBuilder`, that takes defaults from the - application. - - :param app: The Flask application to configure the environment from. - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to :data:`SERVER_NAME`. 
- :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - - def __init__( - self, - app: Flask, - path: str = "/", - base_url: str | None = None, - subdomain: str | None = None, - url_scheme: str | None = None, - *args: t.Any, - **kwargs: t.Any, - ) -> None: - assert not (base_url or subdomain or url_scheme) or ( - base_url is not None - ) != bool( - subdomain or url_scheme - ), 'Cannot pass "subdomain" or "url_scheme" with "base_url".' - - if base_url is None: - http_host = app.config.get("SERVER_NAME") or "localhost" - app_root = app.config["APPLICATION_ROOT"] - - if subdomain: - http_host = f"{subdomain}.{http_host}" - - if url_scheme is None: - url_scheme = app.config["PREFERRED_URL_SCHEME"] - - url = urlsplit(path) - base_url = ( - f"{url.scheme or url_scheme}://{url.netloc or http_host}" - f"/{app_root.lstrip('/')}" - ) - path = url.path - - if url.query: - sep = b"?" if isinstance(url.query, bytes) else "?" - path += sep + url.query - - self.app = app - super().__init__(path, base_url, *args, **kwargs) - - def json_dumps(self, obj: t.Any, **kwargs: t.Any) -> str: # type: ignore - """Serialize ``obj`` to a JSON-formatted string. - - The serialization will be configured according to the config associated - with this EnvironBuilder's ``app``. - """ - return self.app.json.dumps(obj, **kwargs) - - -class FlaskClient(Client): - """Works like a regular Werkzeug test client but has knowledge about - Flask's contexts to defer the cleanup of the request context until - the end of a ``with`` block. For general information about how to - use this class refer to :class:`werkzeug.test.Client`. - - .. versionchanged:: 0.12 - `app.test_client()` includes preset default environment, which can be - set after instantiation of the `app.test_client()` object in - `client.environ_base`. - - Basic usage is outlined in the :doc:`/testing` chapter. - """ - - application: Flask - - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - super().__init__(*args, **kwargs) - self.preserve_context = False - self._new_contexts: list[t.ContextManager[t.Any]] = [] - self._context_stack = ExitStack() - self.environ_base = { - "REMOTE_ADDR": "127.0.0.1", - "HTTP_USER_AGENT": f"werkzeug/{werkzeug.__version__}", - } - - @contextmanager - def session_transaction( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Generator[SessionMixin, None, None]: - """When used in combination with a ``with`` statement this opens a - session transaction. This can be used to modify the session that - the test client uses. Once the ``with`` block is left the session is - stored back. - - :: - - with client.session_transaction() as session: - session['value'] = 42 - - Internally this is implemented by going through a temporary test - request context and since session handling could depend on - request variables this function accepts the same arguments as - :meth:`~flask.Flask.test_request_context` which are directly - passed through. - """ - if self._cookies is None: - raise TypeError( - "Cookies are disabled. Create a client with 'use_cookies=True'." 
- ) - - app = self.application - ctx = app.test_request_context(*args, **kwargs) - self._add_cookies_to_wsgi(ctx.request.environ) - - with ctx: - sess = app.session_interface.open_session(app, ctx.request) - - if sess is None: - raise RuntimeError("Session backend did not open a session.") - - yield sess - resp = app.response_class() - - if app.session_interface.is_null_session(sess): - return - - with ctx: - app.session_interface.save_session(app, sess, resp) - - self._update_cookies_from_response( - ctx.request.host.partition(":")[0], - ctx.request.path, - resp.headers.getlist("Set-Cookie"), - ) - - def _copy_environ(self, other): - out = {**self.environ_base, **other} - - if self.preserve_context: - out["werkzeug.debug.preserve_context"] = self._new_contexts.append - - return out - - def _request_from_builder_args(self, args, kwargs): - kwargs["environ_base"] = self._copy_environ(kwargs.get("environ_base", {})) - builder = EnvironBuilder(self.application, *args, **kwargs) - - try: - return builder.get_request() - finally: - builder.close() - - def open( - self, - *args: t.Any, - buffered: bool = False, - follow_redirects: bool = False, - **kwargs: t.Any, - ) -> TestResponse: - if args and isinstance( - args[0], (werkzeug.test.EnvironBuilder, dict, BaseRequest) - ): - if isinstance(args[0], werkzeug.test.EnvironBuilder): - builder = copy(args[0]) - builder.environ_base = self._copy_environ(builder.environ_base or {}) - request = builder.get_request() - elif isinstance(args[0], dict): - request = EnvironBuilder.from_environ( - args[0], app=self.application, environ_base=self._copy_environ({}) - ).get_request() - else: - # isinstance(args[0], BaseRequest) - request = copy(args[0]) - request.environ = self._copy_environ(request.environ) - else: - # request is None - request = self._request_from_builder_args(args, kwargs) - - # Pop any previously preserved contexts. This prevents contexts - # from being preserved across redirects or multiple requests - # within a single block. - self._context_stack.close() - - response = super().open( - request, - buffered=buffered, - follow_redirects=follow_redirects, - ) - response.json_module = self.application.json # type: ignore[assignment] - - # Re-push contexts that were preserved during the request. - while self._new_contexts: - cm = self._new_contexts.pop() - self._context_stack.enter_context(cm) - - return response - - def __enter__(self) -> FlaskClient: - if self.preserve_context: - raise RuntimeError("Cannot nest client invocations") - self.preserve_context = True - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.preserve_context = False - self._context_stack.close() - - -class FlaskCliRunner(CliRunner): - """A :class:`~click.testing.CliRunner` for testing a Flask app's - CLI commands. Typically created using - :meth:`~flask.Flask.test_cli_runner`. See :ref:`testing-cli`. - """ - - def __init__(self, app: Flask, **kwargs: t.Any) -> None: - self.app = app - super().__init__(**kwargs) - - def invoke( # type: ignore - self, cli: t.Any = None, args: t.Any = None, **kwargs: t.Any - ) -> t.Any: - """Invokes a CLI command in an isolated environment. See - :meth:`CliRunner.invoke ` for - full method documentation. See :ref:`testing-cli` for examples. - - If the ``obj`` argument is not given, passes an instance of - :class:`~flask.cli.ScriptInfo` that knows how to load the Flask - app being tested. - - :param cli: Command object to invoke. 
Default is the app's - :attr:`~flask.app.Flask.cli` group. - :param args: List of strings to invoke the command with. - - :return: a :class:`~click.testing.Result` object. - """ - if cli is None: - cli = self.app.cli # type: ignore - - if "obj" not in kwargs: - kwargs["obj"] = ScriptInfo(create_app=lambda: self.app) - - return super().invoke(cli, args, **kwargs) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/typing.py b/venv_flaskchat/lib/python3.11/site-packages/flask/typing.py deleted file mode 100644 index 50aef7f..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/typing.py +++ /dev/null @@ -1,82 +0,0 @@ -from __future__ import annotations - -import typing as t - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIApplication # noqa: F401 - from werkzeug.datastructures import Headers # noqa: F401 - from werkzeug.wrappers import Response # noqa: F401 - -# The possible types that are directly convertible or are a Response object. -ResponseValue = t.Union[ - "Response", - str, - bytes, - t.List[t.Any], - # Only dict is actually accepted, but Mapping allows for TypedDict. - t.Mapping[str, t.Any], - t.Iterator[str], - t.Iterator[bytes], -] - -# the possible types for an individual HTTP header -# This should be a Union, but mypy doesn't pass unless it's a TypeVar. -HeaderValue = t.Union[str, t.List[str], t.Tuple[str, ...]] - -# the possible types for HTTP headers -HeadersValue = t.Union[ - "Headers", - t.Mapping[str, HeaderValue], - t.Sequence[t.Tuple[str, HeaderValue]], -] - -# The possible types returned by a route function. -ResponseReturnValue = t.Union[ - ResponseValue, - t.Tuple[ResponseValue, HeadersValue], - t.Tuple[ResponseValue, int], - t.Tuple[ResponseValue, int, HeadersValue], - "WSGIApplication", -] - -# Allow any subclass of werkzeug.Response, such as the one from Flask, -# as a callback argument. Using werkzeug.Response directly makes a -# callback annotated with flask.Response fail type checking. -ResponseClass = t.TypeVar("ResponseClass", bound="Response") - -AppOrBlueprintKey = t.Optional[str] # The App key is None, whereas blueprints are named -AfterRequestCallable = t.Union[ - t.Callable[[ResponseClass], ResponseClass], - t.Callable[[ResponseClass], t.Awaitable[ResponseClass]], -] -BeforeFirstRequestCallable = t.Union[ - t.Callable[[], None], t.Callable[[], t.Awaitable[None]] -] -BeforeRequestCallable = t.Union[ - t.Callable[[], t.Optional[ResponseReturnValue]], - t.Callable[[], t.Awaitable[t.Optional[ResponseReturnValue]]], -] -ShellContextProcessorCallable = t.Callable[[], t.Dict[str, t.Any]] -TeardownCallable = t.Union[ - t.Callable[[t.Optional[BaseException]], None], - t.Callable[[t.Optional[BaseException]], t.Awaitable[None]], -] -TemplateContextProcessorCallable = t.Callable[[], t.Dict[str, t.Any]] -TemplateFilterCallable = t.Callable[..., t.Any] -TemplateGlobalCallable = t.Callable[..., t.Any] -TemplateTestCallable = t.Callable[..., bool] -URLDefaultCallable = t.Callable[[str, dict], None] -URLValuePreprocessorCallable = t.Callable[[t.Optional[str], t.Optional[dict]], None] - -# This should take Exception, but that either breaks typing the argument -# with a specific exception, or decorating multiple times with different -# exceptions (and using a union type on the argument). 
-# https://github.com/pallets/flask/issues/4095 -# https://github.com/pallets/flask/issues/4295 -# https://github.com/pallets/flask/issues/4297 -ErrorHandlerCallable = t.Callable[[t.Any], ResponseReturnValue] - -RouteCallable = t.Union[ - t.Callable[..., ResponseReturnValue], - t.Callable[..., t.Awaitable[ResponseReturnValue]], -] diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/views.py b/venv_flaskchat/lib/python3.11/site-packages/flask/views.py deleted file mode 100644 index c7a2b62..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/views.py +++ /dev/null @@ -1,190 +0,0 @@ -from __future__ import annotations - -import typing as t - -from . import typing as ft -from .globals import current_app -from .globals import request - - -http_method_funcs = frozenset( - ["get", "post", "head", "options", "delete", "put", "trace", "patch"] -) - - -class View: - """Subclass this class and override :meth:`dispatch_request` to - create a generic class-based view. Call :meth:`as_view` to create a - view function that creates an instance of the class with the given - arguments and calls its ``dispatch_request`` method with any URL - variables. - - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class Hello(View): - init_every_request = False - - def dispatch_request(self, name): - return f"Hello, {name}!" - - app.add_url_rule( - "/hello/", view_func=Hello.as_view("hello") - ) - - Set :attr:`methods` on the class to change what methods the view - accepts. - - Set :attr:`decorators` on the class to apply a list of decorators to - the generated view function. Decorators applied to the class itself - will not be applied to the generated view function! - - Set :attr:`init_every_request` to ``False`` for efficiency, unless - you need to store request-global data on ``self``. - """ - - #: The methods this view is registered for. Uses the same default - #: (``["GET", "HEAD", "OPTIONS"]``) as ``route`` and - #: ``add_url_rule`` by default. - methods: t.ClassVar[t.Collection[str] | None] = None - - #: Control whether the ``OPTIONS`` method is handled automatically. - #: Uses the same default (``True``) as ``route`` and - #: ``add_url_rule`` by default. - provide_automatic_options: t.ClassVar[bool | None] = None - - #: A list of decorators to apply, in order, to the generated view - #: function. Remember that ``@decorator`` syntax is applied bottom - #: to top, so the first decorator in the list would be the bottom - #: decorator. - #: - #: .. versionadded:: 0.8 - decorators: t.ClassVar[list[t.Callable]] = [] - - #: Create a new instance of this view class for every request by - #: default. If a view subclass sets this to ``False``, the same - #: instance is used for every request. - #: - #: A single instance is more efficient, especially if complex setup - #: is done during init. However, storing data on ``self`` is no - #: longer safe across requests, and :data:`~flask.g` should be used - #: instead. - #: - #: .. versionadded:: 2.2 - init_every_request: t.ClassVar[bool] = True - - def dispatch_request(self) -> ft.ResponseReturnValue: - """The actual view function behavior. Subclasses must override - this and return a valid response. Any variables from the URL - rule are passed as keyword arguments. - """ - raise NotImplementedError() - - @classmethod - def as_view( - cls, name: str, *class_args: t.Any, **class_kwargs: t.Any - ) -> ft.RouteCallable: - """Convert the class into a view function that can be registered - for a route. 
- - By default, the generated view will create a new instance of the - view class for every request and call its - :meth:`dispatch_request` method. If the view class sets - :attr:`init_every_request` to ``False``, the same instance will - be used for every request. - - Except for ``name``, all other arguments passed to this method - are forwarded to the view class ``__init__`` method. - - .. versionchanged:: 2.2 - Added the ``init_every_request`` class attribute. - """ - if cls.init_every_request: - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - self = view.view_class( # type: ignore[attr-defined] - *class_args, **class_kwargs - ) - return current_app.ensure_sync(self.dispatch_request)(**kwargs) - - else: - self = cls(*class_args, **class_kwargs) - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - return current_app.ensure_sync(self.dispatch_request)(**kwargs) - - if cls.decorators: - view.__name__ = name - view.__module__ = cls.__module__ - for decorator in cls.decorators: - view = decorator(view) - - # We attach the view class to the view function for two reasons: - # first of all it allows us to easily figure out what class-based - # view this thing came from, secondly it's also used for instantiating - # the view class so you can actually replace it with something else - # for testing purposes and debugging. - view.view_class = cls # type: ignore - view.__name__ = name - view.__doc__ = cls.__doc__ - view.__module__ = cls.__module__ - view.methods = cls.methods # type: ignore - view.provide_automatic_options = cls.provide_automatic_options # type: ignore - return view - - -class MethodView(View): - """Dispatches request methods to the corresponding instance methods. - For example, if you implement a ``get`` method, it will be used to - handle ``GET`` requests. - - This can be useful for defining a REST API. - - :attr:`methods` is automatically set based on the methods defined on - the class. - - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class CounterAPI(MethodView): - def get(self): - return str(session.get("counter", 0)) - - def post(self): - session["counter"] = session.get("counter", 0) + 1 - return redirect(url_for("counter")) - - app.add_url_rule( - "/counter", view_func=CounterAPI.as_view("counter") - ) - """ - - def __init_subclass__(cls, **kwargs: t.Any) -> None: - super().__init_subclass__(**kwargs) - - if "methods" not in cls.__dict__: - methods = set() - - for base in cls.__bases__: - if getattr(base, "methods", None): - methods.update(base.methods) # type: ignore[attr-defined] - - for key in http_method_funcs: - if hasattr(cls, key): - methods.add(key.upper()) - - if methods: - cls.methods = methods - - def dispatch_request(self, **kwargs: t.Any) -> ft.ResponseReturnValue: - meth = getattr(self, request.method.lower(), None) - - # If the request method is HEAD and we don't have a handler for it - # retry with GET. 
- if meth is None and request.method == "HEAD": - meth = getattr(self, "get", None) - - assert meth is not None, f"Unimplemented method {request.method!r}" - return current_app.ensure_sync(meth)(**kwargs) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask/wrappers.py b/venv_flaskchat/lib/python3.11/site-packages/flask/wrappers.py deleted file mode 100644 index ef7aa38..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask/wrappers.py +++ /dev/null @@ -1,173 +0,0 @@ -from __future__ import annotations - -import typing as t - -from werkzeug.exceptions import BadRequest -from werkzeug.wrappers import Request as RequestBase -from werkzeug.wrappers import Response as ResponseBase - -from . import json -from .globals import current_app -from .helpers import _split_blueprint_path - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.routing import Rule - - -class Request(RequestBase): - """The request object used by default in Flask. Remembers the - matched endpoint and view arguments. - - It is what ends up as :class:`~flask.request`. If you want to replace - the request object used you can subclass this and set - :attr:`~flask.Flask.request_class` to your subclass. - - The request object is a :class:`~werkzeug.wrappers.Request` subclass and - provides all of the attributes Werkzeug defines plus a few Flask - specific ones. - """ - - json_module: t.Any = json - - #: The internal URL rule that matched the request. This can be - #: useful to inspect which methods are allowed for the URL from - #: a before/after handler (``request.url_rule.methods``) etc. - #: Though if the request's method was invalid for the URL rule, - #: the valid list is available in ``routing_exception.valid_methods`` - #: instead (an attribute of the Werkzeug exception - #: :exc:`~werkzeug.exceptions.MethodNotAllowed`) - #: because the request was never internally bound. - #: - #: .. versionadded:: 0.6 - url_rule: Rule | None = None - - #: A dict of view arguments that matched the request. If an exception - #: happened when matching, this will be ``None``. - view_args: dict[str, t.Any] | None = None - - #: If matching the URL failed, this is the exception that will be - #: raised / was raised as part of the request handling. This is - #: usually a :exc:`~werkzeug.exceptions.NotFound` exception or - #: something similar. - routing_exception: Exception | None = None - - @property - def max_content_length(self) -> int | None: # type: ignore - """Read-only view of the ``MAX_CONTENT_LENGTH`` config key.""" - if current_app: - return current_app.config["MAX_CONTENT_LENGTH"] - else: - return None - - @property - def endpoint(self) -> str | None: - """The endpoint that matched the request URL. - - This will be ``None`` if matching failed or has not been - performed yet. - - This in combination with :attr:`view_args` can be used to - reconstruct the same URL or a modified URL. - """ - if self.url_rule is not None: - return self.url_rule.endpoint - - return None - - @property - def blueprint(self) -> str | None: - """The registered name of the current blueprint. - - This will be ``None`` if the endpoint is not part of a - blueprint, or if URL matching failed or has not been performed - yet. - - This does not necessarily match the name the blueprint was - created with. It may have been nested, or registered with a - different name. - """ - endpoint = self.endpoint - - if endpoint is not None and "." 
in endpoint: - return endpoint.rpartition(".")[0] - - return None - - @property - def blueprints(self) -> list[str]: - """The registered names of the current blueprint upwards through - parent blueprints. - - This will be an empty list if there is no current blueprint, or - if URL matching failed. - - .. versionadded:: 2.0.1 - """ - name = self.blueprint - - if name is None: - return [] - - return _split_blueprint_path(name) - - def _load_form_data(self) -> None: - super()._load_form_data() - - # In debug mode we're replacing the files multidict with an ad-hoc - # subclass that raises a different error for key errors. - if ( - current_app - and current_app.debug - and self.mimetype != "multipart/form-data" - and not self.files - ): - from .debughelpers import attach_enctype_error_multidict - - attach_enctype_error_multidict(self) - - def on_json_loading_failed(self, e: ValueError | None) -> t.Any: - try: - return super().on_json_loading_failed(e) - except BadRequest as e: - if current_app and current_app.debug: - raise - - raise BadRequest() from e - - -class Response(ResponseBase): - """The response object that is used by default in Flask. Works like the - response object from Werkzeug but is set to have an HTML mimetype by - default. Quite often you don't have to create this object yourself because - :meth:`~flask.Flask.make_response` will take care of that for you. - - If you want to replace the response object used you can subclass this and - set :attr:`~flask.Flask.response_class` to your subclass. - - .. versionchanged:: 1.0 - JSON support is added to the response, like the request. This is useful - when testing to get the test client response data as JSON. - - .. versionchanged:: 1.0 - - Added :attr:`max_cookie_size`. - """ - - default_mimetype: str | None = "text/html" - - json_module = json - - autocorrect_location_header = False - - @property - def max_cookie_size(self) -> int: # type: ignore - """Read-only view of the :data:`MAX_COOKIE_SIZE` config key. - - See :attr:`~werkzeug.wrappers.Response.max_cookie_size` in - Werkzeug's docs. - """ - if current_app: - return current_app.config["MAX_COOKIE_SIZE"] - - # return Werkzeug's default when not in an app context - return super().max_cookie_size diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__init__.py deleted file mode 100644 index adea0e2..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__init__.py +++ /dev/null @@ -1,1118 +0,0 @@ -from functools import wraps -import os -import sys - -# make sure gevent-socketio is not installed, as it conflicts with -# python-socketio -gevent_socketio_found = True -try: - from socketio import socketio_manage # noqa: F401 -except ImportError: - gevent_socketio_found = False -if gevent_socketio_found: - print('The gevent-socketio package is incompatible with this version of ' - 'the Flask-SocketIO extension. 
Please uninstall it, and then ' - 'install the latest version of python-socketio in its place.') - sys.exit(1) - -import flask -from flask import has_request_context, json as flask_json -from flask.sessions import SessionMixin -import socketio -from socketio.exceptions import ConnectionRefusedError # noqa: F401 -from werkzeug.debug import DebuggedApplication -from werkzeug._reloader import run_with_reloader - -from .namespace import Namespace -from .test_client import SocketIOTestClient - - -class _SocketIOMiddleware(socketio.WSGIApp): - """This WSGI middleware simply exposes the Flask application in the WSGI - environment before executing the request. - """ - def __init__(self, socketio_app, flask_app, socketio_path='socket.io'): - self.flask_app = flask_app - super(_SocketIOMiddleware, self).__init__(socketio_app, - flask_app.wsgi_app, - socketio_path=socketio_path) - - def __call__(self, environ, start_response): - environ = environ.copy() - environ['flask.app'] = self.flask_app - return super(_SocketIOMiddleware, self).__call__(environ, - start_response) - - -class _ManagedSession(dict, SessionMixin): - """This class is used for user sessions that are managed by - Flask-SocketIO. It is simple dict, expanded with the Flask session - attributes.""" - pass - - -class SocketIO(object): - """Create a Flask-SocketIO server. - - :param app: The flask application instance. If the application instance - isn't known at the time this class is instantiated, then call - ``socketio.init_app(app)`` once the application instance is - available. - :param manage_session: If set to ``True``, this extension manages the user - session for Socket.IO events. If set to ``False``, - Flask's own session management is used. When using - Flask's cookie based sessions it is recommended that - you leave this set to the default of ``True``. When - using server-side sessions, a ``False`` setting - enables sharing the user session between HTTP routes - and Socket.IO events. - :param message_queue: A connection URL for a message queue service the - server can use for multi-process communication. A - message queue is not required when using a single - server process. - :param channel: The channel name, when using a message queue. If a channel - isn't specified, a default channel will be used. If - multiple clusters of SocketIO processes need to use the - same message queue without interfering with each other, - then each cluster should use a different channel. - :param path: The path where the Socket.IO server is exposed. Defaults to - ``'socket.io'``. Leave this as is unless you know what you are - doing. - :param resource: Alias to ``path``. - :param kwargs: Socket.IO and Engine.IO server options. - - The Socket.IO server options are detailed below: - - :param client_manager: The client manager instance that will manage the - client list. When this is omitted, the client list - is stored in an in-memory structure, so the use of - multiple connected servers is not possible. In most - cases, this argument does not need to be set - explicitly. - :param logger: To enable logging set to ``True`` or pass a logger object to - use. To disable logging set to ``False``. The default is - ``False``. Note that fatal errors will be logged even when - ``logger`` is ``False``. - :param json: An alternative json module to use for encoding and decoding - packets. Custom json modules must have ``dumps`` and ``loads`` - functions that are compatible with the standard library - versions. 
To use the same json encoder and decoder as a Flask - application, use ``flask.json``. - :param async_handlers: If set to ``True``, event handlers for a client are - executed in separate threads. To run handlers for a - client synchronously, set to ``False``. The default - is ``True``. - :param always_connect: When set to ``False``, new connections are - provisory until the connect handler returns - something other than ``False``, at which point they - are accepted. When set to ``True``, connections are - immediately accepted, and then if the connect - handler returns ``False`` a disconnect is issued. - Set to ``True`` if you need to emit events from the - connect handler and your client is confused when it - receives events before the connection acceptance. - In any other case use the default of ``False``. - - The Engine.IO server configuration supports the following settings: - - :param async_mode: The asynchronous model to use. See the Deployment - section in the documentation for a description of the - available options. Valid async modes are ``threading``, - ``eventlet``, ``gevent`` and ``gevent_uwsgi``. If this - argument is not given, ``eventlet`` is tried first, then - ``gevent_uwsgi``, then ``gevent``, and finally - ``threading``. The first async mode that has all its - dependencies installed is then one that is chosen. - :param ping_interval: The interval in seconds at which the server pings - the client. The default is 25 seconds. For advanced - control, a two element tuple can be given, where - the first number is the ping interval and the second - is a grace period added by the server. - :param ping_timeout: The time in seconds that the client waits for the - server to respond before disconnecting. The default - is 5 seconds. - :param max_http_buffer_size: The maximum size of a message when using the - polling transport. The default is 1,000,000 - bytes. - :param allow_upgrades: Whether to allow transport upgrades or not. The - default is ``True``. - :param http_compression: Whether to compress packages when using the - polling transport. The default is ``True``. - :param compression_threshold: Only compress messages when their byte size - is greater than this value. The default is - 1024 bytes. - :param cookie: If set to a string, it is the name of the HTTP cookie the - server sends back to the client containing the client - session id. If set to a dictionary, the ``'name'`` key - contains the cookie name and other keys define cookie - attributes, where the value of each attribute can be a - string, a callable with no arguments, or a boolean. If set - to ``None`` (the default), a cookie is not sent to the - client. - :param cors_allowed_origins: Origin or list of origins that are allowed to - connect to this server. Only the same origin - is allowed by default. Set this argument to - ``'*'`` to allow all origins, or to ``[]`` to - disable CORS handling. - :param cors_credentials: Whether credentials (cookies, authentication) are - allowed in requests to this server. The default is - ``True``. - :param monitor_clients: If set to ``True``, a background task will ensure - inactive clients are closed. Set to ``False`` to - disable the monitoring task (not recommended). The - default is ``True``. - :param engineio_logger: To enable Engine.IO logging set to ``True`` or pass - a logger object to use. To disable logging set to - ``False``. The default is ``False``. Note that - fatal errors are logged even when - ``engineio_logger`` is ``False``. 
- """ - - def __init__(self, app=None, **kwargs): - self.server = None - self.server_options = {} - self.wsgi_server = None - self.handlers = [] - self.namespace_handlers = [] - self.exception_handlers = {} - self.default_exception_handler = None - self.manage_session = True - # We can call init_app when: - # - we were given the Flask app instance (standard initialization) - # - we were not given the app, but we were given a message_queue - # (standard initialization for auxiliary process) - # In all other cases we collect the arguments and assume the client - # will call init_app from an app factory function. - if app is not None or 'message_queue' in kwargs: - self.init_app(app, **kwargs) - else: - self.server_options.update(kwargs) - - def init_app(self, app, **kwargs): - if app is not None: - if not hasattr(app, 'extensions'): - app.extensions = {} # pragma: no cover - app.extensions['socketio'] = self - self.server_options.update(kwargs) - self.manage_session = self.server_options.pop('manage_session', - self.manage_session) - - if 'client_manager' not in kwargs: - url = self.server_options.get('message_queue', None) - channel = self.server_options.pop('channel', 'flask-socketio') - write_only = app is None - if url: - if url.startswith(('redis://', "rediss://")): - queue_class = socketio.RedisManager - elif url.startswith(('kafka://')): - queue_class = socketio.KafkaManager - elif url.startswith('zmq'): - queue_class = socketio.ZmqManager - else: - queue_class = socketio.KombuManager - queue = queue_class(url, channel=channel, - write_only=write_only) - self.server_options['client_manager'] = queue - - if 'json' in self.server_options and \ - self.server_options['json'] == flask_json: - # flask's json module is tricky to use because its output - # changes when it is invoked inside or outside the app context - # so here to prevent any ambiguities we replace it with wrappers - # that ensure that the app context is always present - class FlaskSafeJSON(object): - @staticmethod - def dumps(*args, **kwargs): - with app.app_context(): - return flask_json.dumps(*args, **kwargs) - - @staticmethod - def loads(*args, **kwargs): - with app.app_context(): - return flask_json.loads(*args, **kwargs) - - self.server_options['json'] = FlaskSafeJSON - - resource = self.server_options.pop('path', None) or \ - self.server_options.pop('resource', None) or 'socket.io' - if resource.startswith('/'): - resource = resource[1:] - if os.environ.get('FLASK_RUN_FROM_CLI'): - if self.server_options.get('async_mode') is None: - self.server_options['async_mode'] = 'threading' - self.server = socketio.Server(**self.server_options) - self.async_mode = self.server.async_mode - for handler in self.handlers: - self.server.on(handler[0], handler[1], namespace=handler[2]) - for namespace_handler in self.namespace_handlers: - self.server.register_namespace(namespace_handler) - - if app is not None: - # here we attach the SocketIO middleware to the SocketIO object so - # it can be referenced later if debug middleware needs to be - # inserted - self.sockio_mw = _SocketIOMiddleware(self.server, app, - socketio_path=resource) - app.wsgi_app = self.sockio_mw - - def on(self, message, namespace=None): - """Decorator to register a SocketIO event handler. - - This decorator must be applied to SocketIO event handlers. Example:: - - @socketio.on('my event', namespace='/chat') - def handle_my_custom_event(json): - print('received json: ' + str(json)) - - :param message: The name of the event. 
This is normally a user defined - string, but a few event names are already defined. Use - ``'message'`` to define a handler that takes a string - payload, ``'json'`` to define a handler that takes a - JSON blob payload, ``'connect'`` or ``'disconnect'`` - to create handlers for connection and disconnection - events. - :param namespace: The namespace on which the handler is to be - registered. Defaults to the global namespace. - """ - namespace = namespace or '/' - - def decorator(handler): - @wraps(handler) - def _handler(sid, *args): - return self._handle_event(handler, message, namespace, sid, - *args) - - if self.server: - self.server.on(message, _handler, namespace=namespace) - else: - self.handlers.append((message, _handler, namespace)) - return handler - return decorator - - def on_error(self, namespace=None): - """Decorator to define a custom error handler for SocketIO events. - - This decorator can be applied to a function that acts as an error - handler for a namespace. This handler will be invoked when a SocketIO - event handler raises an exception. The handler function must accept one - argument, which is the exception raised. Example:: - - @socketio.on_error(namespace='/chat') - def chat_error_handler(e): - print('An error has occurred: ' + str(e)) - - :param namespace: The namespace for which to register the error - handler. Defaults to the global namespace. - """ - namespace = namespace or '/' - - def decorator(exception_handler): - if not callable(exception_handler): - raise ValueError('exception_handler must be callable') - self.exception_handlers[namespace] = exception_handler - return exception_handler - return decorator - - def on_error_default(self, exception_handler): - """Decorator to define a default error handler for SocketIO events. - - This decorator can be applied to a function that acts as a default - error handler for any namespaces that do not have a specific handler. - Example:: - - @socketio.on_error_default - def error_handler(e): - print('An error has occurred: ' + str(e)) - """ - if not callable(exception_handler): - raise ValueError('exception_handler must be callable') - self.default_exception_handler = exception_handler - return exception_handler - - def on_event(self, message, handler, namespace=None): - """Register a SocketIO event handler. - - ``on_event`` is the non-decorator version of ``'on'``. - - Example:: - - def on_foo_event(json): - print('received json: ' + str(json)) - - socketio.on_event('my event', on_foo_event, namespace='/chat') - - :param message: The name of the event. This is normally a user defined - string, but a few event names are already defined. Use - ``'message'`` to define a handler that takes a string - payload, ``'json'`` to define a handler that takes a - JSON blob payload, ``'connect'`` or ``'disconnect'`` - to create handlers for connection and disconnection - events. - :param handler: The function that handles the event. - :param namespace: The namespace on which the handler is to be - registered. Defaults to the global namespace. - """ - self.on(message, namespace=namespace)(handler) - - def event(self, *args, **kwargs): - """Decorator to register an event handler. - - This is a simplified version of the ``on()`` method that takes the - event name from the decorated function. 
- - Example usage:: - - @socketio.event - def my_event(data): - print('Received data: ', data) - - The above example is equivalent to:: - - @socketio.on('my_event') - def my_event(data): - print('Received data: ', data) - - A custom namespace can be given as an argument to the decorator:: - - @socketio.event(namespace='/test') - def my_event(data): - print('Received data: ', data) - """ - if len(args) == 1 and len(kwargs) == 0 and callable(args[0]): - # the decorator was invoked without arguments - # args[0] is the decorated function - return self.on(args[0].__name__)(args[0]) - else: - # the decorator was invoked with arguments - def set_handler(handler): - return self.on(handler.__name__, *args, **kwargs)(handler) - - return set_handler - - def on_namespace(self, namespace_handler): - if not isinstance(namespace_handler, Namespace): - raise ValueError('Not a namespace instance.') - namespace_handler._set_socketio(self) - if self.server: - self.server.register_namespace(namespace_handler) - else: - self.namespace_handlers.append(namespace_handler) - - def emit(self, event, *args, **kwargs): - """Emit a server generated SocketIO event. - - This function emits a SocketIO event to one or more connected clients. - A JSON blob can be attached to the event as payload. This function can - be used outside of a SocketIO event context, so it is appropriate to - use when the server is the originator of an event, outside of any - client context, such as in a regular HTTP request handler or a - background task. Example:: - - @app.route('/ping') - def ping(): - socketio.emit('ping event', {'data': 42}, namespace='/chat') - - :param event: The name of the user event to emit. - :param args: A dictionary with the JSON data to send as payload. - :param namespace: The namespace under which the message is to be sent. - Defaults to the global namespace. - :param to: Send the message to all the users in the given room, or to - the user with the given session ID. If this parameter is not - included, the event is sent to all connected users. - :param include_self: ``True`` to include the sender when broadcasting - or addressing a room, or ``False`` to send to - everyone but the sender. - :param skip_sid: The session id of a client to ignore when broadcasting - or addressing a room. This is typically set to the - originator of the message, so that everyone except - that client receive the message. To skip multiple sids - pass a list. - :param callback: If given, this function will be called to acknowledge - that the client has received the message. The - arguments that will be passed to the function are - those provided by the client. Callback functions can - only be used when addressing an individual client. 
- """ - namespace = kwargs.pop('namespace', '/') - to = kwargs.pop('to', None) or kwargs.pop('room', None) - include_self = kwargs.pop('include_self', True) - skip_sid = kwargs.pop('skip_sid', None) - if not include_self and not skip_sid: - skip_sid = flask.request.sid - callback = kwargs.pop('callback', None) - if callback: - # wrap the callback so that it sets app app and request contexts - sid = None - original_callback = callback - original_namespace = namespace - if has_request_context(): - sid = getattr(flask.request, 'sid', None) - original_namespace = getattr(flask.request, 'namespace', None) - - def _callback_wrapper(*args): - return self._handle_event(original_callback, None, - original_namespace, sid, *args) - - if sid: - # the callback wrapper above will install a request context - # before invoking the original callback - # we only use it if the emit was issued from a Socket.IO - # populated request context (i.e. request.sid is defined) - callback = _callback_wrapper - self.server.emit(event, *args, namespace=namespace, to=to, - skip_sid=skip_sid, callback=callback, **kwargs) - - def call(self, event, *args, **kwargs): # pragma: no cover - """Emit a SocketIO event and wait for the response. - - This method issues an emit with a callback and waits for the callback - to be invoked by the client before returning. If the callback isn’t - invoked before the timeout, then a TimeoutError exception is raised. If - the Socket.IO connection drops during the wait, this method still waits - until the specified timeout. Example:: - - def get_status(client, data): - status = call('status', {'data': data}, to=client) - - :param event: The name of the user event to emit. - :param args: A dictionary with the JSON data to send as payload. - :param namespace: The namespace under which the message is to be sent. - Defaults to the global namespace. - :param to: The session ID of the recipient client. - :param timeout: The waiting timeout. If the timeout is reached before - the client acknowledges the event, then a - ``TimeoutError`` exception is raised. The default is 60 - seconds. - :param ignore_queue: Only used when a message queue is configured. If - set to ``True``, the event is emitted to the - client directly, without going through the queue. - This is more efficient, but only works when a - single server process is used, or when there is a - single addressee. It is recommended to always - leave this parameter with its default value of - ``False``. - """ - namespace = kwargs.pop('namespace', '/') - to = kwargs.pop('to', None) or kwargs.pop('room', None) - return self.server.call(event, *args, namespace=namespace, to=to, - **kwargs) - - def send(self, data, json=False, namespace=None, to=None, - callback=None, include_self=True, skip_sid=None, **kwargs): - """Send a server-generated SocketIO message. - - This function sends a simple SocketIO message to one or more connected - clients. The message can be a string or a JSON blob. This is a simpler - version of ``emit()``, which should be preferred. This function can be - used outside of a SocketIO event context, so it is appropriate to use - when the server is the originator of an event. - - :param data: The message to send, either a string or a JSON blob. - :param json: ``True`` if ``message`` is a JSON blob, ``False`` - otherwise. - :param namespace: The namespace under which the message is to be sent. - Defaults to the global namespace. 
- :param to: Send the message to all the users in the given room, or to - the user with the given session ID. If this parameter is not - included, the event is sent to all connected users. - :param include_self: ``True`` to include the sender when broadcasting - or addressing a room, or ``False`` to send to - everyone but the sender. - :param skip_sid: The session id of a client to ignore when broadcasting - or addressing a room. This is typically set to the - originator of the message, so that everyone except - that client receive the message. To skip multiple sids - pass a list. - :param callback: If given, this function will be called to acknowledge - that the client has received the message. The - arguments that will be passed to the function are - those provided by the client. Callback functions can - only be used when addressing an individual client. - """ - skip_sid = flask.request.sid if not include_self else skip_sid - if json: - self.emit('json', data, namespace=namespace, to=to, - skip_sid=skip_sid, callback=callback, **kwargs) - else: - self.emit('message', data, namespace=namespace, to=to, - skip_sid=skip_sid, callback=callback, **kwargs) - - def close_room(self, room, namespace=None): - """Close a room. - - This function removes any users that are in the given room and then - deletes the room from the server. This function can be used outside - of a SocketIO event context. - - :param room: The name of the room to close. - :param namespace: The namespace under which the room exists. Defaults - to the global namespace. - """ - self.server.close_room(room, namespace) - - def run(self, app, host=None, port=None, **kwargs): # pragma: no cover - """Run the SocketIO web server. - - :param app: The Flask application instance. - :param host: The hostname or IP address for the server to listen on. - Defaults to 127.0.0.1. - :param port: The port number for the server to listen on. Defaults to - 5000. - :param debug: ``True`` to start the server in debug mode, ``False`` to - start in normal mode. - :param use_reloader: ``True`` to enable the Flask reloader, ``False`` - to disable it. - :param reloader_options: A dictionary with options that are passed to - the Flask reloader, such as ``extra_files``, - ``reloader_type``, etc. - :param extra_files: A list of additional files that the Flask - reloader should watch. Defaults to ``None``. - Deprecated, use ``reloader_options`` instead. - :param log_output: If ``True``, the server logs all incoming - connections. If ``False`` logging is disabled. - Defaults to ``True`` in debug mode, ``False`` - in normal mode. Unused when the threading async - mode is used. - :param allow_unsafe_werkzeug: Set to ``True`` to allow the use of the - Werkzeug web server in a production - setting. Default is ``False``. Set to - ``True`` at your own risk. - :param kwargs: Additional web server options. The web server options - are specific to the server used in each of the supported - async modes. Note that options provided here will - not be seen when using an external web server such - as gunicorn, since this method is not called in that - case. 
- """ - if host is None: - host = '127.0.0.1' - if port is None: - server_name = app.config['SERVER_NAME'] - if server_name and ':' in server_name: - port = int(server_name.rsplit(':', 1)[1]) - else: - port = 5000 - - debug = kwargs.pop('debug', app.debug) - log_output = kwargs.pop('log_output', debug) - use_reloader = kwargs.pop('use_reloader', debug) - extra_files = kwargs.pop('extra_files', None) - reloader_options = kwargs.pop('reloader_options', {}) - if extra_files: - reloader_options['extra_files'] = extra_files - - app.debug = debug - if app.debug and self.server.eio.async_mode != 'threading': - # put the debug middleware between the SocketIO middleware - # and the Flask application instance - # - # mw1 mw2 mw3 Flask app - # o ---- o ---- o ---- o - # / - # o Flask-SocketIO - # \ middleware - # o - # Flask-SocketIO WebSocket handler - # - # BECOMES - # - # dbg-mw mw1 mw2 mw3 Flask app - # o ---- o ---- o ---- o ---- o - # / - # o Flask-SocketIO - # \ middleware - # o - # Flask-SocketIO WebSocket handler - # - self.sockio_mw.wsgi_app = DebuggedApplication( - self.sockio_mw.wsgi_app, evalex=True) - - if self.server.eio.async_mode == 'threading': - try: - import simple_websocket # noqa: F401 - except ImportError: - from werkzeug._internal import _log - _log('warning', 'WebSocket transport not available. Install ' - 'simple-websocket for improved performance.') - allow_unsafe_werkzeug = kwargs.pop('allow_unsafe_werkzeug', - False) - if not sys.stdin or not sys.stdin.isatty(): # pragma: no cover - if not allow_unsafe_werkzeug: - raise RuntimeError('The Werkzeug web server is not ' - 'designed to run in production. Pass ' - 'allow_unsafe_werkzeug=True to the ' - 'run() method to disable this error.') - else: - from werkzeug._internal import _log - _log('warning', ('Werkzeug appears to be used in a ' - 'production deployment. Consider ' - 'switching to a production web server ' - 'instead.')) - app.run(host=host, port=port, threaded=True, - use_reloader=use_reloader, **reloader_options, **kwargs) - elif self.server.eio.async_mode == 'eventlet': - def run_server(): - import eventlet - import eventlet.wsgi - import eventlet.green - addresses = eventlet.green.socket.getaddrinfo(host, port) - if not addresses: - raise RuntimeError( - 'Could not resolve host to a valid address') - eventlet_socket = eventlet.listen(addresses[0][4], - addresses[0][0]) - - # If provided an SSL argument, use an SSL socket - ssl_args = ['keyfile', 'certfile', 'server_side', 'cert_reqs', - 'ssl_version', 'ca_certs', - 'do_handshake_on_connect', 'suppress_ragged_eofs', - 'ciphers'] - ssl_params = {k: kwargs[k] for k in kwargs - if k in ssl_args and kwargs[k] is not None} - for k in ssl_args: - kwargs.pop(k, None) - if len(ssl_params) > 0: - ssl_params['server_side'] = True # Listening requires true - eventlet_socket = eventlet.wrap_ssl(eventlet_socket, - **ssl_params) - - eventlet.wsgi.server(eventlet_socket, app, - log_output=log_output, **kwargs) - - if use_reloader: - run_with_reloader(run_server, **reloader_options) - else: - run_server() - elif self.server.eio.async_mode == 'gevent': - from gevent import pywsgi - try: - from geventwebsocket.handler import WebSocketHandler - websocket = True - except ImportError: - app.logger.warning( - 'WebSocket transport not available. 
Install ' - 'gevent-websocket for improved performance.') - websocket = False - - log = 'default' - if not log_output: - log = None - if websocket: - self.wsgi_server = pywsgi.WSGIServer( - (host, port), app, handler_class=WebSocketHandler, - log=log, **kwargs) - else: - self.wsgi_server = pywsgi.WSGIServer((host, port), app, - log=log, **kwargs) - - if use_reloader: - # monkey patching is required by the reloader - from gevent import monkey - monkey.patch_thread() - monkey.patch_time() - - def run_server(): - self.wsgi_server.serve_forever() - - run_with_reloader(run_server, **reloader_options) - else: - self.wsgi_server.serve_forever() - - def stop(self): - """Stop a running SocketIO web server. - - This method must be called from a HTTP or SocketIO handler function. - """ - if self.server.eio.async_mode == 'threading': - func = flask.request.environ.get('werkzeug.server.shutdown') - if func: - func() - else: - raise RuntimeError('Cannot stop unknown web server') - elif self.server.eio.async_mode == 'eventlet': - raise SystemExit - elif self.server.eio.async_mode == 'gevent': - self.wsgi_server.stop() - - def start_background_task(self, target, *args, **kwargs): - """Start a background task using the appropriate async model. - - This is a utility function that applications can use to start a - background task using the method that is compatible with the - selected async mode. - - :param target: the target function to execute. - :param args: arguments to pass to the function. - :param kwargs: keyword arguments to pass to the function. - - This function returns an object that represents the background task, - on which the ``join()`` method can be invoked to wait for the task to - complete. - """ - return self.server.start_background_task(target, *args, **kwargs) - - def sleep(self, seconds=0): - """Sleep for the requested amount of time using the appropriate async - model. - - This is a utility function that applications can use to put a task to - sleep without having to worry about using the correct call for the - selected async mode. - """ - return self.server.sleep(seconds) - - def test_client(self, app, namespace=None, query_string=None, - headers=None, auth=None, flask_test_client=None): - """The Socket.IO test client is useful for testing a Flask-SocketIO - server. It works in a similar way to the Flask Test Client, but - adapted to the Socket.IO server. - - :param app: The Flask application instance. - :param namespace: The namespace for the client. If not provided, the - client connects to the server on the global - namespace. - :param query_string: A string with custom query string arguments. - :param headers: A dictionary with custom HTTP headers. - :param auth: Optional authentication data, given as a dictionary. - :param flask_test_client: The instance of the Flask test client - currently in use. Passing the Flask test - client is optional, but is necessary if you - want the Flask user session and any other - cookies set in HTTP routes accessible from - Socket.IO events. 
- """ - return SocketIOTestClient(app, self, namespace=namespace, - query_string=query_string, headers=headers, - auth=auth, - flask_test_client=flask_test_client) - - def _handle_event(self, handler, message, namespace, sid, *args): - environ = self.server.get_environ(sid, namespace=namespace) - if not environ: - # we don't have record of this client, ignore this event - return '', 400 - app = environ['flask.app'] - with app.request_context(environ): - if self.manage_session: - # manage a separate session for this client's Socket.IO events - # created as a copy of the regular user session - if 'saved_session' not in environ: - environ['saved_session'] = _ManagedSession(flask.session) - session_obj = environ['saved_session'] - if hasattr(flask, 'globals') and \ - hasattr(flask.globals, 'request_ctx'): - # update session for Flask >= 2.2 - ctx = flask.globals.request_ctx._get_current_object() - else: # pragma: no cover - # update session for Flask < 2.2 - ctx = flask._request_ctx_stack.top - ctx.session = session_obj - else: - # let Flask handle the user session - # for cookie based sessions, this effectively freezes the - # session to its state at connection time - # for server-side sessions, this allows HTTP and Socket.IO to - # share the session, with both having read/write access to it - session_obj = flask.session._get_current_object() - flask.request.sid = sid - flask.request.namespace = namespace - flask.request.event = {'message': message, 'args': args} - try: - if message == 'connect': - auth = args[1] if len(args) > 1 else None - try: - ret = handler(auth) - except TypeError: - ret = handler() - else: - ret = handler(*args) - except ConnectionRefusedError: - raise # let this error bubble up to python-socketio - except: - err_handler = self.exception_handlers.get( - namespace, self.default_exception_handler) - if err_handler is None: - raise - type, value, traceback = sys.exc_info() - return err_handler(value) - if not self.manage_session: - # when Flask is managing the user session, it needs to save it - if not hasattr(session_obj, 'modified') or \ - session_obj.modified: - resp = app.response_class() - app.session_interface.save_session(app, session_obj, resp) - return ret - - -def emit(event, *args, **kwargs): - """Emit a SocketIO event. - - This function emits a SocketIO event to one or more connected clients. A - JSON blob can be attached to the event as payload. This is a function that - can only be called from a SocketIO event handler, as in obtains some - information from the current client context. Example:: - - @socketio.on('my event') - def handle_my_custom_event(json): - emit('my response', {'data': 42}) - - :param event: The name of the user event to emit. - :param args: A dictionary with the JSON data to send as payload. - :param namespace: The namespace under which the message is to be sent. - Defaults to the namespace used by the originating event. - A ``'/'`` can be used to explicitly specify the global - namespace. - :param callback: Callback function to invoke with the client's - acknowledgement. - :param broadcast: ``True`` to send the message to all clients, or ``False`` - to only reply to the sender of the originating event. - :param to: Send the message to all the users in the given room, or to the - user with the given session ID. If this argument is not set and - ``broadcast`` is ``False``, then the message is sent only to the - originating user. 
- :param include_self: ``True`` to include the sender when broadcasting or - addressing a room, or ``False`` to send to everyone - but the sender. - :param skip_sid: The session id of a client to ignore when broadcasting - or addressing a room. This is typically set to the - originator of the message, so that everyone except - that client receive the message. To skip multiple sids - pass a list. - :param ignore_queue: Only used when a message queue is configured. If - set to ``True``, the event is emitted to the - clients directly, without going through the queue. - This is more efficient, but only works when a - single server process is used, or when there is a - single addressee. It is recommended to always leave - this parameter with its default value of ``False``. - """ - if 'namespace' in kwargs: - namespace = kwargs['namespace'] - else: - namespace = flask.request.namespace - callback = kwargs.get('callback') - broadcast = kwargs.get('broadcast') - to = kwargs.pop('to', None) or kwargs.pop('room', None) - if to is None and not broadcast: - to = flask.request.sid - include_self = kwargs.get('include_self', True) - skip_sid = kwargs.get('skip_sid') - ignore_queue = kwargs.get('ignore_queue', False) - - socketio = flask.current_app.extensions['socketio'] - return socketio.emit(event, *args, namespace=namespace, to=to, - include_self=include_self, skip_sid=skip_sid, - callback=callback, ignore_queue=ignore_queue) - - -def call(event, *args, **kwargs): # pragma: no cover - """Emit a SocketIO event and wait for the response. - - This function issues an emit with a callback and waits for the callback to - be invoked by the client before returning. If the callback isn’t invoked - before the timeout, then a TimeoutError exception is raised. If the - Socket.IO connection drops during the wait, this method still waits until - the specified timeout. Example:: - - def get_status(client, data): - status = call('status', {'data': data}, to=client) - - :param event: The name of the user event to emit. - :param args: A dictionary with the JSON data to send as payload. - :param namespace: The namespace under which the message is to be sent. - Defaults to the namespace used by the originating event. - A ``'/'`` can be used to explicitly specify the global - namespace. - :param to: The session ID of the recipient client. If this argument is not - given, the event is sent to the originating client. - :param timeout: The waiting timeout. If the timeout is reached before the - client acknowledges the event, then a ``TimeoutError`` - exception is raised. The default is 60 seconds. - :param ignore_queue: Only used when a message queue is configured. If - set to ``True``, the event is emitted to the - client directly, without going through the queue. - This is more efficient, but only works when a - single server process is used, or when there is a - single addressee. It is recommended to always leave - this parameter with its default value of ``False``. - """ - if 'namespace' in kwargs: - namespace = kwargs['namespace'] - else: - namespace = flask.request.namespace - to = kwargs.pop('to', None) or kwargs.pop('room', None) - if to is None: - to = flask.request.sid - timeout = kwargs.get('timeout', 60) - ignore_queue = kwargs.get('ignore_queue', False) - - socketio = flask.current_app.extensions['socketio'] - return socketio.call(event, *args, namespace=namespace, to=to, - ignore_queue=ignore_queue, timeout=timeout) - - -def send(message, **kwargs): - """Send a SocketIO message. 
- - This function sends a simple SocketIO message to one or more connected - clients. The message can be a string or a JSON blob. This is a simpler - version of ``emit()``, which should be preferred. This is a function that - can only be called from a SocketIO event handler. - - :param message: The message to send, either a string or a JSON blob. - :param json: ``True`` if ``message`` is a JSON blob, ``False`` - otherwise. - :param namespace: The namespace under which the message is to be sent. - Defaults to the namespace used by the originating event. - An empty string can be used to use the global namespace. - :param callback: Callback function to invoke with the client's - acknowledgement. - :param broadcast: ``True`` to send the message to all connected clients, or - ``False`` to only reply to the sender of the originating - event. - :param to: Send the message to all the users in the given room, or to the - user with the given session ID. If this argument is not set and - ``broadcast`` is ``False``, then the message is sent only to the - originating user. - :param include_self: ``True`` to include the sender when broadcasting or - addressing a room, or ``False`` to send to everyone - but the sender. - :param skip_sid: The session id of a client to ignore when broadcasting - or addressing a room. This is typically set to the - originator of the message, so that everyone except - that client receive the message. To skip multiple sids - pass a list. - :param ignore_queue: Only used when a message queue is configured. If - set to ``True``, the event is emitted to the - clients directly, without going through the queue. - This is more efficient, but only works when a - single server process is used, or when there is a - single addressee. It is recommended to always leave - this parameter with its default value of ``False``. - """ - json = kwargs.get('json', False) - if 'namespace' in kwargs: - namespace = kwargs['namespace'] - else: - namespace = flask.request.namespace - callback = kwargs.get('callback') - broadcast = kwargs.get('broadcast') - to = kwargs.pop('to', None) or kwargs.pop('room', None) - if to is None and not broadcast: - to = flask.request.sid - include_self = kwargs.get('include_self', True) - skip_sid = kwargs.get('skip_sid') - ignore_queue = kwargs.get('ignore_queue', False) - - socketio = flask.current_app.extensions['socketio'] - return socketio.send(message, json=json, namespace=namespace, to=to, - include_self=include_self, skip_sid=skip_sid, - callback=callback, ignore_queue=ignore_queue) - - -def join_room(room, sid=None, namespace=None): - """Join a room. - - This function puts the user in a room, under the current namespace. The - user and the namespace are obtained from the event context. This is a - function that can only be called from a SocketIO event handler. Example:: - - @socketio.on('join') - def on_join(data): - username = session['username'] - room = data['room'] - join_room(room) - send(username + ' has entered the room.', to=room) - - :param room: The name of the room to join. - :param sid: The session id of the client. If not provided, the client is - obtained from the request context. - :param namespace: The namespace for the room. If not provided, the - namespace is obtained from the request context. 
- """ - socketio = flask.current_app.extensions['socketio'] - sid = sid or flask.request.sid - namespace = namespace or flask.request.namespace - socketio.server.enter_room(sid, room, namespace=namespace) - - -def leave_room(room, sid=None, namespace=None): - """Leave a room. - - This function removes the user from a room, under the current namespace. - The user and the namespace are obtained from the event context. Example:: - - @socketio.on('leave') - def on_leave(data): - username = session['username'] - room = data['room'] - leave_room(room) - send(username + ' has left the room.', to=room) - - :param room: The name of the room to leave. - :param sid: The session id of the client. If not provided, the client is - obtained from the request context. - :param namespace: The namespace for the room. If not provided, the - namespace is obtained from the request context. - """ - socketio = flask.current_app.extensions['socketio'] - sid = sid or flask.request.sid - namespace = namespace or flask.request.namespace - socketio.server.leave_room(sid, room, namespace=namespace) - - -def close_room(room, namespace=None): - """Close a room. - - This function removes any users that are in the given room and then deletes - the room from the server. - - :param room: The name of the room to close. - :param namespace: The namespace for the room. If not provided, the - namespace is obtained from the request context. - """ - socketio = flask.current_app.extensions['socketio'] - namespace = namespace or flask.request.namespace - socketio.server.close_room(room, namespace=namespace) - - -def rooms(sid=None, namespace=None): - """Return a list of the rooms the client is in. - - This function returns all the rooms the client has entered, including its - own room, assigned by the Socket.IO server. - - :param sid: The session id of the client. If not provided, the client is - obtained from the request context. - :param namespace: The namespace for the room. If not provided, the - namespace is obtained from the request context. - """ - socketio = flask.current_app.extensions['socketio'] - sid = sid or flask.request.sid - namespace = namespace or flask.request.namespace - return socketio.server.rooms(sid, namespace=namespace) - - -def disconnect(sid=None, namespace=None, silent=False): - """Disconnect the client. - - This function terminates the connection with the client. As a result of - this call the client will receive a disconnect event. Example:: - - @socketio.on('message') - def receive_message(msg): - if is_banned(session['username']): - disconnect() - else: - # ... - - :param sid: The session id of the client. If not provided, the client is - obtained from the request context. - :param namespace: The namespace for the room. If not provided, the - namespace is obtained from the request context. - :param silent: this option is deprecated. 
- """ - socketio = flask.current_app.extensions['socketio'] - sid = sid or flask.request.sid - namespace = namespace or flask.request.namespace - return socketio.server.disconnect(sid, namespace=namespace) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 2c8f85a..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__pycache__/namespace.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__pycache__/namespace.cpython-311.pyc deleted file mode 100644 index 3974a91..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__pycache__/namespace.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__pycache__/test_client.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__pycache__/test_client.cpython-311.pyc deleted file mode 100644 index 566611c..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/__pycache__/test_client.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/namespace.py b/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/namespace.py deleted file mode 100644 index 43833a9..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/namespace.py +++ /dev/null @@ -1,46 +0,0 @@ -from socketio import Namespace as _Namespace - - -class Namespace(_Namespace): - def __init__(self, namespace=None): - super(Namespace, self).__init__(namespace) - self.socketio = None - - def _set_socketio(self, socketio): - self.socketio = socketio - - def trigger_event(self, event, *args): - """Dispatch an event to the proper handler method. - - In the most common usage, this method is not overloaded by subclasses, - as it performs the routing of events to methods. However, this - method can be overridden if special dispatching rules are needed, or if - having a single method that catches all events is desired. 
- """ - handler_name = 'on_' + event - if not hasattr(self, handler_name): - # there is no handler for this event, so we ignore it - return - handler = getattr(self, handler_name) - return self.socketio._handle_event(handler, event, self.namespace, - *args) - - def emit(self, event, data=None, room=None, include_self=True, - namespace=None, callback=None): - """Emit a custom event to one or more connected clients.""" - return self.socketio.emit(event, data, room=room, - include_self=include_self, - namespace=namespace or self.namespace, - callback=callback) - - def send(self, data, room=None, include_self=True, namespace=None, - callback=None): - """Send a message to one or more connected clients.""" - return self.socketio.send(data, room=room, include_self=include_self, - namespace=namespace or self.namespace, - callback=callback) - - def close_room(self, room, namespace=None): - """Close a room.""" - return self.socketio.close_room(room=room, - namespace=namespace or self.namespace) diff --git a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/test_client.py b/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/test_client.py deleted file mode 100644 index f930c97..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/flask_socketio/test_client.py +++ /dev/null @@ -1,218 +0,0 @@ -import uuid - -from socketio import packet -from socketio.pubsub_manager import PubSubManager -from werkzeug.test import EnvironBuilder - - -class SocketIOTestClient(object): - """ - This class is useful for testing a Flask-SocketIO server. It works in a - similar way to the Flask Test Client, but adapted to the Socket.IO server. - - :param app: The Flask application instance. - :param socketio: The application's ``SocketIO`` instance. - :param namespace: The namespace for the client. If not provided, the client - connects to the server on the global namespace. - :param query_string: A string with custom query string arguments. - :param headers: A dictionary with custom HTTP headers. - :param auth: Optional authentication data, given as a dictionary. - :param flask_test_client: The instance of the Flask test client - currently in use. Passing the Flask test - client is optional, but is necessary if you - want the Flask user session and any other - cookies set in HTTP routes accessible from - Socket.IO events. 
- """ - clients = {} - - def __init__(self, app, socketio, namespace=None, query_string=None, - headers=None, auth=None, flask_test_client=None): - def _mock_send_packet(eio_sid, pkt): - # make sure the packet can be encoded and decoded - epkt = pkt.encode() - if not isinstance(epkt, list): - pkt = packet.Packet(encoded_packet=epkt) - else: - pkt = packet.Packet(encoded_packet=epkt[0]) - for att in epkt[1:]: - pkt.add_attachment(att) - client = self.clients.get(eio_sid) - if not client: - return - if pkt.packet_type == packet.EVENT or \ - pkt.packet_type == packet.BINARY_EVENT: - if pkt.data[0] == 'message' or pkt.data[0] == 'json': - client.queue.append({ - 'name': pkt.data[0], - 'args': pkt.data[1], - 'namespace': pkt.namespace or '/'}) - else: - client.queue.append({ - 'name': pkt.data[0], - 'args': pkt.data[1:], - 'namespace': pkt.namespace or '/'}) - elif pkt.packet_type == packet.ACK or \ - pkt.packet_type == packet.BINARY_ACK: - client.acks = {'args': pkt.data, - 'namespace': pkt.namespace or '/'} - elif pkt.packet_type in [packet.DISCONNECT, packet.CONNECT_ERROR]: - client.connected[pkt.namespace or '/'] = False - - self.app = app - self.flask_test_client = flask_test_client - self.eio_sid = uuid.uuid4().hex - self.clients[self.eio_sid] = self - self.callback_counter = 0 - self.socketio = socketio - self.connected = {} - self.queue = [] - self.acks = None - socketio.server._send_packet = _mock_send_packet - socketio.server.environ[self.eio_sid] = {} - socketio.server.async_handlers = False # easier to test when - socketio.server.eio.async_handlers = False # events are sync - if isinstance(socketio.server.manager, PubSubManager): - raise RuntimeError('Test client cannot be used with a message ' - 'queue. Disable the queue on your test ' - 'configuration.') - socketio.server.manager.initialize() - self.connect(namespace=namespace, query_string=query_string, - headers=headers, auth=auth) - - def is_connected(self, namespace=None): - """Check if a namespace is connected. - - :param namespace: The namespace to check. The global namespace is - assumed if this argument is not provided. - """ - return self.connected.get(namespace or '/', False) - - def connect(self, namespace=None, query_string=None, headers=None, - auth=None): - """Connect the client. - - :param namespace: The namespace for the client. If not provided, the - client connects to the server on the global - namespace. - :param query_string: A string with custom query string arguments. - :param headers: A dictionary with custom HTTP headers. - :param auth: Optional authentication data, given as a dictionary. - - Note that it is usually not necessary to explicitly call this method, - since a connection is automatically established when an instance of - this class is created. An example where it this method would be useful - is when the application accepts multiple namespace connections. - """ - url = '/socket.io' - namespace = namespace or '/' - if query_string: - if query_string[0] != '?': - query_string = '?' 
+ query_string - url += query_string - environ = EnvironBuilder(url, headers=headers).get_environ() - environ['flask.app'] = self.app - if self.flask_test_client: - # inject cookies from Flask - if hasattr(self.flask_test_client, '_add_cookies_to_wsgi'): - # flask >= 2.3 - self.flask_test_client._add_cookies_to_wsgi(environ) - else: # pragma: no cover - # flask < 2.3 - self.flask_test_client.cookie_jar.inject_wsgi(environ) - self.socketio.server._handle_eio_connect(self.eio_sid, environ) - pkt = packet.Packet(packet.CONNECT, auth, namespace=namespace) - self.socketio.server._handle_eio_message(self.eio_sid, pkt.encode()) - sid = self.socketio.server.manager.sid_from_eio_sid(self.eio_sid, - namespace) - if sid: - self.connected[namespace] = True - - def disconnect(self, namespace=None): - """Disconnect the client. - - :param namespace: The namespace to disconnect. The global namespace is - assumed if this argument is not provided. - """ - if not self.is_connected(namespace): - raise RuntimeError('not connected') - pkt = packet.Packet(packet.DISCONNECT, namespace=namespace) - self.socketio.server._handle_eio_message(self.eio_sid, pkt.encode()) - del self.connected[namespace or '/'] - - def emit(self, event, *args, **kwargs): - """Emit an event to the server. - - :param event: The event name. - :param *args: The event arguments. - :param callback: ``True`` if the client requests a callback, ``False`` - if not. Note that client-side callbacks are not - implemented, a callback request will just tell the - server to provide the arguments to invoke the - callback, but no callback is invoked. Instead, the - arguments that the server provided for the callback - are returned by this function. - :param namespace: The namespace of the event. The global namespace is - assumed if this argument is not provided. - """ - namespace = kwargs.pop('namespace', None) - if not self.is_connected(namespace): - raise RuntimeError('not connected') - callback = kwargs.pop('callback', False) - id = None - if callback: - self.callback_counter += 1 - id = self.callback_counter - pkt = packet.Packet(packet.EVENT, data=[event] + list(args), - namespace=namespace, id=id) - encoded_pkt = pkt.encode() - if isinstance(encoded_pkt, list): - for epkt in encoded_pkt: - self.socketio.server._handle_eio_message(self.eio_sid, epkt) - else: - self.socketio.server._handle_eio_message(self.eio_sid, encoded_pkt) - if self.acks is not None: - ack = self.acks - self.acks = None - return ack['args'][0] if len(ack['args']) == 1 \ - else ack['args'] - - def send(self, data, json=False, callback=False, namespace=None): - """Send a text or JSON message to the server. - - :param data: A string, dictionary or list to send to the server. - :param json: ``True`` to send a JSON message, ``False`` to send a text - message. - :param callback: ``True`` if the client requests a callback, ``False`` - if not. Note that client-side callbacks are not - implemented, a callback request will just tell the - server to provide the arguments to invoke the - callback, but no callback is invoked. Instead, the - arguments that the server provided for the callback - are returned by this function. - :param namespace: The namespace of the event. The global namespace is - assumed if this argument is not provided. - """ - if json: - msg = 'json' - else: - msg = 'message' - return self.emit(msg, data, callback=callback, namespace=namespace) - - def get_received(self, namespace=None): - """Return the list of messages received from the server. 
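A hedged sketch of exercising a handler with the test client described above, using is_connected(), emit() and get_received(); the handler and event name are assumptions made for the example:

from flask import Flask
from flask_socketio import SocketIO, emit

app = Flask(__name__)
socketio = SocketIO(app)

@socketio.on('chat message')
def handle_chat_message(data):
    emit('chat message', data, broadcast=True)

def test_chat_roundtrip():
    client = socketio.test_client(app)
    assert client.is_connected()
    client.emit('chat message', {'text': 'hi'})
    received = client.get_received()
    assert received[0]['name'] == 'chat message'
    assert received[0]['args'][0] == {'text': 'hi'}
    client.disconnect()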
- - Since this is not a real client, any time the server emits an event, - the event is simply stored. The test code can invoke this method to - obtain the list of events that were received since the last call. - - :param namespace: The namespace to get events from. The global - namespace is assumed if this argument is not - provided. - """ - if not self.is_connected(namespace): - raise RuntimeError('not connected') - namespace = namespace or '/' - r = [pkt for pkt in self.queue if pkt['namespace'] == namespace] - self.queue = [pkt for pkt in self.queue if pkt not in r] - return r diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/INSTALLER b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/LICENSE.rst b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/LICENSE.rst deleted file mode 100644 index 7b190ca..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2011 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/METADATA b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/METADATA deleted file mode 100644 index 1d935ed..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/METADATA +++ /dev/null @@ -1,97 +0,0 @@ -Metadata-Version: 2.1 -Name: itsdangerous -Version: 2.1.2 -Summary: Safely pass data to untrusted environments and back. 
-Home-page: https://palletsprojects.com/p/itsdangerous/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://itsdangerous.palletsprojects.com/ -Project-URL: Changes, https://itsdangerous.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/itsdangerous/ -Project-URL: Issue Tracker, https://github.com/pallets/itsdangerous/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst - -ItsDangerous -============ - -... so better sign this - -Various helpers to pass data to untrusted environments and to get it -back safe and sound. Data is cryptographically signed to ensure that a -token has not been tampered with. - -It's possible to customize how data is serialized. Data is compressed as -needed. A timestamp can be added and verified automatically while -loading a token. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U itsdangerous - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -Here's how you could generate a token for transmitting a user's id and -name between web requests. - -.. code-block:: python - - from itsdangerous import URLSafeSerializer - auth_s = URLSafeSerializer("secret key", "auth") - token = auth_s.dumps({"id": 5, "name": "itsdangerous"}) - - print(token) - # eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg - - data = auth_s.loads(token) - print(data["name"]) - # itsdangerous - - -Donate ------- - -The Pallets organization develops and supports ItsDangerous and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. 
_please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://itsdangerous.palletsprojects.com/ -- Changes: https://itsdangerous.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/ItsDangerous/ -- Source Code: https://github.com/pallets/itsdangerous/ -- Issue Tracker: https://github.com/pallets/itsdangerous/issues/ -- Website: https://palletsprojects.com/p/itsdangerous/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/RECORD b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/RECORD deleted file mode 100644 index d943693..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/RECORD +++ /dev/null @@ -1,23 +0,0 @@ -itsdangerous-2.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -itsdangerous-2.1.2.dist-info/LICENSE.rst,sha256=Y68JiRtr6K0aQlLtQ68PTvun_JSOIoNnvtfzxa4LCdc,1475 -itsdangerous-2.1.2.dist-info/METADATA,sha256=ThrHIJQ_6XlfbDMCAVe_hawT7IXiIxnTBIDrwxxtucQ,2928 -itsdangerous-2.1.2.dist-info/RECORD,, -itsdangerous-2.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -itsdangerous-2.1.2.dist-info/top_level.txt,sha256=gKN1OKLk81i7fbWWildJA88EQ9NhnGMSvZqhfz9ICjk,13 -itsdangerous/__init__.py,sha256=n4mkyjlIVn23pgsgCIw0MJKPdcHIetyeRpe5Fwsn8qg,876 -itsdangerous/__pycache__/__init__.cpython-311.pyc,, -itsdangerous/__pycache__/_json.cpython-311.pyc,, -itsdangerous/__pycache__/encoding.cpython-311.pyc,, -itsdangerous/__pycache__/exc.cpython-311.pyc,, -itsdangerous/__pycache__/serializer.cpython-311.pyc,, -itsdangerous/__pycache__/signer.cpython-311.pyc,, -itsdangerous/__pycache__/timed.cpython-311.pyc,, -itsdangerous/__pycache__/url_safe.cpython-311.pyc,, -itsdangerous/_json.py,sha256=wIhs_7-_XZolmyr-JvKNiy_LgAcfevYR0qhCVdlIhg8,450 -itsdangerous/encoding.py,sha256=pgh86snHC76dPLNCnPlrjR5SaYL_M8H-gWRiiLNbhCU,1419 -itsdangerous/exc.py,sha256=VFxmP2lMoSJFqxNMzWonqs35ROII4-fvCBfG0v1Tkbs,3206 -itsdangerous/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -itsdangerous/serializer.py,sha256=zgZ1-U705jHDpt62x_pmLJdryEKDNAbt5UkJtnkcCSw,11144 -itsdangerous/signer.py,sha256=QUH0iX0in-OTptMAXKU5zWMwmOCXn1fsDsubXiGdFN4,9367 -itsdangerous/timed.py,sha256=5CBWLds4Nm8-3bFVC8RxNzFjx6PSwjch8wuZ5cwcHFI,8174 -itsdangerous/url_safe.py,sha256=5bC4jSKOjWNRkWrFseifWVXUnHnPgwOLROjiOwb-eeo,2402 diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/WHEEL b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/top_level.txt b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/top_level.txt deleted file mode 100644 index e163955..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous-2.1.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -itsdangerous diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__init__.py deleted file mode 100644 index fdb2dfd..0000000 --- 
a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -from .encoding import base64_decode as base64_decode -from .encoding import base64_encode as base64_encode -from .encoding import want_bytes as want_bytes -from .exc import BadData as BadData -from .exc import BadHeader as BadHeader -from .exc import BadPayload as BadPayload -from .exc import BadSignature as BadSignature -from .exc import BadTimeSignature as BadTimeSignature -from .exc import SignatureExpired as SignatureExpired -from .serializer import Serializer as Serializer -from .signer import HMACAlgorithm as HMACAlgorithm -from .signer import NoneAlgorithm as NoneAlgorithm -from .signer import Signer as Signer -from .timed import TimedSerializer as TimedSerializer -from .timed import TimestampSigner as TimestampSigner -from .url_safe import URLSafeSerializer as URLSafeSerializer -from .url_safe import URLSafeTimedSerializer as URLSafeTimedSerializer - -__version__ = "2.1.2" diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index f3022ae..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/_json.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/_json.cpython-311.pyc deleted file mode 100644 index 7461883..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/_json.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/encoding.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/encoding.cpython-311.pyc deleted file mode 100644 index 3e4f408..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/encoding.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/exc.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/exc.cpython-311.pyc deleted file mode 100644 index 0cb1e90..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/exc.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/serializer.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/serializer.cpython-311.pyc deleted file mode 100644 index 2058905..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/serializer.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/signer.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/signer.cpython-311.pyc deleted file mode 100644 index 1d39d0d..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/signer.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/timed.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/timed.cpython-311.pyc deleted file mode 100644 index 4688d51..0000000 Binary files 
a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/timed.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/url_safe.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/url_safe.cpython-311.pyc deleted file mode 100644 index 7fcab52..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/__pycache__/url_safe.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/_json.py b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/_json.py deleted file mode 100644 index c70d37a..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/_json.py +++ /dev/null @@ -1,16 +0,0 @@ -import json as _json -import typing as _t - - -class _CompactJSON: - """Wrapper around json module that strips whitespace.""" - - @staticmethod - def loads(payload: _t.Union[str, bytes]) -> _t.Any: - return _json.loads(payload) - - @staticmethod - def dumps(obj: _t.Any, **kwargs: _t.Any) -> str: - kwargs.setdefault("ensure_ascii", False) - kwargs.setdefault("separators", (",", ":")) - return _json.dumps(obj, **kwargs) diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/encoding.py b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/encoding.py deleted file mode 100644 index edb04d1..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/encoding.py +++ /dev/null @@ -1,54 +0,0 @@ -import base64 -import string -import struct -import typing as _t - -from .exc import BadData - -_t_str_bytes = _t.Union[str, bytes] - - -def want_bytes( - s: _t_str_bytes, encoding: str = "utf-8", errors: str = "strict" -) -> bytes: - if isinstance(s, str): - s = s.encode(encoding, errors) - - return s - - -def base64_encode(string: _t_str_bytes) -> bytes: - """Base64 encode a string of bytes or text. The resulting bytes are - safe to use in URLs. - """ - string = want_bytes(string) - return base64.urlsafe_b64encode(string).rstrip(b"=") - - -def base64_decode(string: _t_str_bytes) -> bytes: - """Base64 decode a URL-safe string of bytes or text. The result is - bytes. - """ - string = want_bytes(string, encoding="ascii", errors="ignore") - string += b"=" * (-len(string) % 4) - - try: - return base64.urlsafe_b64decode(string) - except (TypeError, ValueError) as e: - raise BadData("Invalid base64-encoded data") from e - - -# The alphabet used by base64.urlsafe_* -_base64_alphabet = f"{string.ascii_letters}{string.digits}-_=".encode("ascii") - -_int64_struct = struct.Struct(">Q") -_int_to_bytes = _int64_struct.pack -_bytes_to_int = _t.cast("_t.Callable[[bytes], _t.Tuple[int]]", _int64_struct.unpack) - - -def int_to_bytes(num: int) -> bytes: - return _int_to_bytes(num).lstrip(b"\x00") - - -def bytes_to_int(bytestr: bytes) -> int: - return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0] diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/exc.py b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/exc.py deleted file mode 100644 index c38a6af..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/exc.py +++ /dev/null @@ -1,107 +0,0 @@ -import typing as _t -from datetime import datetime - -_t_opt_any = _t.Optional[_t.Any] -_t_opt_exc = _t.Optional[Exception] - - -class BadData(Exception): - """Raised if bad data of any sort was encountered. This is the base - for all exceptions that ItsDangerous defines. - - .. 
versionadded:: 0.15 - """ - - def __init__(self, message: str): - super().__init__(message) - self.message = message - - def __str__(self) -> str: - return self.message - - -class BadSignature(BadData): - """Raised if a signature does not match.""" - - def __init__(self, message: str, payload: _t_opt_any = None): - super().__init__(message) - - #: The payload that failed the signature test. In some - #: situations you might still want to inspect this, even if - #: you know it was tampered with. - #: - #: .. versionadded:: 0.14 - self.payload: _t_opt_any = payload - - -class BadTimeSignature(BadSignature): - """Raised if a time-based signature is invalid. This is a subclass - of :class:`BadSignature`. - """ - - def __init__( - self, - message: str, - payload: _t_opt_any = None, - date_signed: _t.Optional[datetime] = None, - ): - super().__init__(message, payload) - - #: If the signature expired this exposes the date of when the - #: signature was created. This can be helpful in order to - #: tell the user how long a link has been gone stale. - #: - #: .. versionchanged:: 2.0 - #: The datetime value is timezone-aware rather than naive. - #: - #: .. versionadded:: 0.14 - self.date_signed = date_signed - - -class SignatureExpired(BadTimeSignature): - """Raised if a signature timestamp is older than ``max_age``. This - is a subclass of :exc:`BadTimeSignature`. - """ - - -class BadHeader(BadSignature): - """Raised if a signed header is invalid in some form. This only - happens for serializers that have a header that goes with the - signature. - - .. versionadded:: 0.24 - """ - - def __init__( - self, - message: str, - payload: _t_opt_any = None, - header: _t_opt_any = None, - original_error: _t_opt_exc = None, - ): - super().__init__(message, payload) - - #: If the header is actually available but just malformed it - #: might be stored here. - self.header: _t_opt_any = header - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: _t_opt_exc = original_error - - -class BadPayload(BadData): - """Raised if a payload is invalid. This could happen if the payload - is loaded despite an invalid signature, or if there is a mismatch - between the serializer and deserializer. The original exception - that occurred during loading is stored on as :attr:`original_error`. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str, original_error: _t_opt_exc = None): - super().__init__(message) - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. 
- self.original_error: _t_opt_exc = original_error diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/py.typed b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/serializer.py b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/serializer.py deleted file mode 100644 index 9f4a84a..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/serializer.py +++ /dev/null @@ -1,295 +0,0 @@ -import json -import typing as _t - -from .encoding import want_bytes -from .exc import BadPayload -from .exc import BadSignature -from .signer import _make_keys_list -from .signer import Signer - -_t_str_bytes = _t.Union[str, bytes] -_t_opt_str_bytes = _t.Optional[_t_str_bytes] -_t_kwargs = _t.Dict[str, _t.Any] -_t_opt_kwargs = _t.Optional[_t_kwargs] -_t_signer = _t.Type[Signer] -_t_fallbacks = _t.List[_t.Union[_t_kwargs, _t.Tuple[_t_signer, _t_kwargs], _t_signer]] -_t_load_unsafe = _t.Tuple[bool, _t.Any] -_t_secret_key = _t.Union[_t.Iterable[_t_str_bytes], _t_str_bytes] - - -def is_text_serializer(serializer: _t.Any) -> bool: - """Checks whether a serializer generates text or binary.""" - return isinstance(serializer.dumps({}), str) - - -class Serializer: - """A serializer wraps a :class:`~itsdangerous.signer.Signer` to - enable serializing and securely signing data other than bytes. It - can unsign to verify that the data hasn't been changed. - - The serializer provides :meth:`dumps` and :meth:`loads`, similar to - :mod:`json`, and by default uses :mod:`json` internally to serialize - the data to bytes. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param serializer: An object that provides ``dumps`` and ``loads`` - methods for serializing data to a string. Defaults to - :attr:`default_serializer`, which defaults to :mod:`json`. - :param serializer_kwargs: Keyword arguments to pass when calling - ``serializer.dumps``. - :param signer: A ``Signer`` class to instantiate when signing data. - Defaults to :attr:`default_signer`, which defaults to - :class:`~itsdangerous.signer.Signer`. - :param signer_kwargs: Keyword arguments to pass when instantiating - the ``Signer`` class. - :param fallback_signers: List of signer parameters to try when - unsigning with the default signer fails. Each item can be a dict - of ``signer_kwargs``, a ``Signer`` class, or a tuple of - ``(signer, signer_kwargs)``. Defaults to - :attr:`default_fallback_signers`. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 2.0 - Removed the default SHA-512 fallback signer from - ``default_fallback_signers``. - - .. versionchanged:: 1.1 - Added support for ``fallback_signers`` and configured a default - SHA-512 fallback. This fallback is for users who used the yanked - 1.0.0 release which defaulted to SHA-512. - - .. versionchanged:: 0.14 - The ``signer`` and ``signer_kwargs`` parameters were added to - the constructor. 
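The exception hierarchy above (BadData at the root, with BadSignature, SignatureExpired and BadPayload below it) is what callers are expected to catch; a hedged sketch using a URL-safe timed serializer with placeholder secret and salt:

from itsdangerous import (BadData, BadSignature, SignatureExpired,
                          URLSafeTimedSerializer)

s = URLSafeTimedSerializer("placeholder-secret", salt="email-confirm")

def check(token):
    try:
        return s.loads(token, max_age=3600)
    except SignatureExpired:
        return "expired"    # signature is valid but older than max_age
    except BadSignature:
        return "tampered"   # the unverified payload must not be trusted
    except BadData:
        return "malformed"  # catch-all base class for the errors above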
- """ - - #: The default serialization module to use to serialize data to a - #: string internally. The default is :mod:`json`, but can be changed - #: to any object that provides ``dumps`` and ``loads`` methods. - default_serializer: _t.Any = json - - #: The default ``Signer`` class to instantiate when signing data. - #: The default is :class:`itsdangerous.signer.Signer`. - default_signer: _t_signer = Signer - - #: The default fallback signers to try when unsigning fails. - default_fallback_signers: _t_fallbacks = [] - - def __init__( - self, - secret_key: _t_secret_key, - salt: _t_opt_str_bytes = b"itsdangerous", - serializer: _t.Any = None, - serializer_kwargs: _t_opt_kwargs = None, - signer: _t.Optional[_t_signer] = None, - signer_kwargs: _t_opt_kwargs = None, - fallback_signers: _t.Optional[_t_fallbacks] = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. - self.secret_keys: _t.List[bytes] = _make_keys_list(secret_key) - - if salt is not None: - salt = want_bytes(salt) - # if salt is None then the signer's default is used - - self.salt = salt - - if serializer is None: - serializer = self.default_serializer - - self.serializer: _t.Any = serializer - self.is_text_serializer: bool = is_text_serializer(serializer) - - if signer is None: - signer = self.default_signer - - self.signer: _t_signer = signer - self.signer_kwargs: _t_kwargs = signer_kwargs or {} - - if fallback_signers is None: - fallback_signers = list(self.default_fallback_signers or ()) - - self.fallback_signers: _t_fallbacks = fallback_signers - self.serializer_kwargs: _t_kwargs = serializer_kwargs or {} - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def load_payload( - self, payload: bytes, serializer: _t.Optional[_t.Any] = None - ) -> _t.Any: - """Loads the encoded object. This function raises - :class:`.BadPayload` if the payload is not valid. The - ``serializer`` parameter can be used to override the serializer - stored on the class. The encoded ``payload`` should always be - bytes. - """ - if serializer is None: - serializer = self.serializer - is_text = self.is_text_serializer - else: - is_text = is_text_serializer(serializer) - - try: - if is_text: - return serializer.loads(payload.decode("utf-8")) - - return serializer.loads(payload) - except Exception as e: - raise BadPayload( - "Could not load the payload because an exception" - " occurred on unserializing the data.", - original_error=e, - ) from e - - def dump_payload(self, obj: _t.Any) -> bytes: - """Dumps the encoded object. The return value is always bytes. - If the internal serializer returns text, the value will be - encoded as UTF-8. - """ - return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs)) - - def make_signer(self, salt: _t_opt_str_bytes = None) -> Signer: - """Creates a new instance of the signer to be used. The default - implementation uses the :class:`.Signer` base class. - """ - if salt is None: - salt = self.salt - - return self.signer(self.secret_keys, salt=salt, **self.signer_kwargs) - - def iter_unsigners(self, salt: _t_opt_str_bytes = None) -> _t.Iterator[Signer]: - """Iterates over all signers to be tried for unsigning. 
Starts - with the configured signer, then constructs each signer - specified in ``fallback_signers``. - """ - if salt is None: - salt = self.salt - - yield self.make_signer(salt) - - for fallback in self.fallback_signers: - if isinstance(fallback, dict): - kwargs = fallback - fallback = self.signer - elif isinstance(fallback, tuple): - fallback, kwargs = fallback - else: - kwargs = self.signer_kwargs - - for secret_key in self.secret_keys: - yield fallback(secret_key, salt=salt, **kwargs) - - def dumps(self, obj: _t.Any, salt: _t_opt_str_bytes = None) -> _t_str_bytes: - """Returns a signed string serialized with the internal - serializer. The return value can be either a byte or unicode - string depending on the format of the internal serializer. - """ - payload = want_bytes(self.dump_payload(obj)) - rv = self.make_signer(salt).sign(payload) - - if self.is_text_serializer: - return rv.decode("utf-8") - - return rv - - def dump(self, obj: _t.Any, f: _t.IO, salt: _t_opt_str_bytes = None) -> None: - """Like :meth:`dumps` but dumps into a file. The file handle has - to be compatible with what the internal serializer expects. - """ - f.write(self.dumps(obj, salt)) - - def loads( - self, s: _t_str_bytes, salt: _t_opt_str_bytes = None, **kwargs: _t.Any - ) -> _t.Any: - """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the - signature validation fails. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - return self.load_payload(signer.unsign(s)) - except BadSignature as err: - last_exception = err - - raise _t.cast(BadSignature, last_exception) - - def load(self, f: _t.IO, salt: _t_opt_str_bytes = None) -> _t.Any: - """Like :meth:`loads` but loads from a file.""" - return self.loads(f.read(), salt) - - def loads_unsafe( - self, s: _t_str_bytes, salt: _t_opt_str_bytes = None - ) -> _t_load_unsafe: - """Like :meth:`loads` but without verifying the signature. This - is potentially very dangerous to use depending on how your - serializer works. The return value is ``(signature_valid, - payload)`` instead of just the payload. The first item will be a - boolean that indicates if the signature is valid. This function - never fails. - - Use it for debugging only and if you know that your serializer - module is not exploitable (for example, do not use it with a - pickle serializer). - - .. versionadded:: 0.15 - """ - return self._loads_unsafe_impl(s, salt) - - def _loads_unsafe_impl( - self, - s: _t_str_bytes, - salt: _t_opt_str_bytes, - load_kwargs: _t_opt_kwargs = None, - load_payload_kwargs: _t_opt_kwargs = None, - ) -> _t_load_unsafe: - """Low level helper function to implement :meth:`loads_unsafe` - in serializer subclasses. - """ - if load_kwargs is None: - load_kwargs = {} - - try: - return True, self.loads(s, salt=salt, **load_kwargs) - except BadSignature as e: - if e.payload is None: - return False, None - - if load_payload_kwargs is None: - load_payload_kwargs = {} - - try: - return ( - False, - self.load_payload(e.payload, **load_payload_kwargs), - ) - except BadPayload: - return False, None - - def load_unsafe(self, f: _t.IO, salt: _t_opt_str_bytes = None) -> _t_load_unsafe: - """Like :meth:`loads_unsafe` but loads from a file. - - .. 
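A hedged round-trip sketch for the Serializer above, showing key rotation and tamper handling; the keys and salt are placeholders:

from itsdangerous import BadSignature, Serializer

# Keys are listed oldest to newest: the last key signs, all keys are tried
# when verifying, which allows rotating secrets without invalidating tokens.
serializer = Serializer(["old-secret-key", "new-secret-key"], salt="activate")

token = serializer.dumps({"user_id": 42})

try:
    data = serializer.loads(token)
except BadSignature:
    data = None  # tampered token, or one signed with an unknown key

assert data == {"user_id": 42}

# loads_unsafe() skips verification and reports validity separately;
# the docstring above recommends it for debugging only.
valid, payload = serializer.loads_unsafe(token)
assert valid and payload == {"user_id": 42}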
versionadded:: 0.15 - """ - return self.loads_unsafe(f.read(), salt=salt) diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/signer.py b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/signer.py deleted file mode 100644 index aa12005..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/signer.py +++ /dev/null @@ -1,257 +0,0 @@ -import hashlib -import hmac -import typing as _t - -from .encoding import _base64_alphabet -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import want_bytes -from .exc import BadSignature - -_t_str_bytes = _t.Union[str, bytes] -_t_opt_str_bytes = _t.Optional[_t_str_bytes] -_t_secret_key = _t.Union[_t.Iterable[_t_str_bytes], _t_str_bytes] - - -class SigningAlgorithm: - """Subclasses must implement :meth:`get_signature` to provide - signature generation functionality. - """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - """Returns the signature for the given key and value.""" - raise NotImplementedError() - - def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: - """Verifies the given signature matches the expected - signature. - """ - return hmac.compare_digest(sig, self.get_signature(key, value)) - - -class NoneAlgorithm(SigningAlgorithm): - """Provides an algorithm that does not perform any signing and - returns an empty signature. - """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - return b"" - - -class HMACAlgorithm(SigningAlgorithm): - """Provides signature generation using HMACs.""" - - #: The digest method to use with the MAC algorithm. This defaults to - #: SHA1, but can be changed to any other function in the hashlib - #: module. - default_digest_method: _t.Any = staticmethod(hashlib.sha1) - - def __init__(self, digest_method: _t.Any = None): - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: _t.Any = digest_method - - def get_signature(self, key: bytes, value: bytes) -> bytes: - mac = hmac.new(key, msg=value, digestmod=self.digest_method) - return mac.digest() - - -def _make_keys_list(secret_key: _t_secret_key) -> _t.List[bytes]: - if isinstance(secret_key, (str, bytes)): - return [want_bytes(secret_key)] - - return [want_bytes(s) for s in secret_key] - - -class Signer: - """A signer securely signs bytes, then unsigns them to verify that - the value hasn't been changed. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param sep: Separator between the signature and value. - :param key_derivation: How to derive the signing key from the secret - key and salt. Possible values are ``concat``, ``django-concat``, - or ``hmac``. Defaults to :attr:`default_key_derivation`, which - defaults to ``django-concat``. - :param digest_method: Hash function to use when generating the HMAC - signature. Defaults to :attr:`default_digest_method`, which - defaults to :func:`hashlib.sha1`. Note that the security of the - hash alone doesn't apply when used intermediately in HMAC. 
- :param algorithm: A :class:`SigningAlgorithm` instance to use - instead of building a default :class:`HMACAlgorithm` with the - ``digest_method``. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 0.18 - ``algorithm`` was added as an argument to the class constructor. - - .. versionchanged:: 0.14 - ``key_derivation`` and ``digest_method`` were added as arguments - to the class constructor. - """ - - #: The default digest method to use for the signer. The default is - #: :func:`hashlib.sha1`, but can be changed to any :mod:`hashlib` or - #: compatible object. Note that the security of the hash alone - #: doesn't apply when used intermediately in HMAC. - #: - #: .. versionadded:: 0.14 - default_digest_method: _t.Any = staticmethod(hashlib.sha1) - - #: The default scheme to use to derive the signing key from the - #: secret key and salt. The default is ``django-concat``. Possible - #: values are ``concat``, ``django-concat``, and ``hmac``. - #: - #: .. versionadded:: 0.14 - default_key_derivation: str = "django-concat" - - def __init__( - self, - secret_key: _t_secret_key, - salt: _t_opt_str_bytes = b"itsdangerous.Signer", - sep: _t_str_bytes = b".", - key_derivation: _t.Optional[str] = None, - digest_method: _t.Optional[_t.Any] = None, - algorithm: _t.Optional[SigningAlgorithm] = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. - self.secret_keys: _t.List[bytes] = _make_keys_list(secret_key) - self.sep: bytes = want_bytes(sep) - - if self.sep in _base64_alphabet: - raise ValueError( - "The given separator cannot be used because it may be" - " contained in the signature itself. ASCII letters," - " digits, and '-_=' must not be used." - ) - - if salt is not None: - salt = want_bytes(salt) - else: - salt = b"itsdangerous.Signer" - - self.salt = salt - - if key_derivation is None: - key_derivation = self.default_key_derivation - - self.key_derivation: str = key_derivation - - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: _t.Any = digest_method - - if algorithm is None: - algorithm = HMACAlgorithm(self.digest_method) - - self.algorithm: SigningAlgorithm = algorithm - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def derive_key(self, secret_key: _t_opt_str_bytes = None) -> bytes: - """This method is called to derive the key. The default key - derivation choices can be overridden here. Key derivation is not - intended to be used as a security method to make a complex key - out of a short password. Instead you should use large random - secret keys. - - :param secret_key: A specific secret key to derive from. - Defaults to the last item in :attr:`secret_keys`. - - .. versionchanged:: 2.0 - Added the ``secret_key`` parameter. 
- """ - if secret_key is None: - secret_key = self.secret_keys[-1] - else: - secret_key = want_bytes(secret_key) - - if self.key_derivation == "concat": - return _t.cast(bytes, self.digest_method(self.salt + secret_key).digest()) - elif self.key_derivation == "django-concat": - return _t.cast( - bytes, self.digest_method(self.salt + b"signer" + secret_key).digest() - ) - elif self.key_derivation == "hmac": - mac = hmac.new(secret_key, digestmod=self.digest_method) - mac.update(self.salt) - return mac.digest() - elif self.key_derivation == "none": - return secret_key - else: - raise TypeError("Unknown key derivation method") - - def get_signature(self, value: _t_str_bytes) -> bytes: - """Returns the signature for the given value.""" - value = want_bytes(value) - key = self.derive_key() - sig = self.algorithm.get_signature(key, value) - return base64_encode(sig) - - def sign(self, value: _t_str_bytes) -> bytes: - """Signs the given string.""" - value = want_bytes(value) - return value + self.sep + self.get_signature(value) - - def verify_signature(self, value: _t_str_bytes, sig: _t_str_bytes) -> bool: - """Verifies the signature for the given value.""" - try: - sig = base64_decode(sig) - except Exception: - return False - - value = want_bytes(value) - - for secret_key in reversed(self.secret_keys): - key = self.derive_key(secret_key) - - if self.algorithm.verify_signature(key, value, sig): - return True - - return False - - def unsign(self, signed_value: _t_str_bytes) -> bytes: - """Unsigns the given string.""" - signed_value = want_bytes(signed_value) - - if self.sep not in signed_value: - raise BadSignature(f"No {self.sep!r} found in value") - - value, sig = signed_value.rsplit(self.sep, 1) - - if self.verify_signature(value, sig): - return value - - raise BadSignature(f"Signature {sig!r} does not match", payload=value) - - def validate(self, signed_value: _t_str_bytes) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid. - """ - try: - self.unsign(signed_value) - return True - except BadSignature: - return False diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/timed.py b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/timed.py deleted file mode 100644 index cad8da3..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/timed.py +++ /dev/null @@ -1,234 +0,0 @@ -import time -import typing -import typing as _t -from datetime import datetime -from datetime import timezone - -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import bytes_to_int -from .encoding import int_to_bytes -from .encoding import want_bytes -from .exc import BadSignature -from .exc import BadTimeSignature -from .exc import SignatureExpired -from .serializer import Serializer -from .signer import Signer - -_t_str_bytes = _t.Union[str, bytes] -_t_opt_str_bytes = _t.Optional[_t_str_bytes] -_t_opt_int = _t.Optional[int] - -if _t.TYPE_CHECKING: - import typing_extensions as _te - - -class TimestampSigner(Signer): - """Works like the regular :class:`.Signer` but also records the time - of the signing and can be used to expire signatures. The - :meth:`unsign` method can raise :exc:`.SignatureExpired` if the - unsigning failed because the signature is expired. - """ - - def get_timestamp(self) -> int: - """Returns the current timestamp. The function must return an - integer. 
- """ - return int(time.time()) - - def timestamp_to_datetime(self, ts: int) -> datetime: - """Convert the timestamp from :meth:`get_timestamp` into an - aware :class`datetime.datetime` in UTC. - - .. versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - return datetime.fromtimestamp(ts, tz=timezone.utc) - - def sign(self, value: _t_str_bytes) -> bytes: - """Signs the given string and also attaches time information.""" - value = want_bytes(value) - timestamp = base64_encode(int_to_bytes(self.get_timestamp())) - sep = want_bytes(self.sep) - value = value + sep + timestamp - return value + sep + self.get_signature(value) - - # Ignore overlapping signatures check, return_timestamp is the only - # parameter that affects the return type. - - @typing.overload - def unsign( # type: ignore - self, - signed_value: _t_str_bytes, - max_age: _t_opt_int = None, - return_timestamp: "_te.Literal[False]" = False, - ) -> bytes: - ... - - @typing.overload - def unsign( - self, - signed_value: _t_str_bytes, - max_age: _t_opt_int = None, - return_timestamp: "_te.Literal[True]" = True, - ) -> _t.Tuple[bytes, datetime]: - ... - - def unsign( - self, - signed_value: _t_str_bytes, - max_age: _t_opt_int = None, - return_timestamp: bool = False, - ) -> _t.Union[_t.Tuple[bytes, datetime], bytes]: - """Works like the regular :meth:`.Signer.unsign` but can also - validate the time. See the base docstring of the class for - the general behavior. If ``return_timestamp`` is ``True`` the - timestamp of the signature will be returned as an aware - :class:`datetime.datetime` object in UTC. - - .. versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - try: - result = super().unsign(signed_value) - sig_error = None - except BadSignature as e: - sig_error = e - result = e.payload or b"" - - sep = want_bytes(self.sep) - - # If there is no timestamp in the result there is something - # seriously wrong. In case there was a signature error, we raise - # that one directly, otherwise we have a weird situation in - # which we shouldn't have come except someone uses a time-based - # serializer on non-timestamp data, so catch that. - if sep not in result: - if sig_error: - raise sig_error - - raise BadTimeSignature("timestamp missing", payload=result) - - value, ts_bytes = result.rsplit(sep, 1) - ts_int: _t_opt_int = None - ts_dt: _t.Optional[datetime] = None - - try: - ts_int = bytes_to_int(base64_decode(ts_bytes)) - except Exception: - pass - - # Signature is *not* okay. Raise a proper error now that we have - # split the value and the timestamp. - if sig_error is not None: - if ts_int is not None: - try: - ts_dt = self.timestamp_to_datetime(ts_int) - except (ValueError, OSError, OverflowError) as exc: - # Windows raises OSError - # 32-bit raises OverflowError - raise BadTimeSignature( - "Malformed timestamp", payload=value - ) from exc - - raise BadTimeSignature(str(sig_error), payload=value, date_signed=ts_dt) - - # Signature was okay but the timestamp is actually not there or - # malformed. Should not happen, but we handle it anyway. 
- if ts_int is None: - raise BadTimeSignature("Malformed timestamp", payload=value) - - # Check timestamp is not older than max_age - if max_age is not None: - age = self.get_timestamp() - ts_int - - if age > max_age: - raise SignatureExpired( - f"Signature age {age} > {max_age} seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if age < 0: - raise SignatureExpired( - f"Signature age {age} < 0 seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if return_timestamp: - return value, self.timestamp_to_datetime(ts_int) - - return value - - def validate(self, signed_value: _t_str_bytes, max_age: _t_opt_int = None) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid.""" - try: - self.unsign(signed_value, max_age=max_age) - return True - except BadSignature: - return False - - -class TimedSerializer(Serializer): - """Uses :class:`TimestampSigner` instead of the default - :class:`.Signer`. - """ - - default_signer: _t.Type[TimestampSigner] = TimestampSigner - - def iter_unsigners( - self, salt: _t_opt_str_bytes = None - ) -> _t.Iterator[TimestampSigner]: - return _t.cast("_t.Iterator[TimestampSigner]", super().iter_unsigners(salt)) - - # TODO: Signature is incompatible because parameters were added - # before salt. - - def loads( # type: ignore - self, - s: _t_str_bytes, - max_age: _t_opt_int = None, - return_timestamp: bool = False, - salt: _t_opt_str_bytes = None, - ) -> _t.Any: - """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the - signature validation fails. If a ``max_age`` is provided it will - ensure the signature is not older than that time in seconds. In - case the signature is outdated, :exc:`.SignatureExpired` is - raised. All arguments are forwarded to the signer's - :meth:`~TimestampSigner.unsign` method. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - base64d, timestamp = signer.unsign( - s, max_age=max_age, return_timestamp=True - ) - payload = self.load_payload(base64d) - - if return_timestamp: - return payload, timestamp - - return payload - except SignatureExpired: - # The signature was unsigned successfully but was - # expired. Do not try the next signer. - raise - except BadSignature as err: - last_exception = err - - raise _t.cast(BadSignature, last_exception) - - def loads_unsafe( # type: ignore - self, - s: _t_str_bytes, - max_age: _t_opt_int = None, - salt: _t_opt_str_bytes = None, - ) -> _t.Tuple[bool, _t.Any]: - return self._loads_unsafe_impl(s, salt, load_kwargs={"max_age": max_age}) diff --git a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/url_safe.py b/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/url_safe.py deleted file mode 100644 index d5a9b0c..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/itsdangerous/url_safe.py +++ /dev/null @@ -1,80 +0,0 @@ -import typing as _t -import zlib - -from ._json import _CompactJSON -from .encoding import base64_decode -from .encoding import base64_encode -from .exc import BadPayload -from .serializer import Serializer -from .timed import TimedSerializer - - -class URLSafeSerializerMixin(Serializer): - """Mixed in with a regular serializer it will attempt to zlib - compress the string to make it shorter if necessary. It will also - base64 encode the string so that it can safely be placed in a URL. 
- """ - - default_serializer = _CompactJSON - - def load_payload( - self, - payload: bytes, - *args: _t.Any, - serializer: _t.Optional[_t.Any] = None, - **kwargs: _t.Any, - ) -> _t.Any: - decompress = False - - if payload.startswith(b"."): - payload = payload[1:] - decompress = True - - try: - json = base64_decode(payload) - except Exception as e: - raise BadPayload( - "Could not base64 decode the payload because of an exception", - original_error=e, - ) from e - - if decompress: - try: - json = zlib.decompress(json) - except Exception as e: - raise BadPayload( - "Could not zlib decompress the payload before decoding the payload", - original_error=e, - ) from e - - return super().load_payload(json, *args, **kwargs) - - def dump_payload(self, obj: _t.Any) -> bytes: - json = super().dump_payload(obj) - is_compressed = False - compressed = zlib.compress(json) - - if len(compressed) < (len(json) - 1): - json = compressed - is_compressed = True - - base64d = base64_encode(json) - - if is_compressed: - base64d = b"." + base64d - - return base64d - - -class URLSafeSerializer(URLSafeSerializerMixin, Serializer): - """Works like :class:`.Serializer` but dumps and loads into a URL - safe string consisting of the upper and lowercase character of the - alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ - - -class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer): - """Works like :class:`.TimedSerializer` but dumps and loads into a - URL safe string consisting of the upper and lowercase character of - the alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__init__.py b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__init__.py deleted file mode 100644 index e323926..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Jinja is a template engine written in pure Python. It provides a -non-XML syntax that supports inline expressions and an optional -sandboxed environment. 
-""" -from .bccache import BytecodeCache as BytecodeCache -from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache -from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache -from .environment import Environment as Environment -from .environment import Template as Template -from .exceptions import TemplateAssertionError as TemplateAssertionError -from .exceptions import TemplateError as TemplateError -from .exceptions import TemplateNotFound as TemplateNotFound -from .exceptions import TemplateRuntimeError as TemplateRuntimeError -from .exceptions import TemplatesNotFound as TemplatesNotFound -from .exceptions import TemplateSyntaxError as TemplateSyntaxError -from .exceptions import UndefinedError as UndefinedError -from .loaders import BaseLoader as BaseLoader -from .loaders import ChoiceLoader as ChoiceLoader -from .loaders import DictLoader as DictLoader -from .loaders import FileSystemLoader as FileSystemLoader -from .loaders import FunctionLoader as FunctionLoader -from .loaders import ModuleLoader as ModuleLoader -from .loaders import PackageLoader as PackageLoader -from .loaders import PrefixLoader as PrefixLoader -from .runtime import ChainableUndefined as ChainableUndefined -from .runtime import DebugUndefined as DebugUndefined -from .runtime import make_logging_undefined as make_logging_undefined -from .runtime import StrictUndefined as StrictUndefined -from .runtime import Undefined as Undefined -from .utils import clear_caches as clear_caches -from .utils import is_undefined as is_undefined -from .utils import pass_context as pass_context -from .utils import pass_environment as pass_environment -from .utils import pass_eval_context as pass_eval_context -from .utils import select_autoescape as select_autoescape - -__version__ = "3.1.2" diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 41d59e9..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc deleted file mode 100644 index f4e7fbf..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc deleted file mode 100644 index 6447c06..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc deleted file mode 100644 index 7007a9d..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc deleted file mode 100644 index c283505..0000000 Binary files 
a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/constants.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/constants.cpython-311.pyc deleted file mode 100644 index 1f74a44..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/constants.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/debug.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/debug.cpython-311.pyc deleted file mode 100644 index e07f433..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/debug.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc deleted file mode 100644 index 8e39000..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc deleted file mode 100644 index 150e2ee..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc deleted file mode 100644 index 32374ff..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/ext.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/ext.cpython-311.pyc deleted file mode 100644 index 6fdf65c..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/ext.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc deleted file mode 100644 index 40b7382..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc deleted file mode 100644 index afa00ce..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc deleted file mode 100644 index a805439..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc 
deleted file mode 100644 index 5bcc562..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/meta.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/meta.cpython-311.pyc deleted file mode 100644 index 12f33f4..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/meta.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc deleted file mode 100644 index 98183dc..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc deleted file mode 100644 index 2dffe51..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc deleted file mode 100644 index effe88d..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc deleted file mode 100644 index 4d388ef..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc deleted file mode 100644 index 061b253..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/sandbox.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/sandbox.cpython-311.pyc deleted file mode 100644 index bc98103..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/sandbox.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc deleted file mode 100644 index a971522..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc deleted file mode 100644 index 6d2eb12..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc 
b/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc deleted file mode 100644 index a6f35c5..0000000 Binary files a/venv_flaskchat/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc and /dev/null differ diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/_identifier.py b/venv_flaskchat/lib/python3.11/site-packages/jinja2/_identifier.py deleted file mode 100644 index 928c150..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/jinja2/_identifier.py +++ /dev/null @@ -1,6 +0,0 @@ -import re - -# generated by scripts/generate_identifier_pattern.py -pattern = re.compile( - r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 -) diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/async_utils.py b/venv_flaskchat/lib/python3.11/site-packages/jinja2/async_utils.py deleted file mode 100644 index 1a4f389..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/jinja2/async_utils.py +++ /dev/null @@ -1,84 +0,0 @@ -import inspect -import typing as t -from functools import WRAPPER_ASSIGNMENTS -from functools import wraps - -from .utils import _PassArg -from .utils import pass_eval_context - -V = t.TypeVar("V") - - -def async_variant(normal_func): # type: ignore - def decorator(async_func): # type: ignore - pass_arg = _PassArg.from_obj(normal_func) - need_eval_context = pass_arg is None - - if pass_arg is _PassArg.environment: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].is_async) - - else: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].environment.is_async) - - # Take the doc and annotations from the sync function, but the - # name from the async function. Pallets-Sphinx-Themes - # build_function_directive expects __wrapped__ to point to the - # sync function. 
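The async_utils module removed here is the machinery that lets a single filter implementation serve both sync and async rendering. A minimal sketch of the async rendering path it supports, using an inline template purely for illustration:

import asyncio

from jinja2 import Environment

# With enable_async=True, render_async() drives the template through the
# async code path that async_variant/auto_await exist to support.
env = Environment(enable_async=True)
template = env.from_string("Hello {{ name }}!")

async def main() -> None:
    print(await template.render_async(name="world"))

asyncio.run(main())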
- async_func_attrs = ("__module__", "__name__", "__qualname__") - normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) - - @wraps(normal_func, assigned=normal_func_attrs) - @wraps(async_func, assigned=async_func_attrs, updated=()) - def wrapper(*args, **kwargs): # type: ignore - b = is_async(args) - - if need_eval_context: - args = args[1:] - - if b: - return async_func(*args, **kwargs) - - return normal_func(*args, **kwargs) - - if need_eval_context: - wrapper = pass_eval_context(wrapper) - - wrapper.jinja_async_variant = True - return wrapper - - return decorator - - -_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} - - -async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": - # Avoid a costly call to isawaitable - if type(value) in _common_primitives: - return t.cast("V", value) - - if inspect.isawaitable(value): - return await t.cast("t.Awaitable[V]", value) - - return t.cast("V", value) - - -async def auto_aiter( - iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> "t.AsyncIterator[V]": - if hasattr(iterable, "__aiter__"): - async for item in t.cast("t.AsyncIterable[V]", iterable): - yield item - else: - for item in t.cast("t.Iterable[V]", iterable): - yield item - - -async def auto_to_list( - value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> t.List["V"]: - return [x async for x in auto_aiter(value)] diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/bccache.py b/venv_flaskchat/lib/python3.11/site-packages/jinja2/bccache.py deleted file mode 100644 index d0ddf56..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/jinja2/bccache.py +++ /dev/null @@ -1,406 +0,0 @@ -"""The optional bytecode cache system. This is useful if you have very -complex template situations and the compilation of all those templates -slows down your application too much. - -Situations where this is useful are often forking web applications that -are initialized on the first request. -""" -import errno -import fnmatch -import marshal -import os -import pickle -import stat -import sys -import tempfile -import typing as t -from hashlib import sha1 -from io import BytesIO -from types import CodeType - -if t.TYPE_CHECKING: - import typing_extensions as te - from .environment import Environment - - class _MemcachedClient(te.Protocol): - def get(self, key: str) -> bytes: - ... - - def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None: - ... - - -bc_version = 5 -# Magic bytes to identify Jinja bytecode cache files. Contains the -# Python major and minor version to avoid loading incompatible bytecode -# if a project upgrades its Python version. -bc_magic = ( - b"j2" - + pickle.dumps(bc_version, 2) - + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) -) - - -class Bucket: - """Buckets are used to store the bytecode for one template. It's created - and initialized by the bytecode cache and passed to the loading functions. - - The buckets get an internal checksum from the cache assigned and use this - to automatically reject outdated cache material. Individual bytecode - cache subclasses don't have to care about cache invalidation. 
- """ - - def __init__(self, environment: "Environment", key: str, checksum: str) -> None: - self.environment = environment - self.key = key - self.checksum = checksum - self.reset() - - def reset(self) -> None: - """Resets the bucket (unloads the bytecode).""" - self.code: t.Optional[CodeType] = None - - def load_bytecode(self, f: t.BinaryIO) -> None: - """Loads bytecode from a file or file like object.""" - # make sure the magic header is correct - magic = f.read(len(bc_magic)) - if magic != bc_magic: - self.reset() - return - # the source code of the file changed, we need to reload - checksum = pickle.load(f) - if self.checksum != checksum: - self.reset() - return - # if marshal_load fails then we need to reload - try: - self.code = marshal.load(f) - except (EOFError, ValueError, TypeError): - self.reset() - return - - def write_bytecode(self, f: t.IO[bytes]) -> None: - """Dump the bytecode into the file or file like object passed.""" - if self.code is None: - raise TypeError("can't write empty bucket") - f.write(bc_magic) - pickle.dump(self.checksum, f, 2) - marshal.dump(self.code, f) - - def bytecode_from_string(self, string: bytes) -> None: - """Load bytecode from bytes.""" - self.load_bytecode(BytesIO(string)) - - def bytecode_to_string(self) -> bytes: - """Return the bytecode as bytes.""" - out = BytesIO() - self.write_bytecode(out) - return out.getvalue() - - -class BytecodeCache: - """To implement your own bytecode cache you have to subclass this class - and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of - these methods are passed a :class:`~jinja2.bccache.Bucket`. - - A very basic bytecode cache that saves the bytecode on the file system:: - - from os import path - - class MyCache(BytecodeCache): - - def __init__(self, directory): - self.directory = directory - - def load_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - if path.exists(filename): - with open(filename, 'rb') as f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - with open(filename, 'wb') as f: - bucket.write_bytecode(f) - - A more advanced version of a filesystem based bytecode cache is part of - Jinja. - """ - - def load_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to load bytecode into a - bucket. If they are not able to find code in the cache for the - bucket, it must not do anything. - """ - raise NotImplementedError() - - def dump_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to write the bytecode - from a bucket back to the cache. If it unable to do so it must not - fail silently but raise an exception. - """ - raise NotImplementedError() - - def clear(self) -> None: - """Clears the cache. This method is not used by Jinja but should be - implemented to allow applications to clear the bytecode cache used - by a particular environment. 
- """ - - def get_cache_key( - self, name: str, filename: t.Optional[t.Union[str]] = None - ) -> str: - """Returns the unique hash key for this template name.""" - hash = sha1(name.encode("utf-8")) - - if filename is not None: - hash.update(f"|{filename}".encode()) - - return hash.hexdigest() - - def get_source_checksum(self, source: str) -> str: - """Returns a checksum for the source.""" - return sha1(source.encode("utf-8")).hexdigest() - - def get_bucket( - self, - environment: "Environment", - name: str, - filename: t.Optional[str], - source: str, - ) -> Bucket: - """Return a cache bucket for the given template. All arguments are - mandatory but filename may be `None`. - """ - key = self.get_cache_key(name, filename) - checksum = self.get_source_checksum(source) - bucket = Bucket(environment, key, checksum) - self.load_bytecode(bucket) - return bucket - - def set_bucket(self, bucket: Bucket) -> None: - """Put the bucket into the cache.""" - self.dump_bytecode(bucket) - - -class FileSystemBytecodeCache(BytecodeCache): - """A bytecode cache that stores bytecode on the filesystem. It accepts - two arguments: The directory where the cache items are stored and a - pattern string that is used to build the filename. - - If no directory is specified a default cache directory is selected. On - Windows the user's temp directory is used, on UNIX systems a directory - is created for the user in the system temp directory. - - The pattern can be used to have multiple separate caches operate on the - same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` - is replaced with the cache key. - - >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') - - This bytecode cache supports clearing of the cache using the clear method. - """ - - def __init__( - self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" - ) -> None: - if directory is None: - directory = self._get_default_cache_dir() - self.directory = directory - self.pattern = pattern - - def _get_default_cache_dir(self) -> str: - def _unsafe_dir() -> "te.NoReturn": - raise RuntimeError( - "Cannot determine safe temp directory. You " - "need to explicitly provide one." - ) - - tmpdir = tempfile.gettempdir() - - # On windows the temporary directory is used specific unless - # explicitly forced otherwise. We can just use that. - if os.name == "nt": - return tmpdir - if not hasattr(os, "getuid"): - _unsafe_dir() - - dirname = f"_jinja2-cache-{os.getuid()}" - actual_dir = os.path.join(tmpdir, dirname) - - try: - os.mkdir(actual_dir, stat.S_IRWXU) - except OSError as e: - if e.errno != errno.EEXIST: - raise - try: - os.chmod(actual_dir, stat.S_IRWXU) - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - except OSError as e: - if e.errno != errno.EEXIST: - raise - - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - - return actual_dir - - def _get_cache_filename(self, bucket: Bucket) -> str: - return os.path.join(self.directory, self.pattern % (bucket.key,)) - - def load_bytecode(self, bucket: Bucket) -> None: - filename = self._get_cache_filename(bucket) - - # Don't test for existence before opening the file, since the - # file could disappear after the test before the open. 
- try: - f = open(filename, "rb") - except (FileNotFoundError, IsADirectoryError, PermissionError): - # PermissionError can occur on Windows when an operation is - # in progress, such as calling clear(). - return - - with f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket: Bucket) -> None: - # Write to a temporary file, then rename to the real name after - # writing. This avoids another process reading the file before - # it is fully written. - name = self._get_cache_filename(bucket) - f = tempfile.NamedTemporaryFile( - mode="wb", - dir=os.path.dirname(name), - prefix=os.path.basename(name), - suffix=".tmp", - delete=False, - ) - - def remove_silent() -> None: - try: - os.remove(f.name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - pass - - try: - with f: - bucket.write_bytecode(f) - except BaseException: - remove_silent() - raise - - try: - os.replace(f.name, name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - remove_silent() - except BaseException: - remove_silent() - raise - - def clear(self) -> None: - # imported lazily here because google app-engine doesn't support - # write access on the file system and the function does not exist - # normally. - from os import remove - - files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) - for filename in files: - try: - remove(os.path.join(self.directory, filename)) - except OSError: - pass - - -class MemcachedBytecodeCache(BytecodeCache): - """This class implements a bytecode cache that uses a memcache cache for - storing the information. It does not enforce a specific memcache library - (tummy's memcache or cmemcache) but will accept any class that provides - the minimal interface required. - - Libraries compatible with this class: - - - `cachelib `_ - - `python-memcached `_ - - (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only text. You can however pass - the underlying cache client to the bytecode cache which is available - as `django.core.cache.cache._client`.) - - The minimal interface for the client passed to the constructor is this: - - .. class:: MinimalClientInterface - - .. method:: set(key, value[, timeout]) - - Stores the bytecode in the cache. `value` is a string and - `timeout` the timeout of the key. If timeout is not provided - a default timeout or no timeout should be assumed, if it's - provided it's an integer with the number of seconds the cache - item should exist. - - .. method:: get(key) - - Returns the value for the cache key. If the item does not - exist in the cache the return value must be `None`. - - The other arguments to the constructor are the prefix for all keys that - is added before the actual cache key and the timeout for the bytecode in - the cache system. We recommend a high (or no) timeout. - - This bytecode cache does not support clearing of used items in the cache. - The clear method is a no-operation function. - - .. versionadded:: 2.7 - Added support for ignoring memcache errors through the - `ignore_memcache_errors` parameter. 
- """ - - def __init__( - self, - client: "_MemcachedClient", - prefix: str = "jinja2/bytecode/", - timeout: t.Optional[int] = None, - ignore_memcache_errors: bool = True, - ): - self.client = client - self.prefix = prefix - self.timeout = timeout - self.ignore_memcache_errors = ignore_memcache_errors - - def load_bytecode(self, bucket: Bucket) -> None: - try: - code = self.client.get(self.prefix + bucket.key) - except Exception: - if not self.ignore_memcache_errors: - raise - else: - bucket.bytecode_from_string(code) - - def dump_bytecode(self, bucket: Bucket) -> None: - key = self.prefix + bucket.key - value = bucket.bytecode_to_string() - - try: - if self.timeout is not None: - self.client.set(key, value, self.timeout) - else: - self.client.set(key, value) - except Exception: - if not self.ignore_memcache_errors: - raise diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/compiler.py b/venv_flaskchat/lib/python3.11/site-packages/jinja2/compiler.py deleted file mode 100644 index 3458095..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/jinja2/compiler.py +++ /dev/null @@ -1,1957 +0,0 @@ -"""Compiles nodes from the parser into Python code.""" -import typing as t -from contextlib import contextmanager -from functools import update_wrapper -from io import StringIO -from itertools import chain -from keyword import iskeyword as is_python_keyword - -from markupsafe import escape -from markupsafe import Markup - -from . import nodes -from .exceptions import TemplateAssertionError -from .idtracking import Symbols -from .idtracking import VAR_LOAD_ALIAS -from .idtracking import VAR_LOAD_PARAMETER -from .idtracking import VAR_LOAD_RESOLVE -from .idtracking import VAR_LOAD_UNDEFINED -from .nodes import EvalContext -from .optimizer import Optimizer -from .utils import _PassArg -from .utils import concat -from .visitor import NodeVisitor - -if t.TYPE_CHECKING: - import typing_extensions as te - from .environment import Environment - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -operators = { - "eq": "==", - "ne": "!=", - "gt": ">", - "gteq": ">=", - "lt": "<", - "lteq": "<=", - "in": "in", - "notin": "not in", -} - - -def optimizeconst(f: F) -> F: - def new_func( - self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any - ) -> t.Any: - # Only optimize if the frame is not volatile - if self.optimizer is not None and not frame.eval_ctx.volatile: - new_node = self.optimizer.visit(node, frame.eval_ctx) - - if new_node != node: - return self.visit(new_node, frame) - - return f(self, node, frame, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - -def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed - and op in self.environment.intercepted_binops # type: ignore - ): - self.write(f"environment.call_binop(context, {op!r}, ") - self.visit(node.left, frame) - self.write(", ") - self.visit(node.right, frame) - else: - self.write("(") - self.visit(node.left, frame) - self.write(f" {op} ") - self.visit(node.right, frame) - - self.write(")") - - return visitor - - -def _make_unop( - op: str, -) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed - and op in self.environment.intercepted_unops # type: ignore - ): - 
self.write(f"environment.call_unop(context, {op!r}, ") - self.visit(node.node, frame) - else: - self.write("(" + op) - self.visit(node.node, frame) - - self.write(")") - - return visitor - - -def generate( - node: nodes.Template, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, -) -> t.Optional[str]: - """Generate the python source for a node tree.""" - if not isinstance(node, nodes.Template): - raise TypeError("Can't compile non template nodes") - - generator = environment.code_generator_class( - environment, name, filename, stream, defer_init, optimized - ) - generator.visit(node) - - if stream is None: - return generator.stream.getvalue() # type: ignore - - return None - - -def has_safe_repr(value: t.Any) -> bool: - """Does the node have a safe representation?""" - if value is None or value is NotImplemented or value is Ellipsis: - return True - - if type(value) in {bool, int, float, complex, range, str, Markup}: - return True - - if type(value) in {tuple, list, set, frozenset}: - return all(has_safe_repr(v) for v in value) - - if type(value) is dict: - return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) - - return False - - -def find_undeclared( - nodes: t.Iterable[nodes.Node], names: t.Iterable[str] -) -> t.Set[str]: - """Check if the names passed are accessed undeclared. The return value - is a set of all the undeclared names from the sequence of names found. - """ - visitor = UndeclaredNameVisitor(names) - try: - for node in nodes: - visitor.visit(node) - except VisitorExit: - pass - return visitor.undeclared - - -class MacroRef: - def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: - self.node = node - self.accesses_caller = False - self.accesses_kwargs = False - self.accesses_varargs = False - - -class Frame: - """Holds compile time information for us.""" - - def __init__( - self, - eval_ctx: EvalContext, - parent: t.Optional["Frame"] = None, - level: t.Optional[int] = None, - ) -> None: - self.eval_ctx = eval_ctx - - # the parent of this frame - self.parent = parent - - if parent is None: - self.symbols = Symbols(level=level) - - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = False - - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer: t.Optional[str] = None - - # the name of the block we're in, otherwise None. - self.block: t.Optional[str] = None - - else: - self.symbols = Symbols(parent.symbols, level=level) - self.require_output_check = parent.require_output_check - self.buffer = parent.buffer - self.block = parent.block - - # a toplevel frame is the root + soft frames such as if conditions. - self.toplevel = False - - # the root frame is basically just the outermost frame, so no if - # conditions. This information is used to optimize inheritance - # situations. - self.rootlevel = False - - # variables set inside of loops and blocks should not affect outer frames, - # but they still needs to be kept track of as part of the active context. 
- self.loop_frame = False - self.block_frame = False - - # track whether the frame is being used in an if-statement or conditional - # expression as it determines which errors should be raised during runtime - # or compile time. - self.soft_frame = False - - def copy(self) -> "Frame": - """Create a copy of the current one.""" - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.symbols = self.symbols.copy() - return rv - - def inner(self, isolated: bool = False) -> "Frame": - """Return an inner frame.""" - if isolated: - return Frame(self.eval_ctx, level=self.symbols.level + 1) - return Frame(self.eval_ctx, self) - - def soft(self) -> "Frame": - """Return a soft frame. A soft frame may not be modified as - standalone thing as it shares the resources with the frame it - was created of, but it's not a rootlevel frame any longer. - - This is only used to implement if-statements and conditional - expressions. - """ - rv = self.copy() - rv.rootlevel = False - rv.soft_frame = True - return rv - - __copy__ = copy - - -class VisitorExit(RuntimeError): - """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" - - -class DependencyFinderVisitor(NodeVisitor): - """A visitor that collects filter and test calls.""" - - def __init__(self) -> None: - self.filters: t.Set[str] = set() - self.tests: t.Set[str] = set() - - def visit_Filter(self, node: nodes.Filter) -> None: - self.generic_visit(node) - self.filters.add(node.name) - - def visit_Test(self, node: nodes.Test) -> None: - self.generic_visit(node) - self.tests.add(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting at blocks.""" - - -class UndeclaredNameVisitor(NodeVisitor): - """A visitor that checks if a name is accessed without being - declared. This is different from the frame visitor as it will - not stop at closure frames. - """ - - def __init__(self, names: t.Iterable[str]) -> None: - self.names = set(names) - self.undeclared: t.Set[str] = set() - - def visit_Name(self, node: nodes.Name) -> None: - if node.ctx == "load" and node.name in self.names: - self.undeclared.add(node.name) - if self.undeclared == self.names: - raise VisitorExit() - else: - self.names.discard(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting a blocks.""" - - -class CompilerExit(Exception): - """Raised if the compiler encountered a situation where it just - doesn't make sense to further process the code. Any block that - raises such an exception is not further processed. - """ - - -class CodeGenerator(NodeVisitor): - def __init__( - self, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, - ) -> None: - if stream is None: - stream = StringIO() - self.environment = environment - self.name = name - self.filename = filename - self.stream = stream - self.created_block_context = False - self.defer_init = defer_init - self.optimizer: t.Optional[Optimizer] = None - - if optimized: - self.optimizer = Optimizer(environment) - - # aliases for imports - self.import_aliases: t.Dict[str, str] = {} - - # a registry for all blocks. Because blocks are moved out - # into the global python scope they are registered here - self.blocks: t.Dict[str, nodes.Block] = {} - - # the number of extends statements so far - self.extends_so_far = 0 - - # some templates have a rootlevel extends. 
In this case we - # can safely assume that we're a child template and do some - # more optimizations. - self.has_known_extends = False - - # the current line number - self.code_lineno = 1 - - # registry of all filters and tests (global, not block local) - self.tests: t.Dict[str, str] = {} - self.filters: t.Dict[str, str] = {} - - # the debug information - self.debug_info: t.List[t.Tuple[int, int]] = [] - self._write_debug_info: t.Optional[int] = None - - # the number of new lines before the next write() - self._new_lines = 0 - - # the line number of the last written statement - self._last_line = 0 - - # true if nothing was written so far. - self._first_write = True - - # used by the `temporary_identifier` method to get new - # unique, temporary identifier - self._last_identifier = 0 - - # the current indentation - self._indentation = 0 - - # Tracks toplevel assignments - self._assign_stack: t.List[t.Set[str]] = [] - - # Tracks parameter definition blocks - self._param_def_block: t.List[t.Set[str]] = [] - - # Tracks the current context. - self._context_reference_stack = ["context"] - - @property - def optimized(self) -> bool: - return self.optimizer is not None - - # -- Various compilation helpers - - def fail(self, msg: str, lineno: int) -> "te.NoReturn": - """Fail with a :exc:`TemplateAssertionError`.""" - raise TemplateAssertionError(msg, lineno, self.name, self.filename) - - def temporary_identifier(self) -> str: - """Get a new unique identifier.""" - self._last_identifier += 1 - return f"t_{self._last_identifier}" - - def buffer(self, frame: Frame) -> None: - """Enable buffering for the frame from that point onwards.""" - frame.buffer = self.temporary_identifier() - self.writeline(f"{frame.buffer} = []") - - def return_buffer_contents( - self, frame: Frame, force_unescaped: bool = False - ) -> None: - """Return the buffer contents of the frame.""" - if not force_unescaped: - if frame.eval_ctx.volatile: - self.writeline("if context.eval_ctx.autoescape:") - self.indent() - self.writeline(f"return Markup(concat({frame.buffer}))") - self.outdent() - self.writeline("else:") - self.indent() - self.writeline(f"return concat({frame.buffer})") - self.outdent() - return - elif frame.eval_ctx.autoescape: - self.writeline(f"return Markup(concat({frame.buffer}))") - return - self.writeline(f"return concat({frame.buffer})") - - def indent(self) -> None: - """Indent by one.""" - self._indentation += 1 - - def outdent(self, step: int = 1) -> None: - """Outdent by step.""" - self._indentation -= step - - def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: - """Yield or write into the frame buffer.""" - if frame.buffer is None: - self.writeline("yield ", node) - else: - self.writeline(f"{frame.buffer}.append(", node) - - def end_write(self, frame: Frame) -> None: - """End the writing process started by `start_write`.""" - if frame.buffer is not None: - self.write(")") - - def simple_write( - self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None - ) -> None: - """Simple shortcut for start_write + write + end_write.""" - self.start_write(frame, node) - self.write(s) - self.end_write(frame) - - def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: - """Visit a list of nodes as block in a frame. If the current frame - is no buffer a dummy ``if 0: yield None`` is written automatically. 
- """ - try: - self.writeline("pass") - for node in nodes: - self.visit(node, frame) - except CompilerExit: - pass - - def write(self, x: str) -> None: - """Write a string into the output stream.""" - if self._new_lines: - if not self._first_write: - self.stream.write("\n" * self._new_lines) - self.code_lineno += self._new_lines - if self._write_debug_info is not None: - self.debug_info.append((self._write_debug_info, self.code_lineno)) - self._write_debug_info = None - self._first_write = False - self.stream.write(" " * self._indentation) - self._new_lines = 0 - self.stream.write(x) - - def writeline( - self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 - ) -> None: - """Combination of newline and write.""" - self.newline(node, extra) - self.write(x) - - def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: - """Add one or more newlines before the next write.""" - self._new_lines = max(self._new_lines, 1 + extra) - if node is not None and node.lineno != self._last_line: - self._write_debug_info = node.lineno - self._last_line = node.lineno - - def signature( - self, - node: t.Union[nodes.Call, nodes.Filter, nodes.Test], - frame: Frame, - extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> None: - """Writes a function call to the stream for the current node. - A leading comma is added automatically. The extra keyword - arguments may not include python keywords otherwise a syntax - error could occur. The extra keyword arguments should be given - as python dict. - """ - # if any of the given keyword arguments is a python keyword - # we have to make sure that no invalid call is created. - kwarg_workaround = any( - is_python_keyword(t.cast(str, k)) - for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) - ) - - for arg in node.args: - self.write(", ") - self.visit(arg, frame) - - if not kwarg_workaround: - for kwarg in node.kwargs: - self.write(", ") - self.visit(kwarg, frame) - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f", {key}={value}") - if node.dyn_args: - self.write(", *") - self.visit(node.dyn_args, frame) - - if kwarg_workaround: - if node.dyn_kwargs is not None: - self.write(", **dict({") - else: - self.write(", **{") - for kwarg in node.kwargs: - self.write(f"{kwarg.key!r}: ") - self.visit(kwarg.value, frame) - self.write(", ") - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f"{key!r}: {value}, ") - if node.dyn_kwargs is not None: - self.write("}, **") - self.visit(node.dyn_kwargs, frame) - self.write(")") - else: - self.write("}") - - elif node.dyn_kwargs is not None: - self.write(", **") - self.visit(node.dyn_kwargs, frame) - - def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: - """Find all filter and test names used in the template and - assign them to variables in the compiled namespace. Checking - that the names are registered with the environment is done when - compiling the Filter and Test nodes. If the node is in an If or - CondExpr node, the check is done at runtime instead. - - .. versionchanged:: 3.0 - Filters and tests in If and CondExpr nodes are checked at - runtime instead of compile time. 
- """ - visitor = DependencyFinderVisitor() - - for node in nodes: - visitor.visit(node) - - for id_map, names, dependency in (self.filters, visitor.filters, "filters"), ( - self.tests, - visitor.tests, - "tests", - ): - for name in sorted(names): - if name not in id_map: - id_map[name] = self.temporary_identifier() - - # add check during runtime that dependencies used inside of executed - # blocks are defined, as this step may be skipped during compile time - self.writeline("try:") - self.indent() - self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") - self.outdent() - self.writeline("except KeyError:") - self.indent() - self.writeline("@internalcode") - self.writeline(f"def {id_map[name]}(*unused):") - self.indent() - self.writeline( - f'raise TemplateRuntimeError("No {dependency[:-1]}' - f' named {name!r} found.")' - ) - self.outdent() - self.outdent() - - def enter_frame(self, frame: Frame) -> None: - undefs = [] - for target, (action, param) in frame.symbols.loads.items(): - if action == VAR_LOAD_PARAMETER: - pass - elif action == VAR_LOAD_RESOLVE: - self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") - elif action == VAR_LOAD_ALIAS: - self.writeline(f"{target} = {param}") - elif action == VAR_LOAD_UNDEFINED: - undefs.append(target) - else: - raise NotImplementedError("unknown load instruction") - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: - if not with_python_scope: - undefs = [] - for target in frame.symbols.loads: - undefs.append(target) - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: - return async_value if self.environment.is_async else sync_value - - def func(self, name: str) -> str: - return f"{self.choose_async()}def {name}" - - def macro_body( - self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame - ) -> t.Tuple[Frame, MacroRef]: - """Dump the function def of a macro or call block.""" - frame = frame.inner() - frame.symbols.analyze_node(node) - macro_ref = MacroRef(node) - - explicit_caller = None - skip_special_params = set() - args = [] - - for idx, arg in enumerate(node.args): - if arg.name == "caller": - explicit_caller = idx - if arg.name in ("kwargs", "varargs"): - skip_special_params.add(arg.name) - args.append(frame.symbols.ref(arg.name)) - - undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) - - if "caller" in undeclared: - # In older Jinja versions there was a bug that allowed caller - # to retain the special behavior even if it was mentioned in - # the argument list. However thankfully this was only really - # working if it was the last argument. So we are explicitly - # checking this now and error out if it is anywhere else in - # the argument list. 
- if explicit_caller is not None: - try: - node.defaults[explicit_caller - len(node.args)] - except IndexError: - self.fail( - "When defining macros or call blocks the " - 'special "caller" argument must be omitted ' - "or be given a default.", - node.lineno, - ) - else: - args.append(frame.symbols.declare_parameter("caller")) - macro_ref.accesses_caller = True - if "kwargs" in undeclared and "kwargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("kwargs")) - macro_ref.accesses_kwargs = True - if "varargs" in undeclared and "varargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("varargs")) - macro_ref.accesses_varargs = True - - # macros are delayed, they never require output checks - frame.require_output_check = False - frame.symbols.analyze_node(node) - self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) - self.indent() - - self.buffer(frame) - self.enter_frame(frame) - - self.push_parameter_definitions(frame) - for idx, arg in enumerate(node.args): - ref = frame.symbols.ref(arg.name) - self.writeline(f"if {ref} is missing:") - self.indent() - try: - default = node.defaults[idx - len(node.args)] - except IndexError: - self.writeline( - f'{ref} = undefined("parameter {arg.name!r} was not provided",' - f" name={arg.name!r})" - ) - else: - self.writeline(f"{ref} = ") - self.visit(default, frame) - self.mark_parameter_stored(ref) - self.outdent() - self.pop_parameter_definitions() - - self.blockvisit(node.body, frame) - self.return_buffer_contents(frame, force_unescaped=True) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - return frame, macro_ref - - def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: - """Dump the macro definition for the def created by macro_body.""" - arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) - name = getattr(macro_ref.node, "name", None) - if len(macro_ref.node.args) == 1: - arg_tuple += "," - self.write( - f"Macro(environment, macro, {name!r}, ({arg_tuple})," - f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," - f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" - ) - - def position(self, node: nodes.Node) -> str: - """Return a human readable position for the node.""" - rv = f"line {node.lineno}" - if self.name is not None: - rv = f"{rv} in {self.name!r}" - return rv - - def dump_local_context(self, frame: Frame) -> str: - items_kv = ", ".join( - f"{name!r}: {target}" - for name, target in frame.symbols.dump_stores().items() - ) - return f"{{{items_kv}}}" - - def write_commons(self) -> None: - """Writes a common preamble that is used by root and block functions. - Primarily this sets up common local helpers and enforces a generator - through a dead branch. - """ - self.writeline("resolve = context.resolve_or_missing") - self.writeline("undefined = environment.undefined") - self.writeline("concat = environment.concat") - # always use the standard Undefined class for the implicit else of - # conditional expressions - self.writeline("cond_expr_undefined = Undefined") - self.writeline("if 0: yield None") - - def push_parameter_definitions(self, frame: Frame) -> None: - """Pushes all parameter targets from the given frame into a local - stack that permits tracking of yet to be assigned parameters. In - particular this enables the optimization from `visit_Name` to skip - undefined expressions for parameters in macros as macros can reference - otherwise unbound parameters. 
- """ - self._param_def_block.append(frame.symbols.dump_param_targets()) - - def pop_parameter_definitions(self) -> None: - """Pops the current parameter definitions set.""" - self._param_def_block.pop() - - def mark_parameter_stored(self, target: str) -> None: - """Marks a parameter in the current parameter definitions as stored. - This will skip the enforced undefined checks. - """ - if self._param_def_block: - self._param_def_block[-1].discard(target) - - def push_context_reference(self, target: str) -> None: - self._context_reference_stack.append(target) - - def pop_context_reference(self) -> None: - self._context_reference_stack.pop() - - def get_context_ref(self) -> str: - return self._context_reference_stack[-1] - - def get_resolve_func(self) -> str: - target = self._context_reference_stack[-1] - if target == "context": - return "resolve" - return f"{target}.resolve" - - def derive_context(self, frame: Frame) -> str: - return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" - - def parameter_is_undeclared(self, target: str) -> bool: - """Checks if a given target is an undeclared parameter.""" - if not self._param_def_block: - return False - return target in self._param_def_block[-1] - - def push_assign_tracking(self) -> None: - """Pushes a new layer for assignment tracking.""" - self._assign_stack.append(set()) - - def pop_assign_tracking(self, frame: Frame) -> None: - """Pops the topmost level for assignment tracking and updates the - context variables if necessary. - """ - vars = self._assign_stack.pop() - if ( - not frame.block_frame - and not frame.loop_frame - and not frame.toplevel - or not vars - ): - return - public_names = [x for x in vars if x[:1] != "_"] - if len(vars) == 1: - name = next(iter(vars)) - ref = frame.symbols.ref(name) - if frame.loop_frame: - self.writeline(f"_loop_vars[{name!r}] = {ref}") - return - if frame.block_frame: - self.writeline(f"_block_vars[{name!r}] = {ref}") - return - self.writeline(f"context.vars[{name!r}] = {ref}") - else: - if frame.loop_frame: - self.writeline("_loop_vars.update({") - elif frame.block_frame: - self.writeline("_block_vars.update({") - else: - self.writeline("context.vars.update({") - for idx, name in enumerate(vars): - if idx: - self.write(", ") - ref = frame.symbols.ref(name) - self.write(f"{name!r}: {ref}") - self.write("})") - if not frame.block_frame and not frame.loop_frame and public_names: - if len(public_names) == 1: - self.writeline(f"context.exported_vars.add({public_names[0]!r})") - else: - names_str = ", ".join(map(repr, public_names)) - self.writeline(f"context.exported_vars.update(({names_str}))") - - # -- Statement Visitors - - def visit_Template( - self, node: nodes.Template, frame: t.Optional[Frame] = None - ) -> None: - assert frame is None, "no root frame allowed" - eval_ctx = EvalContext(self.environment, self.name) - - from .runtime import exported, async_exported - - if self.environment.is_async: - exported_names = sorted(exported + async_exported) - else: - exported_names = sorted(exported) - - self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) - - # if we want a deferred initialization we cannot move the - # environment into a local name - envenv = "" if self.defer_init else ", environment=environment" - - # do we have an extends tag at all? If not, we can save some - # overhead by just not processing any inheritance code. 
- have_extends = node.find(nodes.Extends) is not None - - # find all blocks - for block in node.find_all(nodes.Block): - if block.name in self.blocks: - self.fail(f"block {block.name!r} defined twice", block.lineno) - self.blocks[block.name] = block - - # find all imports and import them - for import_ in node.find_all(nodes.ImportedName): - if import_.importname not in self.import_aliases: - imp = import_.importname - self.import_aliases[imp] = alias = self.temporary_identifier() - if "." in imp: - module, obj = imp.rsplit(".", 1) - self.writeline(f"from {module} import {obj} as {alias}") - else: - self.writeline(f"import {imp} as {alias}") - - # add the load name - self.writeline(f"name = {self.name!r}") - - # generate the root render function. - self.writeline( - f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 - ) - self.indent() - self.write_commons() - - # process the root - frame = Frame(eval_ctx) - if "self" in find_undeclared(node.body, ("self",)): - ref = frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - frame.symbols.analyze_node(node) - frame.toplevel = frame.rootlevel = True - frame.require_output_check = have_extends and not self.has_known_extends - if have_extends: - self.writeline("parent_template = None") - self.enter_frame(frame) - self.pull_dependencies(node.body) - self.blockvisit(node.body, frame) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - # make sure that the parent root is called. - if have_extends: - if not self.has_known_extends: - self.indent() - self.writeline("if parent_template is not None:") - self.indent() - if not self.environment.is_async: - self.writeline("yield from parent_template.root_render_func(context)") - else: - self.writeline( - "async for event in parent_template.root_render_func(context):" - ) - self.indent() - self.writeline("yield event") - self.outdent() - self.outdent(1 + (not self.has_known_extends)) - - # at this point we now have the blocks collected and can visit them too. - for name, block in self.blocks.items(): - self.writeline( - f"{self.func('block_' + name)}(context, missing=missing{envenv}):", - block, - 1, - ) - self.indent() - self.write_commons() - # It's important that we do not make this frame a child of the - # toplevel template. This would cause a variety of - # interesting issues with identifier tracking. 
- block_frame = Frame(eval_ctx) - block_frame.block_frame = True - undeclared = find_undeclared(block.body, ("self", "super")) - if "self" in undeclared: - ref = block_frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - if "super" in undeclared: - ref = block_frame.symbols.declare_parameter("super") - self.writeline(f"{ref} = context.super({name!r}, block_{name})") - block_frame.symbols.analyze_node(block) - block_frame.block = name - self.writeline("_block_vars = {}") - self.enter_frame(block_frame) - self.pull_dependencies(block.body) - self.blockvisit(block.body, block_frame) - self.leave_frame(block_frame, with_python_scope=True) - self.outdent() - - blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) - self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) - debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) - self.writeline(f"debug_info = {debug_kv_str!r}") - - def visit_Block(self, node: nodes.Block, frame: Frame) -> None: - """Call a block and register it for the template.""" - level = 0 - if frame.toplevel: - # if we know that we are a child template, there is no need to - # check if we are one - if self.has_known_extends: - return - if self.extends_so_far > 0: - self.writeline("if parent_template is None:") - self.indent() - level += 1 - - if node.scoped: - context = self.derive_context(frame) - else: - context = self.get_context_ref() - - if node.required: - self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) - self.indent() - self.writeline( - f'raise TemplateRuntimeError("Required block {node.name!r} not found")', - node, - ) - self.outdent() - - if not self.environment.is_async and frame.buffer is None: - self.writeline( - f"yield from context.blocks[{node.name!r}][0]({context})", node - ) - else: - self.writeline( - f"{self.choose_async()}for event in" - f" context.blocks[{node.name!r}][0]({context}):", - node, - ) - self.indent() - self.simple_write("event", frame) - self.outdent() - - self.outdent(level) - - def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: - """Calls the extender.""" - if not frame.toplevel: - self.fail("cannot use extend from a non top-level scope", node.lineno) - - # if the number of extends statements in general is zero so - # far, we don't have to add a check if something extended - # the template before this one. - if self.extends_so_far > 0: - - # if we have a known extends we just add a template runtime - # error into the generated code. We could catch that at compile - # time too, but i welcome it not to confuse users by throwing the - # same error at different times just "because we can". - if not self.has_known_extends: - self.writeline("if parent_template is not None:") - self.indent() - self.writeline('raise TemplateRuntimeError("extended multiple times")') - - # if we have a known extends already we don't need that code here - # as we know that the template execution will end here. 
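# The Block/Extends visitors deleted above drive template inheritance (blocks, super(),
# required blocks). Illustrative sketch; template names and contents are made up:
from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({
    "base.html": "{% block head %}base head{% endblock %} | {% block body required %}{% endblock %}",
    "child.html": (
        "{% extends 'base.html' %}"
        "{% block head %}{{ super() }} + child{% endblock %}"
        "{% block body %}child body{% endblock %}"
    ),
}))
print(env.get_template("child.html").render())
# -> "base head + child | child body"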
- if self.has_known_extends: - raise CompilerExit() - else: - self.outdent() - - self.writeline("parent_template = environment.get_template(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - self.writeline("for name, parent_block in parent_template.blocks.items():") - self.indent() - self.writeline("context.blocks.setdefault(name, []).append(parent_block)") - self.outdent() - - # if this extends statement was in the root level we can take - # advantage of that information and simplify the generated code - # in the top level from this point onwards - if frame.rootlevel: - self.has_known_extends = True - - # and now we have one more - self.extends_so_far += 1 - - def visit_Include(self, node: nodes.Include, frame: Frame) -> None: - """Handles includes.""" - if node.ignore_missing: - self.writeline("try:") - self.indent() - - func_name = "get_or_select_template" - if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, str): - func_name = "get_template" - elif isinstance(node.template.value, (tuple, list)): - func_name = "select_template" - elif isinstance(node.template, (nodes.Tuple, nodes.List)): - func_name = "select_template" - - self.writeline(f"template = environment.{func_name}(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - if node.ignore_missing: - self.outdent() - self.writeline("except TemplateNotFound:") - self.indent() - self.writeline("pass") - self.outdent() - self.writeline("else:") - self.indent() - - skip_event_yield = False - if node.with_context: - self.writeline( - f"{self.choose_async()}for event in template.root_render_func(" - "template.new_context(context.get_all(), True," - f" {self.dump_local_context(frame)})):" - ) - elif self.environment.is_async: - self.writeline( - "for event in (await template._get_default_module_async())" - "._body_stream:" - ) - else: - self.writeline("yield from template._get_default_module()._body_stream") - skip_event_yield = True - - if not skip_event_yield: - self.indent() - self.simple_write("event", frame) - self.outdent() - - if node.ignore_missing: - self.outdent() - - def _import_common( - self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame - ) -> None: - self.write(f"{self.choose_async('await ')}environment.get_template(") - self.visit(node.template, frame) - self.write(f", {self.name!r}).") - - if node.with_context: - f_name = f"make_module{self.choose_async('_async')}" - self.write( - f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" - ) - else: - self.write(f"_get_default_module{self.choose_async('_async')}(context)") - - def visit_Import(self, node: nodes.Import, frame: Frame) -> None: - """Visit regular imports.""" - self.writeline(f"{frame.symbols.ref(node.target)} = ", node) - if frame.toplevel: - self.write(f"context.vars[{node.target!r}] = ") - - self._import_common(node, frame) - - if frame.toplevel and not node.target.startswith("_"): - self.writeline(f"context.exported_vars.discard({node.target!r})") - - def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: - """Visit named imports.""" - self.newline(node) - self.write("included_template = ") - self._import_common(node, frame) - var_names = [] - discarded_names = [] - for name in node.names: - if isinstance(name, tuple): - name, alias = name - else: - alias = name - self.writeline( - f"{frame.symbols.ref(alias)} =" - f" getattr(included_template, {name!r}, missing)" - ) - self.writeline(f"if {frame.symbols.ref(alias)} is missing:") 
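# The Include/Import visitors deleted above correspond to {% include %}, {% import %}
# and {% from ... import %}. Illustrative sketch; template names are hypothetical:
from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({
    "forms.html": "{% macro field(name) %}<input name=\"{{ name }}\">{% endmacro %}",
    "page.html": (
        "{% include 'banner.html' ignore missing %}"  # silently skipped if absent
        "{% from 'forms.html' import field %}"
        "{{ field('email') }}"
    ),
}))
print(env.get_template("page.html").render())  # -> <input name="email">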
- self.indent() - message = ( - "the template {included_template.__name__!r}" - f" (imported on {self.position(node)})" - f" does not export the requested name {name!r}" - ) - self.writeline( - f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" - ) - self.outdent() - if frame.toplevel: - var_names.append(alias) - if not alias.startswith("_"): - discarded_names.append(alias) - - if var_names: - if len(var_names) == 1: - name = var_names[0] - self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") - else: - names_kv = ", ".join( - f"{name!r}: {frame.symbols.ref(name)}" for name in var_names - ) - self.writeline(f"context.vars.update({{{names_kv}}})") - if discarded_names: - if len(discarded_names) == 1: - self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") - else: - names_str = ", ".join(map(repr, discarded_names)) - self.writeline( - f"context.exported_vars.difference_update(({names_str}))" - ) - - def visit_For(self, node: nodes.For, frame: Frame) -> None: - loop_frame = frame.inner() - loop_frame.loop_frame = True - test_frame = frame.inner() - else_frame = frame.inner() - - # try to figure out if we have an extended loop. An extended loop - # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body if the body is a scoped block. - extended_loop = ( - node.recursive - or "loop" - in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) - or any(block.scoped for block in node.find_all(nodes.Block)) - ) - - loop_ref = None - if extended_loop: - loop_ref = loop_frame.symbols.declare_parameter("loop") - - loop_frame.symbols.analyze_node(node, for_branch="body") - if node.else_: - else_frame.symbols.analyze_node(node, for_branch="else") - - if node.test: - loop_filter_func = self.temporary_identifier() - test_frame.symbols.analyze_node(node, for_branch="test") - self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) - self.indent() - self.enter_frame(test_frame) - self.writeline(self.choose_async("async for ", "for ")) - self.visit(node.target, loop_frame) - self.write(" in ") - self.write(self.choose_async("auto_aiter(fiter)", "fiter")) - self.write(":") - self.indent() - self.writeline("if ", node.test) - self.visit(node.test, test_frame) - self.write(":") - self.indent() - self.writeline("yield ") - self.visit(node.target, loop_frame) - self.outdent(3) - self.leave_frame(test_frame, with_python_scope=True) - - # if we don't have an recursive loop we have to find the shadowed - # variables at that point. Because loops can be nested but the loop - # variable is a special one we have to enforce aliasing for it. 
- if node.recursive: - self.writeline( - f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node - ) - self.indent() - self.buffer(loop_frame) - - # Use the same buffer for the else frame - else_frame.buffer = loop_frame.buffer - - # make sure the loop variable is a special one and raise a template - # assertion error if a loop tries to write to loop - if extended_loop: - self.writeline(f"{loop_ref} = missing") - - for name in node.find_all(nodes.Name): - if name.ctx == "store" and name.name == "loop": - self.fail( - "Can't assign to special loop variable in for-loop target", - name.lineno, - ) - - if node.else_: - iteration_indicator = self.temporary_identifier() - self.writeline(f"{iteration_indicator} = 1") - - self.writeline(self.choose_async("async for ", "for "), node) - self.visit(node.target, loop_frame) - if extended_loop: - self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") - else: - self.write(" in ") - - if node.test: - self.write(f"{loop_filter_func}(") - if node.recursive: - self.write("reciter") - else: - if self.environment.is_async and not extended_loop: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async and not extended_loop: - self.write(")") - if node.test: - self.write(")") - - if node.recursive: - self.write(", undefined, loop_render_func, depth):") - else: - self.write(", undefined):" if extended_loop else ":") - - self.indent() - self.enter_frame(loop_frame) - - self.writeline("_loop_vars = {}") - self.blockvisit(node.body, loop_frame) - if node.else_: - self.writeline(f"{iteration_indicator} = 0") - self.outdent() - self.leave_frame( - loop_frame, with_python_scope=node.recursive and not node.else_ - ) - - if node.else_: - self.writeline(f"if {iteration_indicator}:") - self.indent() - self.enter_frame(else_frame) - self.blockvisit(node.else_, else_frame) - self.leave_frame(else_frame) - self.outdent() - - # if the node was recursive we have to return the buffer contents - # and start the iteration code - if node.recursive: - self.return_buffer_contents(loop_frame) - self.outdent() - self.start_write(frame, node) - self.write(f"{self.choose_async('await ')}loop(") - if self.environment.is_async: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async: - self.write(")") - self.write(", loop)") - self.end_write(frame) - - # at the end of the iteration, clear any assignments made in the - # loop from the top level - if self._assign_stack: - self._assign_stack[-1].difference_update(loop_frame.symbols.stores) - - def visit_If(self, node: nodes.If, frame: Frame) -> None: - if_frame = frame.soft() - self.writeline("if ", node) - self.visit(node.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(node.body, if_frame) - self.outdent() - for elif_ in node.elif_: - self.writeline("elif ", elif_) - self.visit(elif_.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(elif_.body, if_frame) - self.outdent() - if node.else_: - self.writeline("else:") - self.indent() - self.blockvisit(node.else_, if_frame) - self.outdent() - - def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: - macro_frame, macro_ref = self.macro_body(node, frame) - self.newline() - if frame.toplevel: - if not node.name.startswith("_"): - self.write(f"context.exported_vars.add({node.name!r})") - self.writeline(f"context.vars[{node.name!r}] = ") - self.write(f"{frame.symbols.ref(node.name)} = ") - self.macro_def(macro_ref, macro_frame) - - def visit_CallBlock(self, 
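# The for-loop compilation deleted above ({% for %}, the special `loop` variable,
# inline filtering, else branches and recursive loops) at the template level,
# as an illustrative sketch:
from jinja2 import Environment

env = Environment()
flat = env.from_string(
    "{% for u in users if u.active %}{{ loop.index }}:{{ u.name }} "
    "{% else %}nobody active{% endfor %}"
)
print(flat.render(users=[{"name": "a", "active": True}, {"name": "b", "active": False}]))
# -> "1:a "

tree = env.from_string(
    "{% for node in tree recursive %}{{ node.name }} {{ loop(node.children) }}{% endfor %}"
)
print(tree.render(tree=[{"name": "root", "children": [{"name": "leaf", "children": []}]}]))
# -> "root leaf "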
node: nodes.CallBlock, frame: Frame) -> None: - call_frame, macro_ref = self.macro_body(node, frame) - self.writeline("caller = ") - self.macro_def(macro_ref, call_frame) - self.start_write(frame, node) - self.visit_Call(node.call, frame, forward_caller=True) - self.end_write(frame) - - def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: - filter_frame = frame.inner() - filter_frame.symbols.analyze_node(node) - self.enter_frame(filter_frame) - self.buffer(filter_frame) - self.blockvisit(node.body, filter_frame) - self.start_write(frame, node) - self.visit_Filter(node.filter, filter_frame) - self.end_write(frame) - self.leave_frame(filter_frame) - - def visit_With(self, node: nodes.With, frame: Frame) -> None: - with_frame = frame.inner() - with_frame.symbols.analyze_node(node) - self.enter_frame(with_frame) - for target, expr in zip(node.targets, node.values): - self.newline() - self.visit(target, with_frame) - self.write(" = ") - self.visit(expr, frame) - self.blockvisit(node.body, with_frame) - self.leave_frame(with_frame) - - def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: - self.newline(node) - self.visit(node.node, frame) - - class _FinalizeInfo(t.NamedTuple): - const: t.Optional[t.Callable[..., str]] - src: t.Optional[str] - - @staticmethod - def _default_finalize(value: t.Any) -> t.Any: - """The default finalize function if the environment isn't - configured with one. Or, if the environment has one, this is - called on that function's output for constants. - """ - return str(value) - - _finalize: t.Optional[_FinalizeInfo] = None - - def _make_finalize(self) -> _FinalizeInfo: - """Build the finalize function to be used on constants and at - runtime. Cached so it's only created once for all output nodes. - - Returns a ``namedtuple`` with the following attributes: - - ``const`` - A function to finalize constant data at compile time. - - ``src`` - Source code to output around nodes to be evaluated at - runtime. - """ - if self._finalize is not None: - return self._finalize - - finalize: t.Optional[t.Callable[..., t.Any]] - finalize = default = self._default_finalize - src = None - - if self.environment.finalize: - src = "environment.finalize(" - env_finalize = self.environment.finalize - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(env_finalize) # type: ignore - ) - finalize = None - - if pass_arg is None: - - def finalize(value: t.Any) -> t.Any: - return default(env_finalize(value)) - - else: - src = f"{src}{pass_arg}, " - - if pass_arg == "environment": - - def finalize(value: t.Any) -> t.Any: - return default(env_finalize(self.environment, value)) - - self._finalize = self._FinalizeInfo(finalize, src) - return self._finalize - - def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: - """Given a group of constant values converted from ``Output`` - child nodes, produce a string to write to the template module - source. - """ - return repr(concat(group)) - - def _output_child_to_const( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> str: - """Try to optimize a child of an ``Output`` node by trying to - convert it to constant, finalized data at compile time. - - If :exc:`Impossible` is raised, the node is not constant and - will be evaluated at runtime. Any other exception will also be - evaluated at runtime for easier debugging. 
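# The FilterBlock and With visitors deleted above implement {% filter %} and {% with %}.
# Illustrative sketch of those statements:
from jinja2 import Environment

env = Environment()
print(env.from_string(
    "{% filter upper %}hello {{ name }}{% endfilter %} "
    "{% with greeting = 'hi' %}{{ greeting }}{% endwith %}"
).render(name="world"))
# -> "HELLO WORLD hi"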
- """ - const = node.as_const(frame.eval_ctx) - - if frame.eval_ctx.autoescape: - const = escape(const) - - # Template data doesn't go through finalize. - if isinstance(node, nodes.TemplateData): - return str(const) - - return finalize.const(const) # type: ignore - - def _output_child_pre( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code before visiting a child of an - ``Output`` node. - """ - if frame.eval_ctx.volatile: - self.write("(escape if context.eval_ctx.autoescape else str)(") - elif frame.eval_ctx.autoescape: - self.write("escape(") - else: - self.write("str(") - - if finalize.src is not None: - self.write(finalize.src) - - def _output_child_post( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code after visiting a child of an - ``Output`` node. - """ - self.write(")") - - if finalize.src is not None: - self.write(")") - - def visit_Output(self, node: nodes.Output, frame: Frame) -> None: - # If an extends is active, don't render outside a block. - if frame.require_output_check: - # A top-level extends is known to exist at compile time. - if self.has_known_extends: - return - - self.writeline("if parent_template is None:") - self.indent() - - finalize = self._make_finalize() - body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] - - # Evaluate constants at compile time if possible. Each item in - # body will be either a list of static data or a node to be - # evaluated at runtime. - for child in node.nodes: - try: - if not ( - # If the finalize function requires runtime context, - # constants can't be evaluated at compile time. - finalize.const - # Unless it's basic template data that won't be - # finalized anyway. - or isinstance(child, nodes.TemplateData) - ): - raise nodes.Impossible() - - const = self._output_child_to_const(child, frame, finalize) - except (nodes.Impossible, Exception): - # The node was not constant and needs to be evaluated at - # runtime. Or another error was raised, which is easier - # to debug at runtime. - body.append(child) - continue - - if body and isinstance(body[-1], list): - body[-1].append(const) - else: - body.append([const]) - - if frame.buffer is not None: - if len(body) == 1: - self.writeline(f"{frame.buffer}.append(") - else: - self.writeline(f"{frame.buffer}.extend((") - - self.indent() - - for item in body: - if isinstance(item, list): - # A group of constant data to join and output. - val = self._output_const_repr(item) - - if frame.buffer is None: - self.writeline("yield " + val) - else: - self.writeline(val + ",") - else: - if frame.buffer is None: - self.writeline("yield ", item) - else: - self.newline(item) - - # A node to be evaluated at runtime. - self._output_child_pre(item, frame, finalize) - self.visit(item, frame) - self._output_child_post(item, frame, finalize) - - if frame.buffer is not None: - self.write(",") - - if frame.buffer is not None: - self.outdent() - self.writeline(")" if len(body) == 1 else "))") - - if frame.require_output_check: - self.outdent() - - def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: - self.push_assign_tracking() - self.newline(node) - self.visit(node.target, frame) - self.write(" = ") - self.visit(node.node, frame) - self.pop_assign_tracking(frame) - - def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: - self.push_assign_tracking() - block_frame = frame.inner() - # This is a special case. 
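# The Output handling deleted above applies the environment's finalize callable and
# autoescaping to every {{ ... }} expression. Illustrative sketch:
from jinja2 import Environment

env = Environment(
    finalize=lambda value: value if value is not None else "",  # render None as ""
    autoescape=True,
)
print(env.from_string("<p>{{ missing }}|{{ '<b>' }}</p>").render(missing=None))
# -> "<p>|&lt;b&gt;</p>"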
Since a set block always captures we - # will disable output checks. This way one can use set blocks - # toplevel even in extended templates. - block_frame.require_output_check = False - block_frame.symbols.analyze_node(node) - self.enter_frame(block_frame) - self.buffer(block_frame) - self.blockvisit(node.body, block_frame) - self.newline(node) - self.visit(node.target, frame) - self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") - if node.filter is not None: - self.visit_Filter(node.filter, block_frame) - else: - self.write(f"concat({block_frame.buffer})") - self.write(")") - self.pop_assign_tracking(frame) - self.leave_frame(block_frame) - - # -- Expression Visitors - - def visit_Name(self, node: nodes.Name, frame: Frame) -> None: - if node.ctx == "store" and ( - frame.toplevel or frame.loop_frame or frame.block_frame - ): - if self._assign_stack: - self._assign_stack[-1].add(node.name) - ref = frame.symbols.ref(node.name) - - # If we are looking up a variable we might have to deal with the - # case where it's undefined. We can skip that case if the load - # instruction indicates a parameter which are always defined. - if node.ctx == "load": - load = frame.symbols.find_load(ref) - if not ( - load is not None - and load[0] == VAR_LOAD_PARAMETER - and not self.parameter_is_undeclared(ref) - ): - self.write( - f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" - ) - return - - self.write(ref) - - def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: - # NSRefs can only be used to store values; since they use the normal - # `foo.bar` notation they will be parsed as a normal attribute access - # when used anywhere but in a `set` context - ref = frame.symbols.ref(node.name) - self.writeline(f"if not isinstance({ref}, Namespace):") - self.indent() - self.writeline( - "raise TemplateRuntimeError" - '("cannot assign attribute on non-namespace object")' - ) - self.outdent() - self.writeline(f"{ref}[{node.attr!r}]") - - def visit_Const(self, node: nodes.Const, frame: Frame) -> None: - val = node.as_const(frame.eval_ctx) - if isinstance(val, float): - self.write(str(val)) - else: - self.write(repr(val)) - - def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: - try: - self.write(repr(node.as_const(frame.eval_ctx))) - except nodes.Impossible: - self.write( - f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" - ) - - def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: - self.write("(") - idx = -1 - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write(",)" if idx == 0 else ")") - - def visit_List(self, node: nodes.List, frame: Frame) -> None: - self.write("[") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write("]") - - def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: - self.write("{") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item.key, frame) - self.write(": ") - self.visit(item.value, frame) - self.write("}") - - visit_Add = _make_binop("+") - visit_Sub = _make_binop("-") - visit_Mul = _make_binop("*") - visit_Div = _make_binop("/") - visit_FloorDiv = _make_binop("//") - visit_Pow = _make_binop("**") - visit_Mod = _make_binop("%") - visit_And = _make_binop("and") - visit_Or = _make_binop("or") - visit_Pos = _make_unop("+") - visit_Neg = _make_unop("-") - visit_Not = _make_unop("not ") - - @optimizeconst - def visit_Concat(self, node: 
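# The AssignBlock visitor deleted above compiles block assignments, optionally piped
# through a filter ({% set x | f %}...{% endset %}). Illustrative sketch:
from jinja2 import Environment

env = Environment()
print(env.from_string(
    "{% set summary | trim %}  hello world  {% endset %}{{ summary }}"
).render())
# -> "hello world"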
nodes.Concat, frame: Frame) -> None: - if frame.eval_ctx.volatile: - func_name = "(markup_join if context.eval_ctx.volatile else str_join)" - elif frame.eval_ctx.autoescape: - func_name = "markup_join" - else: - func_name = "str_join" - self.write(f"{func_name}((") - for arg in node.nodes: - self.visit(arg, frame) - self.write(", ") - self.write("))") - - @optimizeconst - def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: - self.write("(") - self.visit(node.expr, frame) - for op in node.ops: - self.visit(op, frame) - self.write(")") - - def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: - self.write(f" {operators[node.op]} ") - self.visit(node.expr, frame) - - @optimizeconst - def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getattr(") - self.visit(node.node, frame) - self.write(f", {node.attr!r})") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: - # slices bypass the environment getitem method. - if isinstance(node.arg, nodes.Slice): - self.visit(node.node, frame) - self.write("[") - self.visit(node.arg, frame) - self.write("]") - else: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getitem(") - self.visit(node.node, frame) - self.write(", ") - self.visit(node.arg, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: - if node.start is not None: - self.visit(node.start, frame) - self.write(":") - if node.stop is not None: - self.visit(node.stop, frame) - if node.step is not None: - self.write(":") - self.visit(node.step, frame) - - @contextmanager - def _filter_test_common( - self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool - ) -> t.Iterator[None]: - if self.environment.is_async: - self.write("(await auto_await(") - - if is_filter: - self.write(f"{self.filters[node.name]}(") - func = self.environment.filters.get(node.name) - else: - self.write(f"{self.tests[node.name]}(") - func = self.environment.tests.get(node.name) - - # When inside an If or CondExpr frame, allow the filter to be - # undefined at compile time and only raise an error if it's - # actually called at runtime. See pull_dependencies. - if func is None and not frame.soft_frame: - type_name = "filter" if is_filter else "test" - self.fail(f"No {type_name} named {node.name!r}.", node.lineno) - - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(func) # type: ignore - ) - - if pass_arg is not None: - self.write(f"{pass_arg}, ") - - # Back to the visitor function to handle visiting the target of - # the filter or test. 
- yield - - self.signature(node, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: - with self._filter_test_common(node, frame, True): - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write( - f"(Markup(concat({frame.buffer}))" - f" if context.eval_ctx.autoescape else concat({frame.buffer}))" - ) - elif frame.eval_ctx.autoescape: - self.write(f"Markup(concat({frame.buffer}))") - else: - self.write(f"concat({frame.buffer})") - - @optimizeconst - def visit_Test(self, node: nodes.Test, frame: Frame) -> None: - with self._filter_test_common(node, frame, False): - self.visit(node.node, frame) - - @optimizeconst - def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: - frame = frame.soft() - - def write_expr2() -> None: - if node.expr2 is not None: - self.visit(node.expr2, frame) - return - - self.write( - f'cond_expr_undefined("the inline if-expression on' - f" {self.position(node)} evaluated to false and no else" - f' section was defined.")' - ) - - self.write("(") - self.visit(node.expr1, frame) - self.write(" if ") - self.visit(node.test, frame) - self.write(" else ") - write_expr2() - self.write(")") - - @optimizeconst - def visit_Call( - self, node: nodes.Call, frame: Frame, forward_caller: bool = False - ) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - if self.environment.sandboxed: - self.write("environment.call(context, ") - else: - self.write("context.call(") - self.visit(node.node, frame) - extra_kwargs = {"caller": "caller"} if forward_caller else None - loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} - block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} - if extra_kwargs: - extra_kwargs.update(loop_kwargs, **block_kwargs) - elif loop_kwargs or block_kwargs: - extra_kwargs = dict(loop_kwargs, **block_kwargs) - self.signature(node, frame, extra_kwargs) - self.write(")") - if self.environment.is_async: - self.write("))") - - def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: - self.write(node.key + "=") - self.visit(node.value, frame) - - # -- Unused nodes for extensions - - def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: - self.write("Markup(") - self.visit(node.expr, frame) - self.write(")") - - def visit_MarkSafeIfAutoescape( - self, node: nodes.MarkSafeIfAutoescape, frame: Frame - ) -> None: - self.write("(Markup if context.eval_ctx.autoescape else identity)(") - self.visit(node.expr, frame) - self.write(")") - - def visit_EnvironmentAttribute( - self, node: nodes.EnvironmentAttribute, frame: Frame - ) -> None: - self.write("environment." 
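# The filter/test call plumbing deleted above (the _PassArg handling) decides whether a
# filter receives the context, eval context or environment. Illustrative sketch of
# registering such callables; the names "shout" and "shortword" are made up:
from jinja2 import Environment, pass_environment

@pass_environment
def shout(environment, value):
    # the Environment is injected first, mirroring the pass_arg plumbing above
    return environment.concat([value.upper(), "!"])

env = Environment()
env.filters["shout"] = shout
env.tests["shortword"] = lambda value: len(value) < 5

print(env.from_string("{{ 'hi' | shout }} {{ 'hi' is shortword }}").render())
# -> "HI! True"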
+ node.name) - - def visit_ExtensionAttribute( - self, node: nodes.ExtensionAttribute, frame: Frame - ) -> None: - self.write(f"environment.extensions[{node.identifier!r}].{node.name}") - - def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: - self.write(self.import_aliases[node.importname]) - - def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: - self.write(node.name) - - def visit_ContextReference( - self, node: nodes.ContextReference, frame: Frame - ) -> None: - self.write("context") - - def visit_DerivedContextReference( - self, node: nodes.DerivedContextReference, frame: Frame - ) -> None: - self.write(self.derive_context(frame)) - - def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: - self.writeline("continue", node) - - def visit_Break(self, node: nodes.Break, frame: Frame) -> None: - self.writeline("break", node) - - def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: - scope_frame = frame.inner() - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - - def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: - ctx = self.temporary_identifier() - self.writeline(f"{ctx} = {self.derive_context(frame)}") - self.writeline(f"{ctx}.vars = ") - self.visit(node.context, frame) - self.push_context_reference(ctx) - - scope_frame = frame.inner(isolated=True) - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - self.pop_context_reference() - - def visit_EvalContextModifier( - self, node: nodes.EvalContextModifier, frame: Frame - ) -> None: - for keyword in node.options: - self.writeline(f"context.eval_ctx.{keyword.key} = ") - self.visit(keyword.value, frame) - try: - val = keyword.value.as_const(frame.eval_ctx) - except nodes.Impossible: - frame.eval_ctx.volatile = True - else: - setattr(frame.eval_ctx, keyword.key, val) - - def visit_ScopedEvalContextModifier( - self, node: nodes.ScopedEvalContextModifier, frame: Frame - ) -> None: - old_ctx_name = self.temporary_identifier() - saved_ctx = frame.eval_ctx.save() - self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") - self.visit_EvalContextModifier(node, frame) - for child in node.body: - self.visit(child, frame) - frame.eval_ctx.revert(saved_ctx) - self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/constants.py b/venv_flaskchat/lib/python3.11/site-packages/jinja2/constants.py deleted file mode 100644 index 41a1c23..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/jinja2/constants.py +++ /dev/null @@ -1,20 +0,0 @@ -#: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = """\ -a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at -auctor augue bibendum blandit class commodo condimentum congue consectetuer -consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus -diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend -elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames -faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac -hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum -justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem -luctus maecenas magna magnis malesuada massa 
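# The (Scoped)EvalContextModifier visitors deleted above back the {% autoescape %} tag,
# which flips escaping for a scoped region. Illustrative sketch (this statement is
# built into Jinja 3.x, which is what the removed venv vendored):
from jinja2 import Environment

env = Environment(autoescape=True)
print(env.from_string(
    "{{ '<b>' }} {% autoescape false %}{{ '<b>' }}{% endautoescape %}"
).render())
# -> "&lt;b&gt; <b>"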
mattis mauris metus mi molestie -mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non -nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque -penatibus per pharetra phasellus placerat platea porta porttitor posuere -potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus -ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit -sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor -tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices -ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate""" diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/debug.py b/venv_flaskchat/lib/python3.11/site-packages/jinja2/debug.py deleted file mode 100644 index 7ed7e92..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/jinja2/debug.py +++ /dev/null @@ -1,191 +0,0 @@ -import sys -import typing as t -from types import CodeType -from types import TracebackType - -from .exceptions import TemplateSyntaxError -from .utils import internal_code -from .utils import missing - -if t.TYPE_CHECKING: - from .runtime import Context - - -def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: - """Rewrite the current exception to replace any tracebacks from - within compiled template code with tracebacks that look like they - came from the template source. - - This must be called within an ``except`` block. - - :param source: For ``TemplateSyntaxError``, the original source if - known. - :return: The original exception with the rewritten traceback. - """ - _, exc_value, tb = sys.exc_info() - exc_value = t.cast(BaseException, exc_value) - tb = t.cast(TracebackType, tb) - - if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: - exc_value.translated = True - exc_value.source = source - # Remove the old traceback, otherwise the frames from the - # compiler still show up. - exc_value.with_traceback(None) - # Outside of runtime, so the frame isn't executing template - # code, but it still needs to point at the template. - tb = fake_traceback( - exc_value, None, exc_value.filename or "", exc_value.lineno - ) - else: - # Skip the frame for the render function. - tb = tb.tb_next - - stack = [] - - # Build the stack of traceback object, replacing any in template - # code with the source file and line information. - while tb is not None: - # Skip frames decorated with @internalcode. These are internal - # calls that aren't useful in template debugging output. - if tb.tb_frame.f_code in internal_code: - tb = tb.tb_next - continue - - template = tb.tb_frame.f_globals.get("__jinja_template__") - - if template is not None: - lineno = template.get_corresponding_lineno(tb.tb_lineno) - fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) - stack.append(fake_tb) - else: - stack.append(tb) - - tb = tb.tb_next - - tb_next = None - - # Assign tb_next in reverse to avoid circular references. - for tb in reversed(stack): - tb.tb_next = tb_next - tb_next = tb - - return exc_value.with_traceback(tb_next) - - -def fake_traceback( # type: ignore - exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int -) -> TracebackType: - """Produce a new traceback object that looks like it came from the - template source instead of the compiled code. 
The filename, line - number, and location name will point to the template, and the local - variables will be the current template context. - - :param exc_value: The original exception to be re-raised to create - the new traceback. - :param tb: The original traceback to get the local variables and - code info from. - :param filename: The template filename. - :param lineno: The line number in the template source. - """ - if tb is not None: - # Replace the real locals with the context that would be - # available at that point in the template. - locals = get_template_locals(tb.tb_frame.f_locals) - locals.pop("__jinja_exception__", None) - else: - locals = {} - - globals = { - "__name__": filename, - "__file__": filename, - "__jinja_exception__": exc_value, - } - # Raise an exception at the correct line number. - code: CodeType = compile( - "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" - ) - - # Build a new code object that points to the template file and - # replaces the location with a block name. - location = "template" - - if tb is not None: - function = tb.tb_frame.f_code.co_name - - if function == "root": - location = "top-level template code" - elif function.startswith("block_"): - location = f"block {function[6:]!r}" - - if sys.version_info >= (3, 8): - code = code.replace(co_name=location) - else: - code = CodeType( - code.co_argcount, - code.co_kwonlyargcount, - code.co_nlocals, - code.co_stacksize, - code.co_flags, - code.co_code, - code.co_consts, - code.co_names, - code.co_varnames, - code.co_filename, - location, - code.co_firstlineno, - code.co_lnotab, - code.co_freevars, - code.co_cellvars, - ) - - # Execute the new code, which is guaranteed to raise, and return - # the new traceback without this frame. - try: - exec(code, globals, locals) - except BaseException: - return sys.exc_info()[2].tb_next # type: ignore - - -def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: - """Based on the runtime locals, get the context that would be - available at that point in the template. - """ - # Start with the current template context. - ctx: "t.Optional[Context]" = real_locals.get("context") - - if ctx is not None: - data: t.Dict[str, t.Any] = ctx.get_all().copy() - else: - data = {} - - # Might be in a derived context that only sets local variables - # rather than pushing a context. Local variables follow the scheme - # l_depth_name. Find the highest-depth local that has a value for - # each name. - local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} - - for name, value in real_locals.items(): - if not name.startswith("l_") or value is missing: - # Not a template variable, or no longer relevant. - continue - - try: - _, depth_str, name = name.split("_", 2) - depth = int(depth_str) - except ValueError: - continue - - cur_depth = local_overrides.get(name, (-1,))[0] - - if cur_depth < depth: - local_overrides[name] = (depth, value) - - # Modify the context with any derived context. 
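# debug.py, deleted above, rewrites tracebacks so frames from compiled template code
# point back at the template source. Illustrative sketch of the observable effect:
import traceback
from jinja2 import Environment

env = Environment()
try:
    env.from_string("first line\n{{ 1 // 0 }}").render()
except ZeroDivisionError:
    traceback.print_exc()
    # the rewritten stack typically contains a frame like:
    #   File "<template>", line 2, in top-level template code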
- for name, (_, value) in local_overrides.items(): - if value is missing: - data.pop(name, None) - else: - data[name] = value - - return data diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/defaults.py b/venv_flaskchat/lib/python3.11/site-packages/jinja2/defaults.py deleted file mode 100644 index 638cad3..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/jinja2/defaults.py +++ /dev/null @@ -1,48 +0,0 @@ -import typing as t - -from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 -from .tests import TESTS as DEFAULT_TESTS # noqa: F401 -from .utils import Cycler -from .utils import generate_lorem_ipsum -from .utils import Joiner -from .utils import Namespace - -if t.TYPE_CHECKING: - import typing_extensions as te - -# defaults for the parser / lexer -BLOCK_START_STRING = "{%" -BLOCK_END_STRING = "%}" -VARIABLE_START_STRING = "{{" -VARIABLE_END_STRING = "}}" -COMMENT_START_STRING = "{#" -COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX: t.Optional[str] = None -LINE_COMMENT_PREFIX: t.Optional[str] = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" -KEEP_TRAILING_NEWLINE = False - -# default filters, tests and namespace - -DEFAULT_NAMESPACE = { - "range": range, - "dict": dict, - "lipsum": generate_lorem_ipsum, - "cycler": Cycler, - "joiner": Joiner, - "namespace": Namespace, -} - -# default policies -DEFAULT_POLICIES: t.Dict[str, t.Any] = { - "compiler.ascii_str": True, - "urlize.rel": "noopener", - "urlize.target": None, - "urlize.extra_schemes": None, - "truncate.leeway": 5, - "json.dumps_function": None, - "json.dumps_kwargs": {"sort_keys": True}, - "ext.i18n.trimmed": False, -} diff --git a/venv_flaskchat/lib/python3.11/site-packages/jinja2/environment.py b/venv_flaskchat/lib/python3.11/site-packages/jinja2/environment.py deleted file mode 100644 index ea04e8b..0000000 --- a/venv_flaskchat/lib/python3.11/site-packages/jinja2/environment.py +++ /dev/null @@ -1,1667 +0,0 @@ -"""Classes for managing templates and their runtime and compile time -options. -""" -import os -import typing -import typing as t -import weakref -from collections import ChainMap -from functools import lru_cache -from functools import partial -from functools import reduce -from types import CodeType - -from markupsafe import Markup - -from . 
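# defaults.py, deleted above, wires up the default globals (range, dict, lipsum, cycler,
# joiner, namespace) and policies. Illustrative sketch using a few of those globals:
from jinja2 import Environment

env = Environment()
print(env.from_string(
    "{% set comma = joiner(', ') %}{% set row = cycler('odd', 'even') %}"
    "{% for n in range(3) %}{{ comma() }}{{ row.next() }}-{{ n }}{% endfor %}"
).render())
# -> "odd-0, even-1, odd-2"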
import nodes -from .compiler import CodeGenerator -from .compiler import generate -from .defaults import BLOCK_END_STRING -from .defaults import BLOCK_START_STRING -from .defaults import COMMENT_END_STRING -from .defaults import COMMENT_START_STRING -from .defaults import DEFAULT_FILTERS -from .defaults import DEFAULT_NAMESPACE -from .defaults import DEFAULT_POLICIES -from .defaults import DEFAULT_TESTS -from .defaults import KEEP_TRAILING_NEWLINE -from .defaults import LINE_COMMENT_PREFIX -from .defaults import LINE_STATEMENT_PREFIX -from .defaults import LSTRIP_BLOCKS -from .defaults import NEWLINE_SEQUENCE -from .defaults import TRIM_BLOCKS -from .defaults import VARIABLE_END_STRING -from .defaults import VARIABLE_START_STRING -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .lexer import get_lexer -from .lexer import Lexer -from .lexer import TokenStream -from .nodes import EvalContext -from .parser import Parser -from .runtime import Context -from .runtime import new_context -from .runtime import Undefined -from .utils import _PassArg -from .utils import concat -from .utils import consume -from .utils import import_string -from .utils import internalcode -from .utils import LRUCache -from .utils import missing - -if t.TYPE_CHECKING: - import typing_extensions as te - from .bccache import BytecodeCache - from .ext import Extension - from .loaders import BaseLoader - -_env_bound = t.TypeVar("_env_bound", bound="Environment") - - -# for direct template usage we have up to ten living environments -@lru_cache(maxsize=10) -def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: - """Return a new spontaneous environment. A spontaneous environment - is used for templates created directly rather than through an - existing environment. - - :param cls: Environment class to create. - :param args: Positional arguments passed to environment. - """ - env = cls(*args) - env.shared = True - return env - - -def create_cache( - size: int, -) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: - """Return the cache class for the given size.""" - if size == 0: - return None - - if size < 0: - return {} - - return LRUCache(size) # type: ignore - - -def copy_cache( - cache: t.Optional[t.MutableMapping], -) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: - """Create an empty copy of the given cache.""" - if cache is None: - return None - - if type(cache) is dict: - return {} - - return LRUCache(cache.capacity) # type: ignore - - -def load_extensions( - environment: "Environment", - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], -) -> t.Dict[str, "Extension"]: - """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated extensions. - """ - result = {} - - for extension in extensions: - if isinstance(extension, str): - extension = t.cast(t.Type["Extension"], import_string(extension)) - - result[extension.identifier] = extension(environment) - - return result - - -def _environment_config_check(environment: "Environment") -> "Environment": - """Perform a sanity check on the environment.""" - assert issubclass( - environment.undefined, Undefined - ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
- assert ( - environment.block_start_string - != environment.variable_start_string - != environment.comment_start_string - ), "block, variable and comment start strings must be different." - assert environment.newline_sequence in { - "\r", - "\r\n", - "\n", - }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." - return environment - - -class Environment: - r"""The core component of Jinja is the `Environment`. It contains - important shared variables like configuration, filters, tests, - globals and others. Instances of this class may be modified if - they are not shared and if no template was loaded so far. - Modifications on environments after the first template was loaded - will lead to surprising effects and undefined behavior. - - Here are the possible initialization parameters: - - `block_start_string` - The string marking the beginning of a block. Defaults to ``'{%'``. - - `block_end_string` - The string marking the end of a block. Defaults to ``'%}'``. - - `variable_start_string` - The string marking the beginning of a print statement. - Defaults to ``'{{'``. - - `variable_end_string` - The string marking the end of a print statement. Defaults to - ``'}}'``. - - `comment_start_string` - The string marking the beginning of a comment. Defaults to ``'{#'``. - - `comment_end_string` - The string marking the end of a comment. Defaults to ``'#}'``. - - `line_statement_prefix` - If given and a string, this will be used as prefix for line based - statements. See also :ref:`line-statements`. - - `line_comment_prefix` - If given and a string, this will be used as prefix for line based - comments. See also :ref:`line-statements`. - - .. versionadded:: 2.2 - - `trim_blocks` - If this is set to ``True`` the first newline after a block is - removed (block, not variable tag!). Defaults to `False`. - - `lstrip_blocks` - If this is set to ``True`` leading spaces and tabs are stripped - from the start of a line to a block. Defaults to `False`. - - `newline_sequence` - The sequence that starts a newline. Must be one of ``'\r'``, - ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a - useful default for Linux and OS X systems as well as web - applications. - - `keep_trailing_newline` - Preserve the trailing newline when rendering templates. - The default is ``False``, which causes a single newline, - if present, to be stripped from the end of the template. - - .. versionadded:: 2.7 - - `extensions` - List of Jinja extensions to use. This can either be import paths - as strings or extension classes. For more information have a - look at :ref:`the extensions documentation `. - - `optimized` - should the optimizer be enabled? Default is ``True``. - - `undefined` - :class:`Undefined` or a subclass of it that is used to represent - undefined values in the template. - - `finalize` - A callable that can be used to process the result of a variable - expression before it is output. For example one can convert - ``None`` implicitly into an empty string here. - - `autoescape` - If set to ``True`` the XML/HTML autoescaping feature is enabled by - default. For more details about autoescaping see - :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also - be a callable that is passed the template name and has to - return ``True`` or ``False`` depending on autoescape should be - enabled by default. - - .. versionchanged:: 2.4 - `autoescape` can now be a function - - `loader` - The template loader for this environment. - - `cache_size` - The size of the cache. 
Per default this is ``400`` which means - that if more than 400 templates are loaded the loader will clean - out the least recently used template. If the cache size is set to - ``0`` templates are recompiled all the time, if the cache size is - ``-1`` the cache will not be cleaned. - - .. versionchanged:: 2.8 - The cache size was increased to 400 from a low 50. - - `auto_reload` - Some loaders load templates from locations where the template - sources may change (ie: file system or database). If - ``auto_reload`` is set to ``True`` (default) every time a template is - requested the loader checks if the source changed and if yes, it - will reload the template. For higher performance it's possible to - disable that. - - `bytecode_cache` - If set to a bytecode cache object, this object will provide a - cache for the internal Jinja bytecode so that templates don't - have to be parsed if they were not changed. - - See :ref:`bytecode-cache` for more information. - - `enable_async` - If set to true this enables async template execution which - allows using async functions and generators. - """ - - #: if this environment is sandboxed. Modifying this variable won't make - #: the environment sandboxed though. For a real sandboxed environment - #: have a look at jinja2.sandbox. This flag alone controls the code - #: generation by the compiler. - sandboxed = False - - #: True if the environment is just an overlay - overlayed = False - - #: the environment this environment is linked to if it is an overlay - linked_to: t.Optional["Environment"] = None - - #: shared environments have this set to `True`. A shared environment - #: must not be modified - shared = False - - #: the class that is used for code generation. See - #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class: t.Type["CodeGenerator"] = CodeGenerator - - concat = "".join - - #: the context class that is used for templates. See - #: :class:`~jinja2.runtime.Context` for more information. - context_class: t.Type[Context] = Context - - template_class: t.Type["Template"] - - def __init__( - self, - block_start_string: str = BLOCK_START_STRING, - block_end_string: str = BLOCK_END_STRING, - variable_start_string: str = VARIABLE_START_STRING, - variable_end_string: str = VARIABLE_END_STRING, - comment_start_string: str = COMMENT_START_STRING, - comment_end_string: str = COMMENT_END_STRING, - line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, - line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, - trim_blocks: bool = TRIM_BLOCKS, - lstrip_blocks: bool = LSTRIP_BLOCKS, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, - keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), - optimized: bool = True, - undefined: t.Type[Undefined] = Undefined, - finalize: t.Optional[t.Callable[..., t.Any]] = None, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, - loader: t.Optional["BaseLoader"] = None, - cache_size: int = 400, - auto_reload: bool = True, - bytecode_cache: t.Optional["BytecodeCache"] = None, - enable_async: bool = False, - ): - # !!Important notice!! - # The constructor accepts quite a few arguments that should be - # passed by keyword rather than position. 
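# The Environment docstring deleted above documents the constructor options. A typical
# configuration, as an illustrative sketch (the loader path and values are made up):
from jinja2 import Environment, FileSystemLoader, StrictUndefined, select_autoescape

env = Environment(
    loader=FileSystemLoader("templates"),
    autoescape=select_autoescape(),   # escape .html/.xml templates
    trim_blocks=True,                 # drop the first newline after a block tag
    lstrip_blocks=True,               # strip leading whitespace before block tags
    undefined=StrictUndefined,        # fail loudly on undefined names
    cache_size=400,                   # the documented default
    auto_reload=True,
)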
However it's important to - # not change the order of arguments because it's used at least - # internally in those cases: - # - spontaneous environments (i18n extension and Template) - # - unittests - # If parameter changes are required only add parameters at the end - # and don't change the arguments (or the defaults!) of the arguments - # existing already. - - # lexer / parser information - self.block_start_string = block_start_string - self.block_end_string = block_end_string - self.variable_start_string = variable_start_string - self.variable_end_string = variable_end_string - self.comment_start_string = comment_start_string - self.comment_end_string = comment_end_string - self.line_statement_prefix = line_statement_prefix - self.line_comment_prefix = line_comment_prefix - self.trim_blocks = trim_blocks - self.lstrip_blocks = lstrip_blocks - self.newline_sequence = newline_sequence - self.keep_trailing_newline = keep_trailing_newline - - # runtime information - self.undefined: t.Type[Undefined] = undefined - self.optimized = optimized - self.finalize = finalize - self.autoescape = autoescape - - # defaults - self.filters = DEFAULT_FILTERS.copy() - self.tests = DEFAULT_TESTS.copy() - self.globals = DEFAULT_NAMESPACE.copy() - - # set the loader provided - self.loader = loader - self.cache = create_cache(cache_size) - self.bytecode_cache = bytecode_cache - self.auto_reload = auto_reload - - # configurable policies - self.policies = DEFAULT_POLICIES.copy() - - # load extensions - self.extensions = load_extensions(self, extensions) - - self.is_async = enable_async - _environment_config_check(self) - - def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: - """Adds an extension after the environment was created. - - .. versionadded:: 2.5 - """ - self.extensions.update(load_extensions(self, [extension])) - - def extend(self, **attributes: t.Any) -> None: - """Add the items to the instance of the environment if they do not exist - yet. This is used by :ref:`extensions ` to register - callbacks and configuration values without breaking inheritance. - """ - for key, value in attributes.items(): - if not hasattr(self, key): - setattr(self, key, value) - - def overlay( - self, - block_start_string: str = missing, - block_end_string: str = missing, - variable_start_string: str = missing, - variable_end_string: str = missing, - comment_start_string: str = missing, - comment_end_string: str = missing, - line_statement_prefix: t.Optional[str] = missing, - line_comment_prefix: t.Optional[str] = missing, - trim_blocks: bool = missing, - lstrip_blocks: bool = missing, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, - keep_trailing_newline: bool = missing, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, - optimized: bool = missing, - undefined: t.Type[Undefined] = missing, - finalize: t.Optional[t.Callable[..., t.Any]] = missing, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, - loader: t.Optional["BaseLoader"] = missing, - cache_size: int = missing, - auto_reload: bool = missing, - bytecode_cache: t.Optional["BytecodeCache"] = missing, - enable_async: bool = False, - ) -> "Environment": - """Create a new overlay environment that shares all the data with the - current environment except for cache and the overridden attributes. - Extensions cannot be removed for an overlayed environment. 
An overlayed
-        environment automatically gets all the extensions of the environment it
-        is linked to plus optional extra extensions.
-
-        Creating overlays should happen after the initial environment was set
-        up completely. Not all attributes are truly linked, some are just
-        copied over so modifications on the original environment may not shine
-        through.
-
-        .. versionchanged:: 3.1.2
-            Added the ``newline_sequence``, ``keep_trailing_newline``,
-            and ``enable_async`` parameters to match ``__init__``.
-        """
-        args = dict(locals())
-        del args["self"], args["cache_size"], args["extensions"], args["enable_async"]
-
-        rv = object.__new__(self.__class__)
-        rv.__dict__.update(self.__dict__)
-        rv.overlayed = True
-        rv.linked_to = self
-
-        for key, value in args.items():
-            if value is not missing:
-                setattr(rv, key, value)
-
-        if cache_size is not missing:
-            rv.cache = create_cache(cache_size)
-        else:
-            rv.cache = copy_cache(self.cache)
-
-        rv.extensions = {}
-        for key, value in self.extensions.items():
-            rv.extensions[key] = value.bind(rv)
-        if extensions is not missing:
-            rv.extensions.update(load_extensions(rv, extensions))
-
-        if enable_async is not missing:
-            rv.is_async = enable_async
-
-        return _environment_config_check(rv)
-
-    @property
-    def lexer(self) -> Lexer:
-        """The lexer for this environment."""
-        return get_lexer(self)
-
-    def iter_extensions(self) -> t.Iterator["Extension"]:
-        """Iterates over the extensions by priority."""
-        return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
-
-    def getitem(
-        self, obj: t.Any, argument: t.Union[str, t.Any]
-    ) -> t.Union[t.Any, Undefined]:
-        """Get an item or attribute of an object but prefer the item."""
-        try:
-            return obj[argument]
-        except (AttributeError, TypeError, LookupError):
-            if isinstance(argument, str):
-                try:
-                    attr = str(argument)
-                except Exception:
-                    pass
-                else:
-                    try:
-                        return getattr(obj, attr)
-                    except AttributeError:
-                        pass
-        return self.undefined(obj=obj, name=argument)
-
-    def getattr(self, obj: t.Any, attribute: str) -> t.Any:
-        """Get an item or attribute of an object but prefer the attribute.
-        Unlike :meth:`getitem` the attribute *must* be a string.
-        """
-        try:
-            return getattr(obj, attribute)
-        except AttributeError:
-            pass
-        try:
-            return obj[attribute]
-        except (TypeError, LookupError, AttributeError):
-            return self.undefined(obj=obj, name=attribute)
-
-    def _filter_test_common(
-        self,
-        name: t.Union[str, Undefined],
-        value: t.Any,
-        args: t.Optional[t.Sequence[t.Any]],
-        kwargs: t.Optional[t.Mapping[str, t.Any]],
-        context: t.Optional[Context],
-        eval_ctx: t.Optional[EvalContext],
-        is_filter: bool,
-    ) -> t.Any:
-        if is_filter:
-            env_map = self.filters
-            type_name = "filter"
-        else:
-            env_map = self.tests
-            type_name = "test"
-
-        func = env_map.get(name)  # type: ignore
-
-        if func is None:
-            msg = f"No {type_name} named {name!r}."
-
-            if isinstance(name, Undefined):
-                try:
-                    name._fail_with_undefined_error()
-                except Exception as e:
-                    msg = f"{msg} ({e}; did you forget to quote the callable name?)"
-
-            raise TemplateRuntimeError(msg)
-
-        args = [value, *(args if args is not None else ())]
-        kwargs = kwargs if kwargs is not None else {}
-        pass_arg = _PassArg.from_obj(func)
-
-        if pass_arg is _PassArg.context:
-            if context is None:
-                raise TemplateRuntimeError(
-                    f"Attempted to invoke a context {type_name} without context."
-                )
-
-            args.insert(0, context)
-        elif pass_arg is _PassArg.eval_context:
-            if eval_ctx is None:
-                if context is not None:
-                    eval_ctx = context.eval_ctx
-                else:
-                    eval_ctx = EvalContext(self)
-
-            args.insert(0, eval_ctx)
-        elif pass_arg is _PassArg.environment:
-            args.insert(0, self)
-
-        return func(*args, **kwargs)
-
-    def call_filter(
-        self,
-        name: str,
-        value: t.Any,
-        args: t.Optional[t.Sequence[t.Any]] = None,
-        kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
-        context: t.Optional[Context] = None,
-        eval_ctx: t.Optional[EvalContext] = None,
-    ) -> t.Any:
-        """Invoke a filter on a value the same way the compiler does.
-
-        This might return a coroutine if the filter is running from an
-        environment in async mode and the filter supports async
-        execution. It's your responsibility to await this if needed.
-
-        .. versionadded:: 2.7
-        """
-        return self._filter_test_common(
-            name, value, args, kwargs, context, eval_ctx, True
-        )
-
-    def call_test(
-        self,
-        name: str,
-        value: t.Any,
-        args: t.Optional[t.Sequence[t.Any]] = None,
-        kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
-        context: t.Optional[Context] = None,
-        eval_ctx: t.Optional[EvalContext] = None,
-    ) -> t.Any:
-        """Invoke a test on a value the same way the compiler does.
-
-        This might return a coroutine if the test is running from an
-        environment in async mode and the test supports async execution.
-        It's your responsibility to await this if needed.
-
-        .. versionchanged:: 3.0
-            Tests support ``@pass_context``, etc. decorators. Added
-            the ``context`` and ``eval_ctx`` parameters.
-
-        .. versionadded:: 2.7
-        """
-        return self._filter_test_common(
-            name, value, args, kwargs, context, eval_ctx, False
-        )
-
-    @internalcode
-    def parse(
-        self,
-        source: str,
-        name: t.Optional[str] = None,
-        filename: t.Optional[str] = None,
-    ) -> nodes.Template:
-        """Parse the sourcecode and return the abstract syntax tree. This
-        tree of nodes is used by the compiler to convert the template into
-        executable source- or bytecode. This is useful for debugging or to
-        extract information from templates.
-
-        If you are :ref:`developing Jinja extensions <writing-extensions>`
-        this gives you a good overview of the node tree generated.
-        """
-        try:
-            return self._parse(source, name, filename)
-        except TemplateSyntaxError:
-            self.handle_exception(source=source)
-
-    def _parse(
-        self, source: str, name: t.Optional[str], filename: t.Optional[str]
-    ) -> nodes.Template:
-        """Internal parsing function used by `parse` and `compile`."""
-        return Parser(self, source, name, filename).parse()
-
-    def lex(
-        self,
-        source: str,
-        name: t.Optional[str] = None,
-        filename: t.Optional[str] = None,
-    ) -> t.Iterator[t.Tuple[int, str, str]]:
-        """Lex the given sourcecode and return a generator that yields
-        tokens as tuples in the form ``(lineno, token_type, value)``.
-        This can be useful for :ref:`extension development <writing-extensions>`
-        and debugging templates.
-
-        This does not perform preprocessing. If you want the preprocessing
-        of the extensions to be applied you have to filter source through
-        the :meth:`preprocess` method.
-        """
-        source = str(source)
-        try:
-            return self.lexer.tokeniter(source, name, filename)
-        except TemplateSyntaxError:
-            self.handle_exception(source=source)
-
-    def preprocess(
-        self,
-        source: str,
-        name: t.Optional[str] = None,
-        filename: t.Optional[str] = None,
-    ) -> str:
-        """Preprocesses the source with all extensions.
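# A short sketch (assumed) of call_filter()/call_test(), which invoke a
# registered filter or test exactly as compiled template code would,
# including the @pass_context/@pass_environment argument injection handled
# above. "upper", "join" and "odd" are built-in defaults.
from jinja2 import Environment

env = Environment()
print(env.call_filter("upper", "hello"))           # -> HELLO
print(env.call_filter("join", ["a", "b"], ["-"]))  # extra positional args -> a-b
print(env.call_test("odd", 3))                     # -> True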
This is automatically
-        called for all parsing and compiling methods but *not* for :meth:`lex`
-        because there you usually only want the actual source tokenized.
-        """
-        return reduce(
-            lambda s, e: e.preprocess(s, name, filename),
-            self.iter_extensions(),
-            str(source),
-        )
-
-    def _tokenize(
-        self,
-        source: str,
-        name: t.Optional[str],
-        filename: t.Optional[str] = None,
-        state: t.Optional[str] = None,
-    ) -> TokenStream:
-        """Called by the parser to do the preprocessing and filtering
-        for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
-        """
-        source = self.preprocess(source, name, filename)
-        stream = self.lexer.tokenize(source, name, filename, state)
-
-        for ext in self.iter_extensions():
-            stream = ext.filter_stream(stream)  # type: ignore
-
-        if not isinstance(stream, TokenStream):
-            stream = TokenStream(stream, name, filename)  # type: ignore
-
-        return stream
-
-    def _generate(
-        self,
-        source: nodes.Template,
-        name: t.Optional[str],
-        filename: t.Optional[str],
-        defer_init: bool = False,
-    ) -> str:
-        """Internal hook that can be overridden to hook a different generate
-        method in.
-
-        .. versionadded:: 2.5
-        """
-        return generate(  # type: ignore
-            source,
-            self,
-            name,
-            filename,
-            defer_init=defer_init,
-            optimized=self.optimized,
-        )
-
-    def _compile(self, source: str, filename: str) -> CodeType:
-        """Internal hook that can be overridden to hook a different compile
-        method in.
-
-        .. versionadded:: 2.5
-        """
-        return compile(source, filename, "exec")  # type: ignore
-
-    @typing.overload
-    def compile(  # type: ignore
-        self,
-        source: t.Union[str, nodes.Template],
-        name: t.Optional[str] = None,
-        filename: t.Optional[str] = None,
-        raw: "te.Literal[False]" = False,
-        defer_init: bool = False,
-    ) -> CodeType:
-        ...
-
-    @typing.overload
-    def compile(
-        self,
-        source: t.Union[str, nodes.Template],
-        name: t.Optional[str] = None,
-        filename: t.Optional[str] = None,
-        raw: "te.Literal[True]" = ...,
-        defer_init: bool = False,
-    ) -> str:
-        ...
-
-    @internalcode
-    def compile(
-        self,
-        source: t.Union[str, nodes.Template],
-        name: t.Optional[str] = None,
-        filename: t.Optional[str] = None,
-        raw: bool = False,
-        defer_init: bool = False,
-    ) -> t.Union[str, CodeType]:
-        """Compile a node or template source code. The `name` parameter is
-        the load name of the template after it was joined using
-        :meth:`join_path` if necessary, not the filename on the file system.
-        the `filename` parameter is the estimated filename of the template on
-        the file system. If the template came from a database or memory this
-        can be omitted.
-
-        The return value of this method is a python code object. If the `raw`
-        parameter is `True` the return value will be a string with python
-        code equivalent to the bytecode returned otherwise. This method is
-        mainly used internally.
-
-        `defer_init` is use internally to aid the module code generator. This
-        causes the generated code to be able to import without the global
-        environment variable to be set.
-
-        .. versionadded:: 2.4
-            `defer_init` parameter added.
-        """
-        source_hint = None
-        try:
-            if isinstance(source, str):
-                source_hint = source
-                source = self._parse(source, name, filename)
-            source = self._generate(source, name, filename, defer_init=defer_init)
-            if raw:
-                return source
-            if filename is None:
-                filename = "
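# A debugging sketch (assumed) of the parse()/lex()/compile() pipeline shown
# above; the template string is illustrative. find_undeclared_variables comes
# from jinja2.meta.
from jinja2 import Environment
from jinja2.meta import find_undeclared_variables

env = Environment()
source = "Hello {{ user.name }}!"

ast = env.parse(source)                    # abstract syntax tree (nodes.Template)
print(find_undeclared_variables(ast))      # -> {'user'}

for lineno, token_type, value in env.lex(source):
    print(lineno, token_type, value)       # raw token stream, no preprocessing

print(env.compile(source, raw=True)[:80])  # generated Python source as a string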