diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e09d1f0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,24 @@ +# Python bytecode / cache +__pycache__/ +*.py[cod] +*.so + +# Virtual environment +venv/ + +# Local Unix domain sockets +*.sock + +# Environment & config files (if local only) +*.env +*.local.json +system_info.json +site_info.json +frontend_config.json + +# Logs +*.log + +# OS-specific +.DS_Store +Thumbs.db diff --git a/__pycache__/app.cpython-310.pyc b/__pycache__/app.cpython-310.pyc deleted file mode 100644 index 8308846..0000000 Binary files a/__pycache__/app.cpython-310.pyc and /dev/null differ diff --git a/__pycache__/auth_utils.cpython-310.pyc b/__pycache__/auth_utils.cpython-310.pyc deleted file mode 100644 index 49e6d70..0000000 Binary files a/__pycache__/auth_utils.cpython-310.pyc and /dev/null differ diff --git a/__pycache__/core_functions.cpython-310.pyc b/__pycache__/core_functions.cpython-310.pyc deleted file mode 100644 index 5872e3d..0000000 Binary files a/__pycache__/core_functions.cpython-310.pyc and /dev/null differ diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1 deleted file mode 100644 index b49d77b..0000000 --- a/venv/bin/Activate.ps1 +++ /dev/null @@ -1,247 +0,0 @@ -<# -.Synopsis -Activate a Python virtual environment for the current PowerShell session. - -.Description -Pushes the python executable for a virtual environment to the front of the -$Env:PATH environment variable and sets the prompt to signify that you are -in a Python virtual environment. Makes use of the command line switches as -well as the `pyvenv.cfg` file values present in the virtual environment. - -.Parameter VenvDir -Path to the directory that contains the virtual environment to activate. The -default value for this is the parent of the directory that the Activate.ps1 -script is located within. - -.Parameter Prompt -The prompt prefix to display when this virtual environment is activated. By -default, this prompt is the name of the virtual environment folder (VenvDir) -surrounded by parentheses and followed by a single space (ie. '(.venv) '). - -.Example -Activate.ps1 -Activates the Python virtual environment that contains the Activate.ps1 script. - -.Example -Activate.ps1 -Verbose -Activates the Python virtual environment that contains the Activate.ps1 script, -and shows extra information about the activation as it executes. - -.Example -Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv -Activates the Python virtual environment located in the specified location. - -.Example -Activate.ps1 -Prompt "MyPython" -Activates the Python virtual environment that contains the Activate.ps1 script, -and prefixes the current prompt with the specified string (surrounded in -parentheses) while the virtual environment is active. - -.Notes -On Windows, it may be required to enable this Activate.ps1 script by setting the -execution policy for the user. 
You can do this by issuing the following PowerShell -command: - -PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - -For more information on Execution Policies: -https://go.microsoft.com/fwlink/?LinkID=135170 - -#> -Param( - [Parameter(Mandatory = $false)] - [String] - $VenvDir, - [Parameter(Mandatory = $false)] - [String] - $Prompt -) - -<# Function declarations --------------------------------------------------- #> - -<# -.Synopsis -Remove all shell session elements added by the Activate script, including the -addition of the virtual environment's Python executable from the beginning of -the PATH variable. - -.Parameter NonDestructive -If present, do not remove this function from the global namespace for the -session. - -#> -function global:deactivate ([switch]$NonDestructive) { - # Revert to original values - - # The prior prompt: - if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { - Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt - Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT - } - - # The prior PYTHONHOME: - if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { - Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME - Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME - } - - # The prior PATH: - if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { - Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH - Remove-Item -Path Env:_OLD_VIRTUAL_PATH - } - - # Just remove the VIRTUAL_ENV altogether: - if (Test-Path -Path Env:VIRTUAL_ENV) { - Remove-Item -Path env:VIRTUAL_ENV - } - - # Just remove VIRTUAL_ENV_PROMPT altogether. - if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { - Remove-Item -Path env:VIRTUAL_ENV_PROMPT - } - - # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: - if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { - Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force - } - - # Leave deactivate function in the global namespace if requested: - if (-not $NonDestructive) { - Remove-Item -Path function:deactivate - } -} - -<# -.Description -Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the -given folder, and returns them in a map. - -For each line in the pyvenv.cfg file, if that line can be parsed into exactly -two strings separated by `=` (with any amount of whitespace surrounding the =) -then it is considered a `key = value` line. The left hand string is the key, -the right hand is the value. - -If the value starts with a `'` or a `"` then the first and last character is -stripped from the value before being captured. - -.Parameter ConfigDir -Path to the directory that contains the `pyvenv.cfg` file. -#> -function Get-PyVenvConfig( - [String] - $ConfigDir -) { - Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" - - # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). - $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue - - # An empty map will be returned if no config file is found. - $pyvenvConfig = @{ } - - if ($pyvenvConfigPath) { - - Write-Verbose "File exists, parse `key = value` lines" - $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath - - $pyvenvConfigContent | ForEach-Object { - $keyval = $PSItem -split "\s*=\s*", 2 - if ($keyval[0] -and $keyval[1]) { - $val = $keyval[1] - - # Remove extraneous quotations around a string value. 
- if ("'""".Contains($val.Substring(0, 1))) { - $val = $val.Substring(1, $val.Length - 2) - } - - $pyvenvConfig[$keyval[0]] = $val - Write-Verbose "Adding Key: '$($keyval[0])'='$val'" - } - } - } - return $pyvenvConfig -} - - -<# Begin Activate script --------------------------------------------------- #> - -# Determine the containing directory of this script -$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition -$VenvExecDir = Get-Item -Path $VenvExecPath - -Write-Verbose "Activation script is located in path: '$VenvExecPath'" -Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" -Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" - -# Set values required in priority: CmdLine, ConfigFile, Default -# First, get the location of the virtual environment, it might not be -# VenvExecDir if specified on the command line. -if ($VenvDir) { - Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" -} -else { - Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." - $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") - Write-Verbose "VenvDir=$VenvDir" -} - -# Next, read the `pyvenv.cfg` file to determine any required value such -# as `prompt`. -$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir - -# Next, set the prompt from the command line, or the config file, or -# just use the name of the virtual environment folder. -if ($Prompt) { - Write-Verbose "Prompt specified as argument, using '$Prompt'" -} -else { - Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" - if ($pyvenvCfg -and $pyvenvCfg['prompt']) { - Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" - $Prompt = $pyvenvCfg['prompt']; - } - else { - Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" - Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" - $Prompt = Split-Path -Path $venvDir -Leaf - } -} - -Write-Verbose "Prompt = '$Prompt'" -Write-Verbose "VenvDir='$VenvDir'" - -# Deactivate any currently active virtual environment, but leave the -# deactivate function in place. -deactivate -nondestructive - -# Now set the environment variable VIRTUAL_ENV, used by many tools to determine -# that there is an activated venv. 
-$env:VIRTUAL_ENV = $VenvDir - -if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { - - Write-Verbose "Setting prompt to '$Prompt'" - - # Set the prompt to include the env name - # Make sure _OLD_VIRTUAL_PROMPT is global - function global:_OLD_VIRTUAL_PROMPT { "" } - Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT - New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt - - function global:prompt { - Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " - _OLD_VIRTUAL_PROMPT - } - $env:VIRTUAL_ENV_PROMPT = $Prompt -} - -# Clear PYTHONHOME -if (Test-Path -Path Env:PYTHONHOME) { - Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME - Remove-Item -Path Env:PYTHONHOME -} - -# Add the venv to the PATH -Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH -$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/venv/bin/activate b/venv/bin/activate deleted file mode 100644 index 608bf86..0000000 --- a/venv/bin/activate +++ /dev/null @@ -1,69 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# you cannot run it directly - -deactivate () { - # reset old environment variables - if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then - PATH="${_OLD_VIRTUAL_PATH:-}" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then - PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # This should detect bash and zsh, which have a hash command that must - # be called to get it to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r 2> /dev/null - fi - - if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then - PS1="${_OLD_VIRTUAL_PS1:-}" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - unset VIRTUAL_ENV_PROMPT - if [ ! "${1:-}" = "nondestructive" ] ; then - # Self destruct! - unset -f deactivate - fi -} - -# unset irrelevant variables -deactivate nondestructive - -VIRTUAL_ENV=/home/mjensen/network_tool/venv -export VIRTUAL_ENV - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/"bin":$PATH" -export PATH - -# unset PYTHONHOME if set -# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) -# could use `if (set -u; : $PYTHONHOME) ;` in bash -if [ -n "${PYTHONHOME:-}" ] ; then - _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" - unset PYTHONHOME -fi - -if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then - _OLD_VIRTUAL_PS1="${PS1:-}" - PS1='(venv) '"${PS1:-}" - export PS1 - VIRTUAL_ENV_PROMPT='(venv) ' - export VIRTUAL_ENV_PROMPT -fi - -# This should detect bash and zsh, which have a hash command that must -# be called to get it to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r 2> /dev/null -fi diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh deleted file mode 100644 index 7657c5c..0000000 --- a/venv/bin/activate.csh +++ /dev/null @@ -1,26 +0,0 @@ -# This file must be used with "source bin/activate.csh" *from csh*. -# You cannot run it directly. -# Created by Davide Di Blasi . 
-# Ported to Python 3.3 venv by Andrew Svetlov - -alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' - -# Unset irrelevant variables. -deactivate nondestructive - -setenv VIRTUAL_ENV /home/mjensen/network_tool/venv - -set _OLD_VIRTUAL_PATH="$PATH" -setenv PATH "$VIRTUAL_ENV/"bin":$PATH" - - -set _OLD_VIRTUAL_PROMPT="$prompt" - -if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then - set prompt = '(venv) '"$prompt" - setenv VIRTUAL_ENV_PROMPT '(venv) ' -endif - -alias pydoc python -m pydoc - -rehash diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish deleted file mode 100644 index 0e4a548..0000000 --- a/venv/bin/activate.fish +++ /dev/null @@ -1,69 +0,0 @@ -# This file must be used with "source /bin/activate.fish" *from fish* -# (https://fishshell.com/); you cannot run it directly. - -function deactivate -d "Exit virtual environment and return to normal shell environment" - # reset old environment variables - if test -n "$_OLD_VIRTUAL_PATH" - set -gx PATH $_OLD_VIRTUAL_PATH - set -e _OLD_VIRTUAL_PATH - end - if test -n "$_OLD_VIRTUAL_PYTHONHOME" - set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME - set -e _OLD_VIRTUAL_PYTHONHOME - end - - if test -n "$_OLD_FISH_PROMPT_OVERRIDE" - set -e _OLD_FISH_PROMPT_OVERRIDE - # prevents error when using nested fish instances (Issue #93858) - if functions -q _old_fish_prompt - functions -e fish_prompt - functions -c _old_fish_prompt fish_prompt - functions -e _old_fish_prompt - end - end - - set -e VIRTUAL_ENV - set -e VIRTUAL_ENV_PROMPT - if test "$argv[1]" != "nondestructive" - # Self-destruct! - functions -e deactivate - end -end - -# Unset irrelevant variables. -deactivate nondestructive - -set -gx VIRTUAL_ENV /home/mjensen/network_tool/venv - -set -gx _OLD_VIRTUAL_PATH $PATH -set -gx PATH "$VIRTUAL_ENV/"bin $PATH - -# Unset PYTHONHOME if set. -if set -q PYTHONHOME - set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME - set -e PYTHONHOME -end - -if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" - # fish uses a function instead of an env var to generate the prompt. - - # Save the current fish_prompt function as the function _old_fish_prompt. - functions -c fish_prompt _old_fish_prompt - - # With the original prompt function renamed, we can override with our own. - function fish_prompt - # Save the return status of the last command. - set -l old_status $status - - # Output the venv prompt; color taken from the blue of the Python logo. - printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal) - - # Restore the return status of the previous command. - echo "exit $old_status" | . - # Output the original/"old" prompt. 
- _old_fish_prompt - end - - set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" - set -gx VIRTUAL_ENV_PROMPT '(venv) ' -end diff --git a/venv/bin/flask b/venv/bin/flask deleted file mode 100755 index 519f3eb..0000000 --- a/venv/bin/flask +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/mjensen/network_tool/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from flask.cli import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/gunicorn b/venv/bin/gunicorn deleted file mode 100755 index 1a01dcb..0000000 --- a/venv/bin/gunicorn +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/mjensen/network_tool/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from gunicorn.app.wsgiapp import run -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(run()) diff --git a/venv/bin/normalizer b/venv/bin/normalizer deleted file mode 100755 index a4f5e2f..0000000 --- a/venv/bin/normalizer +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/mjensen/network_tool/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from charset_normalizer import cli -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(cli.cli_detect()) diff --git a/venv/bin/pip b/venv/bin/pip deleted file mode 100755 index f049529..0000000 --- a/venv/bin/pip +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/mjensen/network_tool/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/pip3 b/venv/bin/pip3 deleted file mode 100755 index f049529..0000000 --- a/venv/bin/pip3 +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/mjensen/network_tool/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/pip3.10 b/venv/bin/pip3.10 deleted file mode 100755 index f049529..0000000 --- a/venv/bin/pip3.10 +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/mjensen/network_tool/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/python b/venv/bin/python deleted file mode 120000 index b8a0adb..0000000 --- a/venv/bin/python +++ /dev/null @@ -1 +0,0 @@ -python3 \ No newline at end of file diff --git a/venv/bin/python3 b/venv/bin/python3 deleted file mode 120000 index ae65fda..0000000 --- a/venv/bin/python3 +++ /dev/null @@ -1 +0,0 @@ -/usr/bin/python3 \ No newline at end of file diff --git a/venv/bin/python3.10 b/venv/bin/python3.10 deleted file mode 120000 index b8a0adb..0000000 --- a/venv/bin/python3.10 +++ /dev/null @@ -1 +0,0 @@ -python3 \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt b/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt deleted file 
mode 100644 index 9d227a0..0000000 --- a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/METADATA b/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/METADATA deleted file mode 100644 index 82261f2..0000000 --- a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/METADATA +++ /dev/null @@ -1,92 +0,0 @@ -Metadata-Version: 2.1 -Name: MarkupSafe -Version: 3.0.2 -Summary: Safely add untrusted strings to HTML/XML markup. -Maintainer-email: Pallets -License: Copyright 2010 Pallets - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - 1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - 3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT - HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED - TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://markupsafe.palletsprojects.com/ -Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ -Project-URL: Source, https://github.com/pallets/markupsafe/ -Project-URL: Chat, https://discord.gg/pallets -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Classifier: Typing :: Typed -Requires-Python: >=3.9 -Description-Content-Type: text/markdown -License-File: LICENSE.txt - -# MarkupSafe - -MarkupSafe implements a text object that escapes characters so it is -safe to use in HTML and XML. Characters that have special meanings are -replaced so that they display as the actual characters. This mitigates -injection attacks, meaning untrusted user input can safely be displayed -on a page. - - -## Examples - -```pycon ->>> from markupsafe import Markup, escape - ->>> # escape replaces special characters and wraps in Markup ->>> escape("<script>alert(document.cookie);</script>") -Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;') - ->>> # wrap in Markup to mark text "safe" and prevent escaping ->>> Markup("<strong>Hello</strong>") -Markup('<strong>hello</strong>') - ->>> escape(Markup("<strong>Hello</strong>")) -Markup('<strong>hello</strong>') - ->>> # Markup is a str subclass ->>> # methods and operators escape their arguments ->>> template = Markup("Hello <em>{name}</em>") ->>> template.format(name='"World"') -Markup('Hello <em>&#34;World&#34;</em>') -``` - -## Donate - -The Pallets organization develops and supports MarkupSafe and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -[please donate today][].
- -[please donate today]: https://palletsprojects.com/donate diff --git a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/RECORD b/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/RECORD deleted file mode 100644 index 6622392..0000000 --- a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -MarkupSafe-3.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -MarkupSafe-3.0.2.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -MarkupSafe-3.0.2.dist-info/METADATA,sha256=aAwbZhSmXdfFuMM-rEHpeiHRkBOGESyVLJIuwzHP-nw,3975 -MarkupSafe-3.0.2.dist-info/RECORD,, -MarkupSafe-3.0.2.dist-info/WHEEL,sha256=_kVlewavvOSnwZE_whBk3jlE_Ob-nL5GvlVcLkpXSD8,151 -MarkupSafe-3.0.2.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 -markupsafe/__init__.py,sha256=sr-U6_27DfaSrj5jnHYxWN-pvhM27sjlDplMDPZKm7k,13214 -markupsafe/__pycache__/__init__.cpython-310.pyc,, -markupsafe/__pycache__/_native.cpython-310.pyc,, -markupsafe/_native.py,sha256=hSLs8Jmz5aqayuengJJ3kdT5PwNpBWpKrmQSdipndC8,210 -markupsafe/_speedups.c,sha256=O7XulmTo-epI6n2FtMVOrJXl8EAaIwD2iNYmBI5SEoQ,4149 -markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so,sha256=x4RoxWgyqAEokk-AZrWvrLDxLE-dm-zZSZYV_gOiLJA,34976 -markupsafe/_speedups.pyi,sha256=ENd1bYe7gbBUf2ywyYWOGUpnXOHNJ-cgTNqetlW8h5k,41 -markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL b/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL deleted file mode 100644 index 78f844e..0000000 --- a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.2.0) -Root-Is-Purelib: false -Tag: cp310-cp310-manylinux_2_17_x86_64 -Tag: cp310-cp310-manylinux2014_x86_64 - diff --git a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt deleted file mode 100644 index 75bf729..0000000 --- a/venv/lib/python3.10/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -markupsafe diff --git a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/LICENSE b/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/LICENSE deleted file mode 100644 index 91e18a6..0000000 --- a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/LICENSE +++ /dev/null @@ -1,174 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
diff --git a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/METADATA deleted file mode 100644 index fb216d4..0000000 --- a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/METADATA +++ /dev/null @@ -1,245 +0,0 @@ -Metadata-Version: 2.1 -Name: PyNaCl -Version: 1.5.0 -Summary: Python binding to the Networking and Cryptography (NaCl) library -Home-page: https://github.com/pyca/pynacl/ -Author: The PyNaCl developers -Author-email: cryptography-dev@python.org -License: Apache License 2.0 -Platform: UNKNOWN -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Requires-Python: >=3.6 -Requires-Dist: cffi (>=1.4.1) -Provides-Extra: docs -Requires-Dist: sphinx (>=1.6.5) ; extra == 'docs' -Requires-Dist: sphinx-rtd-theme ; extra == 'docs' -Provides-Extra: tests -Requires-Dist: pytest (!=3.3.0,>=3.2.1) ; extra == 'tests' -Requires-Dist: hypothesis (>=3.27.0) ; extra == 'tests' - -=============================================== -PyNaCl: Python binding to the libsodium library -=============================================== - -.. image:: https://img.shields.io/pypi/v/pynacl.svg - :target: https://pypi.org/project/PyNaCl/ - :alt: Latest Version - -.. image:: https://codecov.io/github/pyca/pynacl/coverage.svg?branch=main - :target: https://codecov.io/github/pyca/pynacl?branch=main - -.. image:: https://img.shields.io/pypi/pyversions/pynacl.svg - :target: https://pypi.org/project/PyNaCl/ - :alt: Compatible Python Versions - -PyNaCl is a Python binding to `libsodium`_, which is a fork of the -`Networking and Cryptography library`_. These libraries have a stated goal of -improving usability, security and speed. It supports Python 3.6+ as well as -PyPy 3. - -.. _libsodium: https://github.com/jedisct1/libsodium -.. _Networking and Cryptography library: https://nacl.cr.yp.to/ - -Features --------- - -* Digital signatures -* Secret-key encryption -* Public-key encryption -* Hashing and message authentication -* Password based key derivation and password hashing - -`Changelog`_ ------------- - -.. _Changelog: https://pynacl.readthedocs.io/en/stable/changelog/ - -Installation -============ - -Binary wheel install --------------------- - -PyNaCl ships as a binary wheel on macOS, Windows and Linux ``manylinux1`` [#many]_ , -so all dependencies are included. Make sure you have an up-to-date pip -and run: - -.. code-block:: console - - $ pip install pynacl - -Faster wheel build ------------------- - -You can define the environment variable ``LIBSODIUM_MAKE_ARGS`` to pass arguments to ``make`` -and enable `parallelization`_: - -.. code-block:: console - - $ LIBSODIUM_MAKE_ARGS=-j4 pip install pynacl - -Linux source build ------------------- - -PyNaCl relies on `libsodium`_, a portable C library. A copy is bundled -with PyNaCl so to install you can run: - -.. code-block:: console - - $ pip install pynacl - -If you'd prefer to use the version of ``libsodium`` provided by your -distribution, you can disable the bundled copy during install by running: - -.. code-block:: console - - $ SODIUM_INSTALL=system pip install pynacl - -.. 
warning:: Usage of the legacy ``easy_install`` command provided by setuptools - is generally discouraged, and is completely unsupported in PyNaCl's case. - -.. _parallelization: https://www.gnu.org/software/make/manual/html_node/Parallel.html - -.. _libsodium: https://github.com/jedisct1/libsodium - -.. [#many] `manylinux1 wheels `_ - are built on a baseline linux environment based on Centos 5.11 - and should work on most x86 and x86_64 glibc based linux environments. - -Changelog -========= - -1.5.0 (2022-01-07) ------------------- - -* **BACKWARDS INCOMPATIBLE:** Removed support for Python 2.7 and Python 3.5. -* **BACKWARDS INCOMPATIBLE:** We no longer distribute ``manylinux1`` - wheels. -* Added ``manylinux2014``, ``manylinux_2_24``, ``musllinux``, and macOS - ``universal2`` wheels (the latter supports macOS ``arm64``). -* Update ``libsodium`` to 1.0.18-stable (July 25, 2021 release). -* Add inline type hints. - -1.4.0 (2020-05-25) ------------------- - -* Update ``libsodium`` to 1.0.18. -* **BACKWARDS INCOMPATIBLE:** We no longer distribute 32-bit ``manylinux1`` - wheels. Continuing to produce them was a maintenance burden. -* Added support for Python 3.8, and removed support for Python 3.4. -* Add low level bindings for extracting the seed and the public key - from crypto_sign_ed25519 secret key -* Add low level bindings for deterministic random generation. -* Add ``wheel`` and ``setuptools`` setup_requirements in ``setup.py`` (#485) -* Fix checks on very slow builders (#481, #495) -* Add low-level bindings to ed25519 arithmetic functions -* Update low-level blake2b state implementation -* Fix wrong short-input behavior of SealedBox.decrypt() (#517) -* Raise CryptPrefixError exception instead of InvalidkeyError when trying - to check a password against a verifier stored in a unknown format (#519) -* Add support for minimal builds of libsodium. Trying to call functions - not available in a minimal build will raise an UnavailableError - exception. To compile a minimal build of the bundled libsodium, set - the SODIUM_INSTALL_MINIMAL environment variable to any non-empty - string (e.g. ``SODIUM_INSTALL_MINIMAL=1``) for setup. - -1.3.0 2018-09-26 ----------------- - -* Added support for Python 3.7. -* Update ``libsodium`` to 1.0.16. -* Run and test all code examples in PyNaCl docs through sphinx's - doctest builder. -* Add low-level bindings for chacha20-poly1305 AEAD constructions. -* Add low-level bindings for the chacha20-poly1305 secretstream constructions. -* Add low-level bindings for ed25519ph pre-hashed signing construction. -* Add low-level bindings for constant-time increment and addition - on fixed-precision big integers represented as little-endian - byte sequences. -* Add low-level bindings for the ISO/IEC 7816-4 compatible padding API. -* Add low-level bindings for libsodium's crypto_kx... key exchange - construction. -* Set hypothesis deadline to None in tests/test_pwhash.py to avoid - incorrect test failures on slower processor architectures. GitHub - issue #370 - -1.2.1 - 2017-12-04 ------------------- - -* Update hypothesis minimum allowed version. -* Infrastructure: add proper configuration for readthedocs builder - runtime environment. - -1.2.0 - 2017-11-01 ------------------- - -* Update ``libsodium`` to 1.0.15. -* Infrastructure: add jenkins support for automatic build of - ``manylinux1`` binary wheels -* Added support for ``SealedBox`` construction. 
-* Added support for ``argon2i`` and ``argon2id`` password hashing constructs - and restructured high-level password hashing implementation to expose - the same interface for all hashers. -* Added support for 128 bit ``siphashx24`` variant of ``siphash24``. -* Added support for ``from_seed`` APIs for X25519 keypair generation. -* Dropped support for Python 3.3. - -1.1.2 - 2017-03-31 ------------------- - -* reorder link time library search path when using bundled - libsodium - -1.1.1 - 2017-03-15 ------------------- - -* Fixed a circular import bug in ``nacl.utils``. - -1.1.0 - 2017-03-14 ------------------- - -* Dropped support for Python 2.6. -* Added ``shared_key()`` method on ``Box``. -* You can now pass ``None`` to ``nonce`` when encrypting with ``Box`` or - ``SecretBox`` and it will automatically generate a random nonce. -* Added support for ``siphash24``. -* Added support for ``blake2b``. -* Added support for ``scrypt``. -* Update ``libsodium`` to 1.0.11. -* Default to the bundled ``libsodium`` when compiling. -* All raised exceptions are defined mixing-in - ``nacl.exceptions.CryptoError`` - -1.0.1 - 2016-01-24 ------------------- - -* Fix an issue with absolute paths that prevented the creation of wheels. - -1.0 - 2016-01-23 ----------------- - -* PyNaCl has been ported to use the new APIs available in cffi 1.0+. - Due to this change we no longer support PyPy releases older than 2.6. -* Python 3.2 support has been dropped. -* Functions to convert between Ed25519 and Curve25519 keys have been added. - -0.3.0 - 2015-03-04 ------------------- - -* The low-level API (`nacl.c.*`) has been changed to match the - upstream NaCl C/C++ conventions (as well as those of other NaCl bindings). - The order of arguments and return values has changed significantly. To - avoid silent failures, `nacl.c` has been removed, and replaced with - `nacl.bindings` (with the new argument ordering). If you have code which - calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review - the new docstrings and update your code/imports to match the new - conventions. 
- - diff --git a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/RECORD deleted file mode 100644 index 6b18358..0000000 --- a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/RECORD +++ /dev/null @@ -1,68 +0,0 @@ -PyNaCl-1.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -PyNaCl-1.5.0.dist-info/LICENSE,sha256=0xdK1j5yHUydzLitQyCEiZLTFDabxGMZcgtYAskVP-k,9694 -PyNaCl-1.5.0.dist-info/METADATA,sha256=OJaXCiHgNRywLY9cj3X2euddUPZ4dnyyqAQMU01X4j0,8634 -PyNaCl-1.5.0.dist-info/RECORD,, -PyNaCl-1.5.0.dist-info/WHEEL,sha256=TIQeZFe3DwXBO5UGlCH1aKpf5Cx6FJLbIUqd-Sq2juI,185 -PyNaCl-1.5.0.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13 -nacl/__init__.py,sha256=0IUunzBT8_Jn0DUdHacBExOYeAEMggo8slkfjo7O0XM,1116 -nacl/__pycache__/__init__.cpython-310.pyc,, -nacl/__pycache__/encoding.cpython-310.pyc,, -nacl/__pycache__/exceptions.cpython-310.pyc,, -nacl/__pycache__/hash.cpython-310.pyc,, -nacl/__pycache__/hashlib.cpython-310.pyc,, -nacl/__pycache__/public.cpython-310.pyc,, -nacl/__pycache__/secret.cpython-310.pyc,, -nacl/__pycache__/signing.cpython-310.pyc,, -nacl/__pycache__/utils.cpython-310.pyc,, -nacl/_sodium.abi3.so,sha256=uJ6RwSnbb9wO4esR0bVUqrfFHtBOGm34IQIdmaE1fGY,2740136 -nacl/bindings/__init__.py,sha256=BDlStrds2EuUS4swOL4pnf92PWVS_CHRCptX3KhEX-s,16997 -nacl/bindings/__pycache__/__init__.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_aead.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_box.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_core.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_generichash.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_hash.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_kx.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_pwhash.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_scalarmult.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_secretbox.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_secretstream.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_shorthash.cpython-310.pyc,, -nacl/bindings/__pycache__/crypto_sign.cpython-310.pyc,, -nacl/bindings/__pycache__/randombytes.cpython-310.pyc,, -nacl/bindings/__pycache__/sodium_core.cpython-310.pyc,, -nacl/bindings/__pycache__/utils.cpython-310.pyc,, -nacl/bindings/crypto_aead.py,sha256=BIw1k_JCfr5ylZk0RF5rCFIM1fhfLkEa-aiWkrfffNE,15597 -nacl/bindings/crypto_box.py,sha256=Ox0NG2t4MsGhBAa7Kgah4o0gc99ULMsqkdX56ofOouY,10139 -nacl/bindings/crypto_core.py,sha256=6u9G3y7H-QrawO785UkFFFtwDoCkeHE63GOUl9p5-eA,13736 -nacl/bindings/crypto_generichash.py,sha256=9mX0DGIIzicr-uXrqFM1nU4tirasbixDwbcdfV7W1fc,8852 -nacl/bindings/crypto_hash.py,sha256=Rg1rsEwE3azhsQT-dNVPA4NB9VogJAKn1EfxYt0pPe0,2175 -nacl/bindings/crypto_kx.py,sha256=oZNVlNgROpHOa1XQ_uZe0tqIkdfuApeJlRnwR23_74k,6723 -nacl/bindings/crypto_pwhash.py,sha256=laVDo4xFUuGyEjtZAU510AklBF6ablBy7Z3HN1WDYjY,18848 -nacl/bindings/crypto_scalarmult.py,sha256=_DX-mst2uCnzjo6fP5HRTnhv1BC95B9gmJc3L_or16g,8244 -nacl/bindings/crypto_secretbox.py,sha256=KgZ1VvkCJDlQ85jtfe9c02VofPvuEgZEhWni-aX3MsM,2914 -nacl/bindings/crypto_secretstream.py,sha256=G0FgZS01qA5RzWzm5Bdms8Yy_lvgdZDoUYYBActPmvQ,11165 -nacl/bindings/crypto_shorthash.py,sha256=PQU7djHTLDGdVs-w_TsivjFHHp5EK5k2Yh6p-6z0T60,2603 -nacl/bindings/crypto_sign.py,sha256=53j2im9E4F79qT_2U8IfCAc3lzg0VMwEjvAPEUccVDg,10342 -nacl/bindings/randombytes.py,sha256=uBK3W4WcjgnjZdWanrX0fjYZpr9KHbBgNMl9rui-Ojc,1563 
-nacl/bindings/sodium_core.py,sha256=9Y9CX--sq-TaPaQRPRpx8SWDSS9PJOja_Cqb-yqyJNQ,1039 -nacl/bindings/utils.py,sha256=KDwQnadXeNMbqEA1SmpNyCVo5k8MiUQa07QM66VzfXM,4298 -nacl/encoding.py,sha256=qTAPc2MXSkdh4cqDVY0ra6kHyViHMCmEo_re7cgGk5w,2915 -nacl/exceptions.py,sha256=GZH32aJtZgqCO4uz0LRsev8z0WyvAYuV3YVqT9AAQq4,2451 -nacl/hash.py,sha256=EYBOe6UVc9SUQINEmyuRSa1QGRSvdwdrBzTL1tdFLU8,6392 -nacl/hashlib.py,sha256=L5Fv75St8AMPvb-GhA4YqX5p1mC_Sb4HhC1NxNQMpJA,4400 -nacl/public.py,sha256=RVGCWQRjIJOmW-8sNrVLtsDjMMGx30i6UyfViGCnQNA,14792 -nacl/pwhash/__init__.py,sha256=XSDXd7wQHNLEHl0mkHfVb5lFQsp6ygHkhen718h0BSM,2675 -nacl/pwhash/__pycache__/__init__.cpython-310.pyc,, -nacl/pwhash/__pycache__/_argon2.cpython-310.pyc,, -nacl/pwhash/__pycache__/argon2i.cpython-310.pyc,, -nacl/pwhash/__pycache__/argon2id.cpython-310.pyc,, -nacl/pwhash/__pycache__/scrypt.cpython-310.pyc,, -nacl/pwhash/_argon2.py,sha256=jL1ChR9biwYh3RSuc-LJ2-W4DlVLHpir-XHGX8cpeJQ,1779 -nacl/pwhash/argon2i.py,sha256=IIvIuO9siKUu5-Wpz0SGiltLQv7Du_mi9BUE8INRK_4,4405 -nacl/pwhash/argon2id.py,sha256=H22i8O4j9Ws4L3JsXl9TRcJzDcyaVumhQRPzINAgJWM,4433 -nacl/pwhash/scrypt.py,sha256=fMr3Qht1a1EY8aebNNntfLRjinIPXtKYKKrrBhY5LDc,6986 -nacl/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -nacl/secret.py,sha256=kauBNuP-0rb3TjU2EMBMu5Vnmzjnscp1bRqMspy5LzU,12108 -nacl/signing.py,sha256=kbTEUyHLUMaNLv1nCjxzGxCs82Qs5w8gxE_CnEwPuIU,8337 -nacl/utils.py,sha256=gmlTD1x9ZNwzHd8LpALH1CHud-Htv8ejRb3y7TyS9f0,2341 diff --git a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/WHEEL deleted file mode 100644 index b93bb21..0000000 --- a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/WHEEL +++ /dev/null @@ -1,7 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: false -Tag: cp36-abi3-manylinux_2_17_x86_64 -Tag: cp36-abi3-manylinux2014_x86_64 -Tag: cp36-abi3-manylinux_2_24_x86_64 - diff --git a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt deleted file mode 100644 index f52507f..0000000 --- a/venv/lib/python3.10/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -_sodium -nacl diff --git a/venv/lib/python3.10/site-packages/_cffi_backend.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/_cffi_backend.cpython-310-x86_64-linux-gnu.so deleted file mode 100755 index 24944c8..0000000 Binary files a/venv/lib/python3.10/site-packages/_cffi_backend.cpython-310-x86_64-linux-gnu.so and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/_distutils_hack/__init__.py b/venv/lib/python3.10/site-packages/_distutils_hack/__init__.py deleted file mode 100644 index f707416..0000000 --- a/venv/lib/python3.10/site-packages/_distutils_hack/__init__.py +++ /dev/null @@ -1,132 +0,0 @@ -import sys -import os -import re -import importlib -import warnings - - -is_pypy = '__pypy__' in sys.builtin_module_names - - -warnings.filterwarnings('ignore', - r'.+ distutils\b.+ deprecated', - DeprecationWarning) - - -def warn_distutils_present(): - if 'distutils' not in sys.modules: - return - if is_pypy and sys.version_info < (3, 7): - # PyPy for 3.6 unconditionally imports distutils, so bypass the warning - # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 - return - warnings.warn( - "Distutils was imported before Setuptools, but importing Setuptools " - "also replaces 
the `distutils` module in `sys.modules`. This may lead " - "to undesirable behaviors or errors. To avoid these issues, avoid " - "using distutils directly, ensure that setuptools is installed in the " - "traditional way (e.g. not an editable install), and/or make sure " - "that setuptools is always imported before distutils.") - - -def clear_distutils(): - if 'distutils' not in sys.modules: - return - warnings.warn("Setuptools is replacing distutils.") - mods = [name for name in sys.modules if re.match(r'distutils\b', name)] - for name in mods: - del sys.modules[name] - - -def enabled(): - """ - Allow selection of distutils by environment variable. - """ - which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') - return which == 'local' - - -def ensure_local_distutils(): - clear_distutils() - - # With the DistutilsMetaFinder in place, - # perform an import to cause distutils to be - # loaded from setuptools._distutils. Ref #2906. - add_shim() - importlib.import_module('distutils') - remove_shim() - - # check that submodules load as expected - core = importlib.import_module('distutils.core') - assert '_distutils' in core.__file__, core.__file__ - - -def do_override(): - """ - Ensure that the local copy of distutils is preferred over stdlib. - - See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 - for more motivation. - """ - if enabled(): - warn_distutils_present() - ensure_local_distutils() - - -class DistutilsMetaFinder: - def find_spec(self, fullname, path, target=None): - if path is not None: - return - - method_name = 'spec_for_{fullname}'.format(**locals()) - method = getattr(self, method_name, lambda: None) - return method() - - def spec_for_distutils(self): - import importlib.abc - import importlib.util - - class DistutilsLoader(importlib.abc.Loader): - - def create_module(self, spec): - return importlib.import_module('setuptools._distutils') - - def exec_module(self, module): - pass - - return importlib.util.spec_from_loader('distutils', DistutilsLoader()) - - def spec_for_pip(self): - """ - Ensure stdlib distutils when running under pip. - See pypa/pip#8761 for rationale. - """ - if self.pip_imported_during_build(): - return - clear_distutils() - self.spec_for_distutils = lambda: None - - @staticmethod - def pip_imported_during_build(): - """ - Detect if pip is being imported in a build script. Ref #2355. 
- """ - import traceback - return any( - frame.f_globals['__file__'].endswith('setup.py') - for frame, line in traceback.walk_stack(None) - ) - - -DISTUTILS_FINDER = DistutilsMetaFinder() - - -def add_shim(): - sys.meta_path.insert(0, DISTUTILS_FINDER) - - -def remove_shim(): - try: - sys.meta_path.remove(DISTUTILS_FINDER) - except ValueError: - pass diff --git a/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index eb698cb..0000000 Binary files a/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc b/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc deleted file mode 100644 index e84afeb..0000000 Binary files a/venv/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/_distutils_hack/override.py b/venv/lib/python3.10/site-packages/_distutils_hack/override.py deleted file mode 100644 index 2cc433a..0000000 --- a/venv/lib/python3.10/site-packages/_distutils_hack/override.py +++ /dev/null @@ -1 +0,0 @@ -__import__('_distutils_hack').do_override() diff --git a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/LICENSE b/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/LICENSE deleted file mode 100644 index 11069ed..0000000 --- a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/METADATA deleted file mode 100644 index 96f0bdf..0000000 --- a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/METADATA +++ /dev/null @@ -1,330 +0,0 @@ -Metadata-Version: 2.2 -Name: bcrypt -Version: 4.3.0 -Summary: Modern password hashing for your software and your servers -Author-email: The Python Cryptographic Authority developers -License: Apache-2.0 -Project-URL: homepage, https://github.com/pyca/bcrypt/ -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE -Provides-Extra: tests -Requires-Dist: pytest!=3.3.0,>=3.2.1; extra == "tests" -Provides-Extra: typecheck -Requires-Dist: mypy; extra == "typecheck" - -bcrypt -====== - -.. image:: https://img.shields.io/pypi/v/bcrypt.svg - :target: https://pypi.org/project/bcrypt/ - :alt: Latest Version - -.. image:: https://github.com/pyca/bcrypt/workflows/CI/badge.svg?branch=main - :target: https://github.com/pyca/bcrypt/actions?query=workflow%3ACI+branch%3Amain - -Acceptable password hashing for your software and your servers (but you should -really use argon2id or scrypt) - - -Installation -============ - -To install bcrypt, simply: - -.. code:: console - - $ pip install bcrypt - -Note that bcrypt should build very easily on Linux provided you have a C -compiler and a Rust compiler (the minimum supported Rust version is 1.56.0). - -For Debian and Ubuntu, the following command will ensure that the required dependencies are installed: - -.. code:: console - - $ sudo apt-get install build-essential cargo - -For Fedora and RHEL-derivatives, the following command will ensure that the required dependencies are installed: - -.. code:: console - - $ sudo yum install gcc cargo - -For Alpine, the following command will ensure that the required dependencies are installed: - -.. code:: console - - $ apk add --update musl-dev gcc cargo - - -Alternatives -============ - -While bcrypt remains an acceptable choice for password storage, depending on your specific use case you may also want to consider using scrypt (either via `standard library`_ or `cryptography`_) or argon2id via `argon2_cffi`_. - -Changelog -========= - -Unreleased ----------- - -* Dropped support for Python 3.7. -* We now support free-threaded Python 3.13. -* We now support PyPy 3.11. -* We now publish wheels for free-threaded Python 3.13, for PyPy 3.11 on - ``manylinux``, and for ARMv7l on ``manylinux``. - -4.2.1 ------ - -* Bump Rust dependency versions - this should resolve crashes on Python 3.13 - free-threaded builds. -* We no longer build ``manylinux`` wheels for PyPy 3.9. - -4.2.0 ------ - -* Bump Rust dependency versions -* Removed the ``BCRYPT_ALLOW_RUST_163`` environment variable. 
- -4.1.3 ------ - -* Bump Rust dependency versions - -4.1.2 ------ - -* Publish both ``py37`` and ``py39`` wheels. This should resolve some errors - relating to initializing a module multiple times per process. - -4.1.1 ------ - -* Fixed the type signature on the ``kdf`` method. -* Fixed packaging bug on Windows. -* Fixed incompatibility with passlib package detection assumptions. - -4.1.0 ------ - -* Dropped support for Python 3.6. -* Bumped MSRV to 1.64. (Note: Rust 1.63 can be used by setting the ``BCRYPT_ALLOW_RUST_163`` environment variable) - -4.0.1 ------ - -* We now build PyPy ``manylinux`` wheels. -* Fixed a bug where passing an invalid ``salt`` to ``checkpw`` could result in - a ``pyo3_runtime.PanicException``. It now correctly raises a ``ValueError``. - -4.0.0 ------ - -* ``bcrypt`` is now implemented in Rust. Users building from source will need - to have a Rust compiler available. Nothing will change for users downloading - wheels. -* We no longer ship ``manylinux2010`` wheels. Users should upgrade to the latest - ``pip`` to ensure this doesn’t cause issues downloading wheels on their - platform. We now ship ``manylinux_2_28`` wheels for users on new enough platforms. -* ``NUL`` bytes are now allowed in inputs. - - -3.2.2 ------ - -* Fixed packaging of ``py.typed`` files in wheels so that ``mypy`` works. - -3.2.1 ------ - -* Added support for compilation on z/OS -* The next release of ``bcrypt`` with be 4.0 and it will require Rust at - compile time, for users building from source. There will be no additional - requirement for users who are installing from wheels. Users on most - platforms will be able to obtain a wheel by making sure they have an up to - date ``pip``. The minimum supported Rust version will be 1.56.0. -* This will be the final release for which we ship ``manylinux2010`` wheels. - Going forward the minimum supported manylinux ABI for our wheels will be - ``manylinux2014``. The vast majority of users will continue to receive - ``manylinux`` wheels provided they have an up to date ``pip``. - - -3.2.0 ------ - -* Added typehints for library functions. -* Dropped support for Python versions less than 3.6 (2.7, 3.4, 3.5). -* Shipped ``abi3`` Windows wheels (requires pip >= 20). - -3.1.7 ------ - -* Set a ``setuptools`` lower bound for PEP517 wheel building. -* We no longer distribute 32-bit ``manylinux1`` wheels. Continuing to produce - them was a maintenance burden. - -3.1.6 ------ - -* Added support for compilation on Haiku. - -3.1.5 ------ - -* Added support for compilation on AIX. -* Dropped Python 2.6 and 3.3 support. -* Switched to using ``abi3`` wheels for Python 3. If you are not getting a - wheel on a compatible platform please upgrade your ``pip`` version. - -3.1.4 ------ - -* Fixed compilation with mingw and on illumos. - -3.1.3 ------ -* Fixed a compilation issue on Solaris. -* Added a warning when using too few rounds with ``kdf``. - -3.1.2 ------ -* Fixed a compile issue affecting big endian platforms. -* Fixed invalid escape sequence warnings on Python 3.6. -* Fixed building in non-UTF8 environments on Python 2. - -3.1.1 ------ -* Resolved a ``UserWarning`` when used with ``cffi`` 1.8.3. - -3.1.0 ------ -* Added support for ``checkpw``, a convenience method for verifying a password. -* Ensure that you get a ``$2y$`` hash when you input a ``$2y$`` salt. -* Fixed a regression where ``$2a`` hashes were vulnerable to a wraparound bug. -* Fixed compilation under Alpine Linux. 
- -3.0.0 ------ -* Switched the C backend to code obtained from the OpenBSD project rather than - openwall. -* Added support for ``bcrypt_pbkdf`` via the ``kdf`` function. - -2.0.0 ------ -* Added support for an adjustible prefix when calling ``gensalt``. -* Switched to CFFI 1.0+ - -Usage ------ - -Password Hashing -~~~~~~~~~~~~~~~~ - -Hashing and then later checking that a password matches the previous hashed -password is very simple: - -.. code:: pycon - - >>> import bcrypt - >>> password = b"super secret password" - >>> # Hash a password for the first time, with a randomly-generated salt - >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt()) - >>> # Check that an unhashed password matches one that has previously been - >>> # hashed - >>> if bcrypt.checkpw(password, hashed): - ... print("It Matches!") - ... else: - ... print("It Does not Match :(") - -KDF -~~~ - -As of 3.0.0 ``bcrypt`` now offers a ``kdf`` function which does ``bcrypt_pbkdf``. -This KDF is used in OpenSSH's newer encrypted private key format. - -.. code:: pycon - - >>> import bcrypt - >>> key = bcrypt.kdf( - ... password=b'password', - ... salt=b'salt', - ... desired_key_bytes=32, - ... rounds=100) - - -Adjustable Work Factor -~~~~~~~~~~~~~~~~~~~~~~ -One of bcrypt's features is an adjustable logarithmic work factor. To adjust -the work factor merely pass the desired number of rounds to -``bcrypt.gensalt(rounds=12)`` which defaults to 12): - -.. code:: pycon - - >>> import bcrypt - >>> password = b"super secret password" - >>> # Hash a password for the first time, with a certain number of rounds - >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt(14)) - >>> # Check that a unhashed password matches one that has previously been - >>> # hashed - >>> if bcrypt.checkpw(password, hashed): - ... print("It Matches!") - ... else: - ... print("It Does not Match :(") - - -Adjustable Prefix -~~~~~~~~~~~~~~~~~ - -Another one of bcrypt's features is an adjustable prefix to let you define what -libraries you'll remain compatible with. To adjust this, pass either ``2a`` or -``2b`` (the default) to ``bcrypt.gensalt(prefix=b"2b")`` as a bytes object. - -As of 3.0.0 the ``$2y$`` prefix is still supported in ``hashpw`` but deprecated. - -Maximum Password Length -~~~~~~~~~~~~~~~~~~~~~~~ - -The bcrypt algorithm only handles passwords up to 72 characters, any characters -beyond that are ignored. To work around this, a common approach is to hash a -password with a cryptographic hash (such as ``sha256``) and then base64 -encode it to prevent NULL byte problems before hashing the result with -``bcrypt``: - -.. code:: pycon - - >>> password = b"an incredibly long password" * 10 - >>> hashed = bcrypt.hashpw( - ... base64.b64encode(hashlib.sha256(password).digest()), - ... bcrypt.gensalt() - ... ) - -Compatibility -------------- - -This library should be compatible with py-bcrypt and it will run on Python -3.8+ (including free-threaded builds), and PyPy 3. - -Security --------- - -``bcrypt`` follows the `same security policy as cryptography`_, if you -identify a vulnerability, we ask you to contact us privately. - -.. _`same security policy as cryptography`: https://cryptography.io/en/latest/security.html -.. _`standard library`: https://docs.python.org/3/library/hashlib.html#hashlib.scrypt -.. _`argon2_cffi`: https://argon2-cffi.readthedocs.io -.. 
_`cryptography`: https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt diff --git a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/RECORD deleted file mode 100644 index aa6b812..0000000 --- a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -bcrypt-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -bcrypt-4.3.0.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850 -bcrypt-4.3.0.dist-info/METADATA,sha256=95qX7ziIfmOF0kNM95YZuWhLVfFy-6EtssVvf1ZgeWg,10042 -bcrypt-4.3.0.dist-info/RECORD,, -bcrypt-4.3.0.dist-info/WHEEL,sha256=XlovOtcAZFqrc4OSNBtc5R3yDeRHyhWP24RdDnylFpY,111 -bcrypt-4.3.0.dist-info/top_level.txt,sha256=BkR_qBzDbSuycMzHWE1vzXrfYecAzUVmQs6G2CukqNI,7 -bcrypt/__init__.py,sha256=cv-NupIX6P7o6A4PK_F0ur6IZoDr3GnvyzFO9k16wKQ,1000 -bcrypt/__init__.pyi,sha256=ITUCB9mPVU8sKUbJQMDUH5YfQXZb1O55F9qvKZR_o8I,333 -bcrypt/__pycache__/__init__.cpython-310.pyc,, -bcrypt/_bcrypt.abi3.so,sha256=oMArVCuY_atg2H4SGNfM-zbfEgUOkd4qSiWn2nPqmXc,644928 -bcrypt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/WHEEL deleted file mode 100644 index dd95e91..0000000 --- a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.8.2) -Root-Is-Purelib: false -Tag: cp39-abi3-manylinux_2_34_x86_64 - diff --git a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/top_level.txt deleted file mode 100644 index 7f0b6e7..0000000 --- a/venv/lib/python3.10/site-packages/bcrypt-4.3.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -bcrypt diff --git a/venv/lib/python3.10/site-packages/bcrypt/__init__.py b/venv/lib/python3.10/site-packages/bcrypt/__init__.py deleted file mode 100644 index 81a92fd..0000000 --- a/venv/lib/python3.10/site-packages/bcrypt/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ._bcrypt import ( - __author__, - __copyright__, - __email__, - __license__, - __summary__, - __title__, - __uri__, - checkpw, - gensalt, - hashpw, - kdf, -) -from ._bcrypt import ( - __version_ex__ as __version__, -) - -__all__ = [ - "__author__", - "__copyright__", - "__email__", - "__license__", - "__summary__", - "__title__", - "__uri__", - "__version__", - "checkpw", - "gensalt", - "hashpw", - "kdf", -] diff --git a/venv/lib/python3.10/site-packages/bcrypt/__init__.pyi b/venv/lib/python3.10/site-packages/bcrypt/__init__.pyi deleted file mode 100644 index 12e4a2e..0000000 --- a/venv/lib/python3.10/site-packages/bcrypt/__init__.pyi +++ /dev/null @@ -1,10 +0,0 @@ -def gensalt(rounds: int = 12, prefix: bytes = b"2b") -> bytes: ... 
-def hashpw(password: bytes, salt: bytes) -> bytes: ... -def checkpw(password: bytes, hashed_password: bytes) -> bool: ... -def kdf( - password: bytes, - salt: bytes, - desired_key_bytes: int, - rounds: int, - ignore_few_rounds: bool = False, -) -> bytes: ... diff --git a/venv/lib/python3.10/site-packages/bcrypt/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/bcrypt/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index aff812e..0000000 Binary files a/venv/lib/python3.10/site-packages/bcrypt/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/bcrypt/_bcrypt.abi3.so b/venv/lib/python3.10/site-packages/bcrypt/_bcrypt.abi3.so deleted file mode 100755 index ae9f55b..0000000 Binary files a/venv/lib/python3.10/site-packages/bcrypt/_bcrypt.abi3.so and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/bcrypt/py.typed b/venv/lib/python3.10/site-packages/bcrypt/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/LICENSE.txt b/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/LICENSE.txt deleted file mode 100644 index 79c9825..0000000 --- a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright 2010 Jason Kirtland - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
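For context on the typed API surface declared in the removed `bcrypt/__init__.pyi` stubs above, a minimal sketch of how those four entry points fit together (an illustration only — it assumes the PyPI `bcrypt` package is installed; values are placeholders):

```python
# Sketch of the bcrypt API named in the deleted __init__.pyi stubs.
# Assumes the PyPI "bcrypt" package is installed; inputs are placeholders.
import bcrypt

password = b"correct horse battery staple"

# gensalt(rounds=12, prefix=b"2b") -> bytes; hashpw(password, salt) -> bytes
hashed = bcrypt.hashpw(password, bcrypt.gensalt(rounds=12))

# checkpw(password, hashed_password) -> bool
assert bcrypt.checkpw(password, hashed)

# kdf(password, salt, desired_key_bytes, rounds) -> bytes (bcrypt_pbkdf)
key = bcrypt.kdf(password=password, salt=b"example-salt", desired_key_bytes=32, rounds=100)
assert len(key) == 32
```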
diff --git a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/METADATA deleted file mode 100644 index 6d343f5..0000000 --- a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.3 -Name: blinker -Version: 1.9.0 -Summary: Fast, simple object-to-object and broadcast signaling -Author: Jason Kirtland -Maintainer-email: Pallets Ecosystem -Requires-Python: >=3.9 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://blinker.readthedocs.io -Project-URL: Source, https://github.com/pallets-eco/blinker/ - -# Blinker - -Blinker provides a fast dispatching system that allows any number of -interested parties to subscribe to events, or "signals". - - -## Pallets Community Ecosystem - -> [!IMPORTANT]\ -> This project is part of the Pallets Community Ecosystem. Pallets is the open -> source organization that maintains Flask; Pallets-Eco enables community -> maintenance of related projects. If you are interested in helping maintain -> this project, please reach out on [the Pallets Discord server][discord]. -> -> [discord]: https://discord.gg/pallets - - -## Example - -Signal receivers can subscribe to specific senders or receive signals -sent by any sender. - -```pycon ->>> from blinker import signal ->>> started = signal('round-started') ->>> def each(round): -... print(f"Round {round}") -... ->>> started.connect(each) - ->>> def round_two(round): -... print("This is round two.") -... ->>> started.connect(round_two, sender=2) - ->>> for round in range(1, 4): -... started.send(round) -... -Round 1! -Round 2! -This is round two. -Round 3! 
-``` - diff --git a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/RECORD deleted file mode 100644 index 3f9f399..0000000 --- a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/RECORD +++ /dev/null @@ -1,12 +0,0 @@ -blinker-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -blinker-1.9.0.dist-info/LICENSE.txt,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054 -blinker-1.9.0.dist-info/METADATA,sha256=uIRiM8wjjbHkCtbCyTvctU37IAZk0kEe5kxAld1dvzA,1633 -blinker-1.9.0.dist-info/RECORD,, -blinker-1.9.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82 -blinker/__init__.py,sha256=I2EdZqpy4LyjX17Hn1yzJGWCjeLaVaPzsMgHkLfj_cQ,317 -blinker/__pycache__/__init__.cpython-310.pyc,, -blinker/__pycache__/_utilities.cpython-310.pyc,, -blinker/__pycache__/base.cpython-310.pyc,, -blinker/_utilities.py,sha256=0J7eeXXTUx0Ivf8asfpx0ycVkp0Eqfqnj117x2mYX9E,1675 -blinker/base.py,sha256=QpDuvXXcwJF49lUBcH5BiST46Rz9wSG7VW_p7N_027M,19132 -blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/WHEEL deleted file mode 100644 index e3c6fee..0000000 --- a/venv/lib/python3.10/site-packages/blinker-1.9.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.10.1 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/blinker/__init__.py b/venv/lib/python3.10/site-packages/blinker/__init__.py deleted file mode 100644 index 1772fa4..0000000 --- a/venv/lib/python3.10/site-packages/blinker/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from .base import ANY -from .base import default_namespace -from .base import NamedSignal -from .base import Namespace -from .base import Signal -from .base import signal - -__all__ = [ - "ANY", - "default_namespace", - "NamedSignal", - "Namespace", - "Signal", - "signal", -] diff --git a/venv/lib/python3.10/site-packages/blinker/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/blinker/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index f680c27..0000000 Binary files a/venv/lib/python3.10/site-packages/blinker/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/blinker/__pycache__/_utilities.cpython-310.pyc b/venv/lib/python3.10/site-packages/blinker/__pycache__/_utilities.cpython-310.pyc deleted file mode 100644 index 6e09b84..0000000 Binary files a/venv/lib/python3.10/site-packages/blinker/__pycache__/_utilities.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/blinker/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/blinker/__pycache__/base.cpython-310.pyc deleted file mode 100644 index cd8fa88..0000000 Binary files a/venv/lib/python3.10/site-packages/blinker/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/blinker/_utilities.py b/venv/lib/python3.10/site-packages/blinker/_utilities.py deleted file mode 100644 index 000c902..0000000 --- a/venv/lib/python3.10/site-packages/blinker/_utilities.py +++ /dev/null @@ -1,64 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import inspect -import typing as t -from weakref import ref -from weakref import WeakMethod - -T = t.TypeVar("T") - - -class 
Symbol: - """A constant symbol, nicer than ``object()``. Repeated calls return the - same instance. - - >>> Symbol('foo') is Symbol('foo') - True - >>> Symbol('foo') - foo - """ - - symbols: t.ClassVar[dict[str, Symbol]] = {} - - def __new__(cls, name: str) -> Symbol: - if name in cls.symbols: - return cls.symbols[name] - - obj = super().__new__(cls) - cls.symbols[name] = obj - return obj - - def __init__(self, name: str) -> None: - self.name = name - - def __repr__(self) -> str: - return self.name - - def __getnewargs__(self) -> tuple[t.Any, ...]: - return (self.name,) - - -def make_id(obj: object) -> c.Hashable: - """Get a stable identifier for a receiver or sender, to be used as a dict - key or in a set. - """ - if inspect.ismethod(obj): - # The id of a bound method is not stable, but the id of the unbound - # function and instance are. - return id(obj.__func__), id(obj.__self__) - - if isinstance(obj, (str, int)): - # Instances with the same value always compare equal and have the same - # hash, even if the id may change. - return obj - - # Assume other types are not hashable but will always be the same instance. - return id(obj) - - -def make_ref(obj: T, callback: c.Callable[[ref[T]], None] | None = None) -> ref[T]: - if inspect.ismethod(obj): - return WeakMethod(obj, callback) # type: ignore[arg-type, return-value] - - return ref(obj, callback) diff --git a/venv/lib/python3.10/site-packages/blinker/base.py b/venv/lib/python3.10/site-packages/blinker/base.py deleted file mode 100644 index d051b94..0000000 --- a/venv/lib/python3.10/site-packages/blinker/base.py +++ /dev/null @@ -1,512 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import sys -import typing as t -import weakref -from collections import defaultdict -from contextlib import contextmanager -from functools import cached_property -from inspect import iscoroutinefunction - -from ._utilities import make_id -from ._utilities import make_ref -from ._utilities import Symbol - -F = t.TypeVar("F", bound=c.Callable[..., t.Any]) - -ANY = Symbol("ANY") -"""Symbol for "any sender".""" - -ANY_ID = 0 - - -class Signal: - """A notification emitter. - - :param doc: The docstring for the signal. - """ - - ANY = ANY - """An alias for the :data:`~blinker.ANY` sender symbol.""" - - set_class: type[set[t.Any]] = set - """The set class to use for tracking connected receivers and senders. - Python's ``set`` is unordered. If receivers must be dispatched in the order - they were connected, an ordered set implementation can be used. - - .. versionadded:: 1.7 - """ - - @cached_property - def receiver_connected(self) -> Signal: - """Emitted at the end of each :meth:`connect` call. - - The signal sender is the signal instance, and the :meth:`connect` - arguments are passed through: ``receiver``, ``sender``, and ``weak``. - - .. versionadded:: 1.2 - """ - return Signal(doc="Emitted after a receiver connects.") - - @cached_property - def receiver_disconnected(self) -> Signal: - """Emitted at the end of each :meth:`disconnect` call. - - The sender is the signal instance, and the :meth:`disconnect` arguments - are passed through: ``receiver`` and ``sender``. - - This signal is emitted **only** when :meth:`disconnect` is called - explicitly. This signal cannot be emitted by an automatic disconnect - when a weakly referenced receiver or sender goes out of scope, as the - instance is no longer be available to be used as the sender for this - signal. 
- - An alternative approach is available by subscribing to - :attr:`receiver_connected` and setting up a custom weakref cleanup - callback on weak receivers and senders. - - .. versionadded:: 1.2 - """ - return Signal(doc="Emitted after a receiver disconnects.") - - def __init__(self, doc: str | None = None) -> None: - if doc: - self.__doc__ = doc - - self.receivers: dict[ - t.Any, weakref.ref[c.Callable[..., t.Any]] | c.Callable[..., t.Any] - ] = {} - """The map of connected receivers. Useful to quickly check if any - receivers are connected to the signal: ``if s.receivers:``. The - structure and data is not part of the public API, but checking its - boolean value is. - """ - - self.is_muted: bool = False - self._by_receiver: dict[t.Any, set[t.Any]] = defaultdict(self.set_class) - self._by_sender: dict[t.Any, set[t.Any]] = defaultdict(self.set_class) - self._weak_senders: dict[t.Any, weakref.ref[t.Any]] = {} - - def connect(self, receiver: F, sender: t.Any = ANY, weak: bool = True) -> F: - """Connect ``receiver`` to be called when the signal is sent by - ``sender``. - - :param receiver: The callable to call when :meth:`send` is called with - the given ``sender``, passing ``sender`` as a positional argument - along with any extra keyword arguments. - :param sender: Any object or :data:`ANY`. ``receiver`` will only be - called when :meth:`send` is called with this sender. If ``ANY``, the - receiver will be called for any sender. A receiver may be connected - to multiple senders by calling :meth:`connect` multiple times. - :param weak: Track the receiver with a :mod:`weakref`. The receiver will - be automatically disconnected when it is garbage collected. When - connecting a receiver defined within a function, set to ``False``, - otherwise it will be disconnected when the function scope ends. - """ - receiver_id = make_id(receiver) - sender_id = ANY_ID if sender is ANY else make_id(sender) - - if weak: - self.receivers[receiver_id] = make_ref( - receiver, self._make_cleanup_receiver(receiver_id) - ) - else: - self.receivers[receiver_id] = receiver - - self._by_sender[sender_id].add(receiver_id) - self._by_receiver[receiver_id].add(sender_id) - - if sender is not ANY and sender_id not in self._weak_senders: - # store a cleanup for weakref-able senders - try: - self._weak_senders[sender_id] = make_ref( - sender, self._make_cleanup_sender(sender_id) - ) - except TypeError: - pass - - if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers: - try: - self.receiver_connected.send( - self, receiver=receiver, sender=sender, weak=weak - ) - except TypeError: - # TODO no explanation or test for this - self.disconnect(receiver, sender) - raise - - return receiver - - def connect_via(self, sender: t.Any, weak: bool = False) -> c.Callable[[F], F]: - """Connect the decorated function to be called when the signal is sent - by ``sender``. - - The decorated function will be called when :meth:`send` is called with - the given ``sender``, passing ``sender`` as a positional argument along - with any extra keyword arguments. - - :param sender: Any object or :data:`ANY`. ``receiver`` will only be - called when :meth:`send` is called with this sender. If ``ANY``, the - receiver will be called for any sender. A receiver may be connected - to multiple senders by calling :meth:`connect` multiple times. - :param weak: Track the receiver with a :mod:`weakref`. The receiver will - be automatically disconnected when it is garbage collected. 
When - connecting a receiver defined within a function, set to ``False``, - otherwise it will be disconnected when the function scope ends.= - - .. versionadded:: 1.1 - """ - - def decorator(fn: F) -> F: - self.connect(fn, sender, weak) - return fn - - return decorator - - @contextmanager - def connected_to( - self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY - ) -> c.Generator[None, None, None]: - """A context manager that temporarily connects ``receiver`` to the - signal while a ``with`` block executes. When the block exits, the - receiver is disconnected. Useful for tests. - - :param receiver: The callable to call when :meth:`send` is called with - the given ``sender``, passing ``sender`` as a positional argument - along with any extra keyword arguments. - :param sender: Any object or :data:`ANY`. ``receiver`` will only be - called when :meth:`send` is called with this sender. If ``ANY``, the - receiver will be called for any sender. - - .. versionadded:: 1.1 - """ - self.connect(receiver, sender=sender, weak=False) - - try: - yield None - finally: - self.disconnect(receiver) - - @contextmanager - def muted(self) -> c.Generator[None, None, None]: - """A context manager that temporarily disables the signal. No receivers - will be called if the signal is sent, until the ``with`` block exits. - Useful for tests. - """ - self.is_muted = True - - try: - yield None - finally: - self.is_muted = False - - def send( - self, - sender: t.Any | None = None, - /, - *, - _async_wrapper: c.Callable[ - [c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]], c.Callable[..., t.Any] - ] - | None = None, - **kwargs: t.Any, - ) -> list[tuple[c.Callable[..., t.Any], t.Any]]: - """Call all receivers that are connected to the given ``sender`` - or :data:`ANY`. Each receiver is called with ``sender`` as a positional - argument along with any extra keyword arguments. Return a list of - ``(receiver, return value)`` tuples. - - The order receivers are called is undefined, but can be influenced by - setting :attr:`set_class`. - - If a receiver raises an exception, that exception will propagate up. - This makes debugging straightforward, with an assumption that correctly - implemented receivers will not raise. - - :param sender: Call receivers connected to this sender, in addition to - those connected to :data:`ANY`. - :param _async_wrapper: Will be called on any receivers that are async - coroutines to turn them into sync callables. For example, could run - the receiver with an event loop. - :param kwargs: Extra keyword arguments to pass to each receiver. - - .. versionchanged:: 1.7 - Added the ``_async_wrapper`` argument. - """ - if self.is_muted: - return [] - - results = [] - - for receiver in self.receivers_for(sender): - if iscoroutinefunction(receiver): - if _async_wrapper is None: - raise RuntimeError("Cannot send to a coroutine function.") - - result = _async_wrapper(receiver)(sender, **kwargs) - else: - result = receiver(sender, **kwargs) - - results.append((receiver, result)) - - return results - - async def send_async( - self, - sender: t.Any | None = None, - /, - *, - _sync_wrapper: c.Callable[ - [c.Callable[..., t.Any]], c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]] - ] - | None = None, - **kwargs: t.Any, - ) -> list[tuple[c.Callable[..., t.Any], t.Any]]: - """Await all receivers that are connected to the given ``sender`` - or :data:`ANY`. Each receiver is called with ``sender`` as a positional - argument along with any extra keyword arguments. 
Return a list of - ``(receiver, return value)`` tuples. - - The order receivers are called is undefined, but can be influenced by - setting :attr:`set_class`. - - If a receiver raises an exception, that exception will propagate up. - This makes debugging straightforward, with an assumption that correctly - implemented receivers will not raise. - - :param sender: Call receivers connected to this sender, in addition to - those connected to :data:`ANY`. - :param _sync_wrapper: Will be called on any receivers that are sync - callables to turn them into async coroutines. For example, - could call the receiver in a thread. - :param kwargs: Extra keyword arguments to pass to each receiver. - - .. versionadded:: 1.7 - """ - if self.is_muted: - return [] - - results = [] - - for receiver in self.receivers_for(sender): - if not iscoroutinefunction(receiver): - if _sync_wrapper is None: - raise RuntimeError("Cannot send to a non-coroutine function.") - - result = await _sync_wrapper(receiver)(sender, **kwargs) - else: - result = await receiver(sender, **kwargs) - - results.append((receiver, result)) - - return results - - def has_receivers_for(self, sender: t.Any) -> bool: - """Check if there is at least one receiver that will be called with the - given ``sender``. A receiver connected to :data:`ANY` will always be - called, regardless of sender. Does not check if weakly referenced - receivers are still live. See :meth:`receivers_for` for a stronger - search. - - :param sender: Check for receivers connected to this sender, in addition - to those connected to :data:`ANY`. - """ - if not self.receivers: - return False - - if self._by_sender[ANY_ID]: - return True - - if sender is ANY: - return False - - return make_id(sender) in self._by_sender - - def receivers_for( - self, sender: t.Any - ) -> c.Generator[c.Callable[..., t.Any], None, None]: - """Yield each receiver to be called for ``sender``, in addition to those - to be called for :data:`ANY`. Weakly referenced receivers that are not - live will be disconnected and skipped. - - :param sender: Yield receivers connected to this sender, in addition - to those connected to :data:`ANY`. - """ - # TODO: test receivers_for(ANY) - if not self.receivers: - return - - sender_id = make_id(sender) - - if sender_id in self._by_sender: - ids = self._by_sender[ANY_ID] | self._by_sender[sender_id] - else: - ids = self._by_sender[ANY_ID].copy() - - for receiver_id in ids: - receiver = self.receivers.get(receiver_id) - - if receiver is None: - continue - - if isinstance(receiver, weakref.ref): - strong = receiver() - - if strong is None: - self._disconnect(receiver_id, ANY_ID) - continue - - yield strong - else: - yield receiver - - def disconnect(self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY) -> None: - """Disconnect ``receiver`` from being called when the signal is sent by - ``sender``. - - :param receiver: A connected receiver callable. - :param sender: Disconnect from only this sender. By default, disconnect - from all senders. 
- """ - sender_id: c.Hashable - - if sender is ANY: - sender_id = ANY_ID - else: - sender_id = make_id(sender) - - receiver_id = make_id(receiver) - self._disconnect(receiver_id, sender_id) - - if ( - "receiver_disconnected" in self.__dict__ - and self.receiver_disconnected.receivers - ): - self.receiver_disconnected.send(self, receiver=receiver, sender=sender) - - def _disconnect(self, receiver_id: c.Hashable, sender_id: c.Hashable) -> None: - if sender_id == ANY_ID: - if self._by_receiver.pop(receiver_id, None) is not None: - for bucket in self._by_sender.values(): - bucket.discard(receiver_id) - - self.receivers.pop(receiver_id, None) - else: - self._by_sender[sender_id].discard(receiver_id) - self._by_receiver[receiver_id].discard(sender_id) - - def _make_cleanup_receiver( - self, receiver_id: c.Hashable - ) -> c.Callable[[weakref.ref[c.Callable[..., t.Any]]], None]: - """Create a callback function to disconnect a weakly referenced - receiver when it is garbage collected. - """ - - def cleanup(ref: weakref.ref[c.Callable[..., t.Any]]) -> None: - # If the interpreter is shutting down, disconnecting can result in a - # weird ignored exception. Don't call it in that case. - if not sys.is_finalizing(): - self._disconnect(receiver_id, ANY_ID) - - return cleanup - - def _make_cleanup_sender( - self, sender_id: c.Hashable - ) -> c.Callable[[weakref.ref[t.Any]], None]: - """Create a callback function to disconnect all receivers for a weakly - referenced sender when it is garbage collected. - """ - assert sender_id != ANY_ID - - def cleanup(ref: weakref.ref[t.Any]) -> None: - self._weak_senders.pop(sender_id, None) - - for receiver_id in self._by_sender.pop(sender_id, ()): - self._by_receiver[receiver_id].discard(sender_id) - - return cleanup - - def _cleanup_bookkeeping(self) -> None: - """Prune unused sender/receiver bookkeeping. Not threadsafe. - - Connecting & disconnecting leaves behind a small amount of bookkeeping - data. Typical workloads using Blinker, for example in most web apps, - Flask, CLI scripts, etc., are not adversely affected by this - bookkeeping. - - With a long-running process performing dynamic signal routing with high - volume, e.g. connecting to function closures, senders are all unique - object instances. Doing all of this over and over may cause memory usage - to grow due to extraneous bookkeeping. (An empty ``set`` for each stale - sender/receiver pair.) - - This method will prune that bookkeeping away, with the caveat that such - pruning is not threadsafe. The risk is that cleanup of a fully - disconnected receiver/sender pair occurs while another thread is - connecting that same pair. If you are in the highly dynamic, unique - receiver/sender situation that has lead you to this method, that failure - mode is perhaps not a big deal for you. - """ - for mapping in (self._by_sender, self._by_receiver): - for ident, bucket in list(mapping.items()): - if not bucket: - mapping.pop(ident, None) - - def _clear_state(self) -> None: - """Disconnect all receivers and senders. Useful for tests.""" - self._weak_senders.clear() - self.receivers.clear() - self._by_sender.clear() - self._by_receiver.clear() - - -class NamedSignal(Signal): - """A named generic notification emitter. The name is not used by the signal - itself, but matches the key in the :class:`Namespace` that it belongs to. - - :param name: The name of the signal within the namespace. - :param doc: The docstring for the signal. 
- """ - - def __init__(self, name: str, doc: str | None = None) -> None: - super().__init__(doc) - - #: The name of this signal. - self.name: str = name - - def __repr__(self) -> str: - base = super().__repr__() - return f"{base[:-1]}; {self.name!r}>" # noqa: E702 - - -class Namespace(dict[str, NamedSignal]): - """A dict mapping names to signals.""" - - def signal(self, name: str, doc: str | None = None) -> NamedSignal: - """Return the :class:`NamedSignal` for the given ``name``, creating it - if required. Repeated calls with the same name return the same signal. - - :param name: The name of the signal. - :param doc: The docstring of the signal. - """ - if name not in self: - self[name] = NamedSignal(name, doc) - - return self[name] - - -class _PNamespaceSignal(t.Protocol): - def __call__(self, name: str, doc: str | None = None) -> NamedSignal: ... - - -default_namespace: Namespace = Namespace() -"""A default :class:`Namespace` for creating named signals. :func:`signal` -creates a :class:`NamedSignal` in this namespace. -""" - -signal: _PNamespaceSignal = default_namespace.signal -"""Return a :class:`NamedSignal` in :data:`default_namespace` with the given -``name``, creating it if required. Repeated calls with the same name return the -same signal. -""" diff --git a/venv/lib/python3.10/site-packages/blinker/py.typed b/venv/lib/python3.10/site-packages/blinker/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/METADATA b/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/METADATA deleted file mode 100644 index a2dece8..0000000 --- a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/METADATA +++ /dev/null @@ -1,77 +0,0 @@ -Metadata-Version: 2.4 -Name: certifi -Version: 2025.7.14 -Summary: Python package for providing Mozilla's CA Bundle. 
-Home-page: https://github.com/certifi/python-certifi -Author: Kenneth Reitz -Author-email: me@kennethreitz.com -License: MPL-2.0 -Project-URL: Source, https://github.com/certifi/python-certifi -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Requires-Python: >=3.7 -License-File: LICENSE -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description -Dynamic: home-page -Dynamic: license -Dynamic: license-file -Dynamic: project-url -Dynamic: requires-python -Dynamic: summary - -Certifi: Python SSL Certificates -================================ - -Certifi provides Mozilla's carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. Simply install it with ``pip``:: - - $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - - >>> import certifi - - >>> certifi.where() - '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - - $ python -m certifi - /usr/local/lib/python3.7/site-packages/certifi/cacert.pem - -Enjoy! - -.. _`Requests`: https://requests.readthedocs.io/en/master/ - -Addition/Removal of Certificates --------------------------------- - -Certifi does not support any addition/removal or other modification of the -CA trust store content. This project is intended to provide a reliable and -highly portable root of trust to python deployments. Look to upstream projects -for methods to use alternate trust. 
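To illustrate the usage described in the certifi README above, a minimal sketch of how the bundle path returned by ``certifi.where()`` is typically handed to the standard-library ``ssl`` module for TLS verification (the URL below is a placeholder, not taken from this repository):

```python
# Sketch: use certifi's CA bundle with the standard-library ssl module.
# Assumes certifi is installed; the URL is a placeholder.
import ssl
import urllib.request

import certifi

# Build an SSLContext that trusts the Mozilla roots shipped in certifi's cacert.pem.
context = ssl.create_default_context(cafile=certifi.where())

# Requests made with this context verify the server certificate against
# certifi's bundle rather than the platform trust store.
with urllib.request.urlopen("https://example.org/", context=context) as response:
    print(response.status)
```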
diff --git a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/RECORD b/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/RECORD deleted file mode 100644 index ae09cd6..0000000 --- a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -certifi-2025.7.14.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -certifi-2025.7.14.dist-info/METADATA,sha256=7cmDKO0a-fVcP_WCfZ4DpLKgDMu_AEAYhTT5lswQToY,2423 -certifi-2025.7.14.dist-info/RECORD,, -certifi-2025.7.14.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -certifi-2025.7.14.dist-info/licenses/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 -certifi-2025.7.14.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 -certifi/__init__.py,sha256=dvNDUdAXp-hzoYcf09PXzLmHIlPfypaBQPFp5esDXyo,94 -certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 -certifi/__pycache__/__init__.cpython-310.pyc,, -certifi/__pycache__/__main__.cpython-310.pyc,, -certifi/__pycache__/core.cpython-310.pyc,, -certifi/cacert.pem,sha256=lm3cYJxEv9SoAx4Atc3NiT1D92d965vcID6H39f_93o,290057 -certifi/core.py,sha256=XFXycndG5pf37ayeF8N32HUuDafsyhkVMbO4BAPWHa0,3394 -certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/WHEEL b/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/WHEEL deleted file mode 100644 index e7fa31b..0000000 --- a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/licenses/LICENSE deleted file mode 100644 index 62b076c..0000000 --- a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/licenses/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -This package contains a modified version of ca-bundle.crt: - -ca-bundle.crt -- Bundle of CA Root Certificates - -This is a bundle of X.509 certificates of public Certificate Authorities -(CA). These were automatically extracted from Mozilla's root certificates -file (certdata.txt). This file can be found in the mozilla source tree: -https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt -It contains the certificates in PEM format and therefore -can be directly used with curl / libcurl / php_curl, or with -an Apache+mod_ssl webserver for SSL client authentication. -Just configure this file as the SSLCACertificateFile.# - -***** BEGIN LICENSE BLOCK ***** -This Source Code Form is subject to the terms of the Mozilla Public License, -v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain -one at http://mozilla.org/MPL/2.0/. 
- -***** END LICENSE BLOCK ***** -@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/top_level.txt deleted file mode 100644 index 963eac5..0000000 --- a/venv/lib/python3.10/site-packages/certifi-2025.7.14.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -certifi diff --git a/venv/lib/python3.10/site-packages/certifi/__init__.py b/venv/lib/python3.10/site-packages/certifi/__init__.py deleted file mode 100644 index e837049..0000000 --- a/venv/lib/python3.10/site-packages/certifi/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .core import contents, where - -__all__ = ["contents", "where"] -__version__ = "2025.07.14" diff --git a/venv/lib/python3.10/site-packages/certifi/__main__.py b/venv/lib/python3.10/site-packages/certifi/__main__.py deleted file mode 100644 index 8945b5d..0000000 --- a/venv/lib/python3.10/site-packages/certifi/__main__.py +++ /dev/null @@ -1,12 +0,0 @@ -import argparse - -from certifi import contents, where - -parser = argparse.ArgumentParser() -parser.add_argument("-c", "--contents", action="store_true") -args = parser.parse_args() - -if args.contents: - print(contents()) -else: - print(where()) diff --git a/venv/lib/python3.10/site-packages/certifi/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/certifi/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index a7c6b64..0000000 Binary files a/venv/lib/python3.10/site-packages/certifi/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/certifi/__pycache__/__main__.cpython-310.pyc b/venv/lib/python3.10/site-packages/certifi/__pycache__/__main__.cpython-310.pyc deleted file mode 100644 index 23ec658..0000000 Binary files a/venv/lib/python3.10/site-packages/certifi/__pycache__/__main__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/certifi/__pycache__/core.cpython-310.pyc b/venv/lib/python3.10/site-packages/certifi/__pycache__/core.cpython-310.pyc deleted file mode 100644 index 740efd2..0000000 Binary files a/venv/lib/python3.10/site-packages/certifi/__pycache__/core.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/certifi/cacert.pem b/venv/lib/python3.10/site-packages/certifi/cacert.pem deleted file mode 100644 index 64c05d7..0000000 --- a/venv/lib/python3.10/site-packages/certifi/cacert.pem +++ /dev/null @@ -1,4778 +0,0 @@ - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud 
-EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s 
-zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR 
-TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd 
-MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg -JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 
3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END 
CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 -QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z -Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
-JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
-# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - 
-# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. -# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 -jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
-jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr -6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- 
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv -033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
-1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV -BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
-ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os -zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
-VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 -pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
-0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
-4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga -W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
-A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD -ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
-RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
-Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ -oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
-Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE -Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
-8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw -MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
-MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj -t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df -WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - G2" -# Serial: 1246989352 -# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 -# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 -# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 ------BEGIN CERTIFICATE----- -MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 -cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs -IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz -dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy -NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu -dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt -dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 -aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T -RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN -cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW -wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 -U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 -jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN -BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ -jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ -Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v -1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R -nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH -VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - EC1" -# Serial: 51543124481930649114116133369 -# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc -# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 -# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 ------BEGIN CERTIFICATE----- -MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG -A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 -d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu -dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq -RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy -MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD -VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 -L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g -Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi -A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt -ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH -Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC -R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX -hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd -BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- -MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
-# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM -9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw -IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 -VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L -93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm -jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA -A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI -U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs -N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv -o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU -5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy -rqXRfboQnoZsG4q5WTP468SQvvG5 ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 2 O=Amazon -# Subject: CN=Amazon Root CA 2 O=Amazon -# Label: "Amazon Root CA 2" -# Serial: 143266982885963551818349160658925006970653239 -# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 -# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a -# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK -gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ -W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg -1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K -8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r -2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me -z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR -8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj -mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz -7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 -+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI -0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm -UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 -LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY -+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS -k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl -7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm -btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl -urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ -fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 -n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE -76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H -9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT -4PsJYGw= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 3 O=Amazon -# Subject: CN=Amazon Root CA 3 O=Amazon -# Label: "Amazon Root CA 3" -# Serial: 143266986699090766294700635381230934788665930 -# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 -# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e -# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 ------BEGIN CERTIFICATE----- -MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl -ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr -ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr -BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM -YyRIHN8wfdVoOw== ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 4 O=Amazon -# Subject: CN=Amazon Root CA 4 O=Amazon -# Label: "Amazon Root CA 4" -# Serial: 143266989758080763974105200630763877849284878 -# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd -# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be -# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 ------BEGIN CERTIFICATE----- -MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi -9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk -M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB -MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw -CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW -1KyLa2tJElMzrdfkviT8tQp21KW8EA== ------END CERTIFICATE----- - -# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" -# Serial: 1 -# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 -# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca -# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 ------BEGIN CERTIFICATE----- -MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx -GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp -bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w -KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 -BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy -dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG -EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll -IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU -QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT -TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg -LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 -a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr -LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
-N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ -XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm -+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
-EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq -M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
-VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE 
WISeKey Global Root GC CA" -# Serial: 44084345621038548146064804565436152554 -# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 -# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 -# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d ------BEGIN CERTIFICATE----- -MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw -CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 -bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg -Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ -BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu -ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS -b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni -eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W -p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T -rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV -57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg -Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 ------END CERTIFICATE----- - -# Issuer: CN=UCA Global G2 Root O=UniTrust -# Subject: CN=UCA Global G2 Root O=UniTrust -# Label: "UCA Global G2 Root" -# Serial: 124779693093741543919145257850076631279 -# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 -# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a -# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH -bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x -CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds -b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr -b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 -kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm -VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R -VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc -C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj -tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY -D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv -j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl -NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 -iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP -O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV -ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj -L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 -1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl -1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU -b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV -PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj -y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb -EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg -DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI -+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
-YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX -UB+K+wb1whnw0A== ------END CERTIFICATE----- - -# Issuer: CN=UCA Extended Validation Root O=UniTrust -# Subject: CN=UCA Extended Validation Root O=UniTrust -# Label: "UCA Extended Validation Root" -# Serial: 106100277556486529736699587978573607008 -# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 -# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a -# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF -eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx -MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV -BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog -D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS -sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop -O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk -sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi -c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj -VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz -KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ -TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G -sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs -1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD -fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN -l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR -ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ -VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 -c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp -4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s -t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj -2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO -vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C -xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx -cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM -fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax ------END CERTIFICATE----- - -# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Label: "Certigna Root CA" -# Serial: 269714418870597844693661054334862075617 -# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 -# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 -# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 ------BEGIN CERTIFICATE----- -MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw -WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw -MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x -MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD -VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX -BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
-ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M -CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu -I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm -TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh -C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf -ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz -IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT -Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k -JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 -hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB -GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of -1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov -L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo -dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr -aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq -hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L -6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG -HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 -0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB -lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi -o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 -gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v -faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 -Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh -jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw -3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign Root CA - G1" -# Serial: 235931866688319308814040 -# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac -# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c -# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 ------BEGIN CERTIFICATE----- -MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD -VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU -ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH -MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO -MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv -Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz -f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO -8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq -d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM -tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt -Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB -o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x -PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM -wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d -GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH -6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
-RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx -iN66zB+Afko= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign ECC Root CA - G3" -# Serial: 287880440101571086945156 -# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 -# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 -# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b ------BEGIN CERTIFICATE----- -MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG -EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo -bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g -RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ -TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s -b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 -WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS -fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB -zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq -hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB -CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD -+JbNR6iC8hZVdyR+EhCVBCyj ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Label: "emSign Root CA - C1" -# Serial: 825510296613316004955058 -# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 -# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 -# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f ------BEGIN CERTIFICATE----- -MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG -A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg -SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v -dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ -BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ -HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH -3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH -GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c -xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 -aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq -TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 -/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 -kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG -YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT -+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo -WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Label: "emSign ECC Root CA - C3" -# Serial: 582948710642506000014504 -# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 -# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 -# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 ------BEGIN CERTIFICATE----- -MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG -EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx -IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND -IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci -MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti -sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O -BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB -Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c -3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J -0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Label: "Hongkong Post Root CA 3" -# Serial: 46170865288971385588281144162979347873371282084 -# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 -# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 -# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 ------BEGIN CERTIFICATE----- -MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL -BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ -SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n -a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 -NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT -CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u -Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO -dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI -VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV -9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY -2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY -vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt -bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb -x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ -l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK -TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj -Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e -i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw -DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG -7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk -MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr -gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk -GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS -3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm -Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ -l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c -JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP -L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
-LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG -mpv0 ------END CERTIFICATE----- - -# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft ECC Root Certificate Authority 2017" -# Serial: 136839042543790627607696632466672567020 -# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 -# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 -# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 ------BEGIN CERTIFICATE----- -MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD -VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw -MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV -UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy -b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR -ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb -hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 -FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV -L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB -iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= ------END CERTIFICATE----- - -# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft RSA Root Certificate Authority 2017" -# Serial: 40975477897264996090493496164228220339 -# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 -# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 -# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 ------BEGIN CERTIFICATE----- -MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl -MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw -NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 -IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG -EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N -aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ -Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 -ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 -HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm -gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ -jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc -aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG -YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 -W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K -UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH -+FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q -W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC -LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC 
-gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 -tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh -SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 -TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 -pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR -xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp -GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 -dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN -AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB -RA+GsCyRxj3qrg+E ------END CERTIFICATE----- - -# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Label: "e-Szigno Root CA 2017" -# Serial: 411379200276854331539784714 -# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 -# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 -# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 ------BEGIN CERTIFICATE----- -MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV -BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk -LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv -b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ -BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg -THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v -IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv -xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H -Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB -eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo -jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ -+efcMQ== ------END CERTIFICATE----- - -# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Label: "certSIGN Root CA G2" -# Serial: 313609486401300475190 -# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 -# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 -# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV -BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g -Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ -BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ -R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF -dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw -vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ -uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp -n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs -cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW -xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P -rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF -DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx -DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy -LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C -eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ 
-d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq -kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC -b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl -qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 -OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c -NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk -ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO -pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj -03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk -PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE -1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX -QRBdJ3NghVdJIgc= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global Certification Authority" -# Serial: 1846098327275375458322922162 -# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e -# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 -# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 ------BEGIN CERTIFICATE----- -MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw -CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x -ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 -c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx -OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI -SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI -b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn -swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu -7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 -1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW -80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP -JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l -RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw -hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 -coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc -BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n -twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud -EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud -DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W -0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe -uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q -lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB -aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE -sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT -MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe -qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh -VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 -h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 -EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK -yeC2nOnOcXHebD8WpHk= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
-# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global ECC P256 Certification Authority" -# Serial: 4151900041497450638097112925 -# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 -# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf -# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 ------BEGIN CERTIFICATE----- -MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG -SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN -FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w -DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw -CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh -DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global ECC P384 Certification Authority" -# Serial: 2704997926503831671788816187 -# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 -# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 -# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 ------BEGIN CERTIFICATE----- -MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB -BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ -j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF -1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G -A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 -AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC -MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu -Sw== ------END CERTIFICATE----- - -# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. -# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
-# Label: "NAVER Global Root Certification Authority" -# Serial: 9013692873798656336226253319739695165984492813 -# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b -# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 -# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 ------BEGIN CERTIFICATE----- -MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM -BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG -T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx -CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD -b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA -iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH -38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE -HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz -kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP -szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq -vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf -nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG -YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo -0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a -CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K -AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I -36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB -Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN -qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj -cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm -+LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL -hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe -lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 -p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 -piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR -LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX -5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO -dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul -9XXeifdy ------END CERTIFICATE----- - -# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" -# Serial: 131542671362353147877283741781055151509 -# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb -# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a -# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb ------BEGIN CERTIFICATE----- -MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw -CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw -FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S -Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 -MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL -DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS -QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
-sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK -Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu -SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC -MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy -v+c= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Label: "GlobalSign Root R46" -# Serial: 1552617688466950547958867513931858518042577 -# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef -# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 -# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA -MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD -VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy -MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt -c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ -OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG -vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud -316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo -0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE -y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF -zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE -+cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN -I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs -x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa -ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC -4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 -7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg -JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti -2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk -pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF -FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt -rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk -ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 -u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP -4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 -N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 -vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Label: "GlobalSign Root E46" -# Serial: 1552617690338932563915843282459653771421763 -# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f -# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 -# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 ------BEGIN CERTIFICATE----- -MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx -CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD -ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw -MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
-HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq -R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd -yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ -7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 -+RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= ------END CERTIFICATE----- - -# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH -# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH -# Label: "GLOBALTRUST 2020" -# Serial: 109160994242082918454945253 -# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 -# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 -# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a ------BEGIN CERTIFICATE----- -MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG -A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw -FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx -MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u -aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq -hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b -RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z -YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 -QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw -yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ -BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ -SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH -r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 -4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me -dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw -q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 -nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu -H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA -VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC -XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd -6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf -+I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi -kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 -wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB -TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C -MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn -4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I -aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy -qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== ------END CERTIFICATE----- - -# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Label: "ANF Secure Server Root CA" -# Serial: 996390341000653745 -# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 -# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 -# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 ------BEGIN CERTIFICATE----- 
-MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV -BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk -YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV -BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN -MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF -UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD -VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v -dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj -cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q -yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH -2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX -H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL -zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR -p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz -W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ -SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn -LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 -n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B -u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj -o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC -AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L -9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej -rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK -pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 -vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq -OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ -/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 -2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI -+PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 -MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo -tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= ------END CERTIFICATE----- - -# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority -# Label: "Certum EC-384 CA" -# Serial: 160250656287871593594747141429395092468 -# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 -# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed -# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 ------BEGIN CERTIFICATE----- -MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw -CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw -JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT -EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 -WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT -LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX -BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE -KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm -Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 -EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J -UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn -nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Root CA" -# Serial: 40870380103424195783807378461123655149 -# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 -# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 -# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd ------BEGIN CERTIFICATE----- -MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 -MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu -MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV -BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw -MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg -U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo -b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ -n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q -p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq -NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF -8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 -HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa -mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi -7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF -ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P -qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ -v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 -Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 -vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD -ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 -WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo -zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR -5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ -GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
-5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq -0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D -P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM -qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP -0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf -E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb ------END CERTIFICATE----- - -# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Label: "TunTrust Root CA" -# Serial: 108534058042236574382096126452369648152337120275 -# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 -# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb -# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL -BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg -Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv -b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG -EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u -IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ -n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd -2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF -VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ -GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF -li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU -r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 -eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb -MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg -jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB -7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW -5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE -ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 -90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z -xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu -QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 -FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH -22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP -xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn -dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 -Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b -nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ -CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH -u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj -d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS RSA Root CA 2021" -# Serial: 76817823531813593706434026085292783742 -# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 -# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d -# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d ------BEGIN 
CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs -MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg -Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL -MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl -YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv -b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l -mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE -4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv -a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M -pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw -Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b -LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY -AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB -AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq -E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr -W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ -CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU -X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 -f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja -H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP -JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P -zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt -jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 -/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT -BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 -aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW -xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU -63ZTGI0RmLo= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS ECC Root CA 2021" -# Serial: 137515985548005187474074462014555733966 -# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 -# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 -# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 ------BEGIN CERTIFICATE----- -MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw -CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh -cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v -dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG -A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj -aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg -Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 -KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y -STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD -AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw -SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN -nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 1977337328857672817 -# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 -# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe -# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 -MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc -tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd -IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j -b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC -AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw -ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m -iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF -Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ -hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P -Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE -EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV -1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t -CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR -5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw -f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 -ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK -GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV ------END CERTIFICATE----- - -# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
-# Label: "vTrus ECC Root CA" -# Serial: 630369271402956006249506845124680065938238527194 -# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 -# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 -# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 ------BEGIN CERTIFICATE----- -MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw -RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY -BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz -MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u -LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 -v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd -e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw -V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA -AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG -GJTO ------END CERTIFICATE----- - -# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Label: "vTrus Root CA" -# Serial: 387574501246983434957692974888460947164905180485 -# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc -# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 -# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 ------BEGIN CERTIFICATE----- -MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL -BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x -FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx -MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s -THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD -ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc -IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU -AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ -GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 -8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH -flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt -J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim -0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN -pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ -UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW -OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB -AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet -8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd -nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j -bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM -Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv -TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS -S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr -I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 -b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB -UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P -Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven -sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= ------END CERTIFICATE----- - -# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group -# Subject: CN=ISRG Root X2 O=Internet Security Research Group -# Label: "ISRG Root X2" -# Serial: 87493402998870891108772069816698636114 -# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 -# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af -# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 ------BEGIN CERTIFICATE----- -MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw -CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg -R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 -MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT -ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw -EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW -+1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 -ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI -zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW -tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 -/q4AaOeMSQ+2b1tbFfLn ------END CERTIFICATE----- - -# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Label: "HiPKI Root CA - G1" -# Serial: 60966262342023497858655262305426234976 -# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 -# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 -# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc ------BEGIN CERTIFICATE----- -MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa -Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 -YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw -qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv -Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 -lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz -Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ -KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK -FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj -HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr -y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ -/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM -a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 -fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG -SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi -7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc -SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza -ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc -XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg -iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho -L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF -Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
-kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ -vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU -YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 159662223612894884239637590694 -# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc -# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 -# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 ------BEGIN CERTIFICATE----- -MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD -VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw -MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g -UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT -BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx -uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV -HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ -+wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 -bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 159662320309726417404178440727 -# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 -# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a -# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo -27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w -Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw -TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl -qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH -szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 -Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk -MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 -wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p -aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN -VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb -C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe -QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy -h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 -7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J -ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef -MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
-Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT -6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ -0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm -2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb -bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 159662449406622349769042896298 -# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc -# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 -# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt -nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY -6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu -MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k -RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg -f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV -+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo -dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW -Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa -G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq -gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H -vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 -0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC -B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u -NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg -yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev -HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 -xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR -TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg -JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV -7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl -6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 159662495401136852707857743206 -# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 -# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 -# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
-AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G -jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 -4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 -VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm -ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 159662532700760215368942768210 -# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 -# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 -# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi -QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR -HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D -9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 -p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD ------END CERTIFICATE----- - -# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj -# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj -# Label: "Telia Root CA v2" -# Serial: 7288924052977061235122729490515358 -# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 -# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd -# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx -CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE -AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 -NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ -MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq -AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 -vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 -lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD -n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT -7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o -6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC -TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 -WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R -DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI -pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj -YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy -rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ -8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi -0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
-A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS -SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K -TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF -6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er -3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt -Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT -VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW -ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA -rBPuUBQemMc= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST BR Root CA 1 2020" -# Serial: 165870826978392376648679885835942448534 -# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed -# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 -# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 -NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS -zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 -QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ -VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW -wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV -dWNbFJWcHwHP2NVypw87 ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST EV Root CA 1 2020" -# Serial: 126288379621884218666039612629459926992 -# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e -# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 -# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 -NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC -/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD -wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 -OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
-y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb -gfM0agPnIjhQW+0ZT0MW ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS ECC P384 Root G5" -# Serial: 13129116028163249804115411775095713523 -# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed -# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee -# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 ------BEGIN CERTIFICATE----- -MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp -Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 -MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ -bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS -7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp -0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS -B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 -BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ -LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 -DXZDjC5Ty3zfDBeWUA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS RSA4096 Root G5" -# Serial: 11930366277458970227240571539258396554 -# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 -# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 -# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN -MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT -HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN -NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs -IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ -ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 -2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp -wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM -pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD -nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po -sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx -Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd -Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX -KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe -XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL -tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv -TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN -AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw -GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H -PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF -O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ -REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik -AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
-/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ -p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw -MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF -qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK -ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root R1 O=Certainly -# Subject: CN=Certainly Root R1 O=Certainly -# Label: "Certainly Root R1" -# Serial: 188833316161142517227353805653483829216 -# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 -# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af -# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw -PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy -dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 -YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 -1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT -vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed -aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 -1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 -r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 -cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ -wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ -6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA -2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH -Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR -eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB -/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u -d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr -PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d -8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi -1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd -rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di -taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 -lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj -yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn -Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy -yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n -wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 -OV+KmalBWQewLK8= ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root E1 O=Certainly -# Subject: CN=Certainly Root E1 O=Certainly -# Label: "Certainly Root E1" -# Serial: 8168531406727139161245376702891150584 -# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 -# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b -# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 ------BEGIN CERTIFICATE----- -MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw -CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu -bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ -BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s -eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
-+IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 -QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 -hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm -ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG -BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR ------END CERTIFICATE----- - -# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. -# Label: "Security Communication ECC RootCA1" -# Serial: 15446673492073852651 -# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 -# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 -# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 ------BEGIN CERTIFICATE----- -MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT -AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD -VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx -NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT -HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 -IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl -dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK -ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu -9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O -be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA1" -# Serial: 113562791157148395269083148143378328608 -# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 -# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a -# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU -MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI -T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz -MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF -SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh -bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z -xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ -spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 -58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR -at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll -5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq -nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK -V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ -pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO -z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn -jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ -WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF -7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 
-YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli -awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u -+2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 -X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN -SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo -P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI -+pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz -znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 -eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 -YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy -r/6zcCwupvI= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA2" -# Serial: 58605626836079930195615843123109055211 -# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c -# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 -# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 ------BEGIN CERTIFICATE----- -MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw -CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ -VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy -MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ -TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS -b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B -IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ -+kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK -sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA -94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B -43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Label: "Sectigo Public Server Authentication Root E46" -# Serial: 88989738453351742415770396670917916916 -# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 -# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a -# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 ------BEGIN CERTIFICATE----- -MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw -CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T -ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN -MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG -A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT -ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC -WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ -6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B -Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa -qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q -4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# 
Label: "Sectigo Public Server Authentication Root R46" -# Serial: 156256931880233212765902055439220583700 -# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 -# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 -# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 ------BEGIN CERTIFICATE----- -MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD -Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw -HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY -MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp -YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa -ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz -SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf -iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X -ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 -IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS -VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE -SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu -+Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt -8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L -HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt -zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P -AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c -mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ -YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 -gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA -Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB -JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX -DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui -TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 -dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 -LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp -0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY -QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS RSA Root CA 2022" -# Serial: 148535279242832292258835760425842727825 -# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da -# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca -# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed ------BEGIN CERTIFICATE----- -MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO -MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD -DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX -DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw -b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC -AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP -L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY -t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins -S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 
-PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO -L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 -R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w -dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS -+YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS -d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG -AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f -gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j -BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z -NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt -hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM -QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf -R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ -DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW -P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy -lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq -bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w -AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q -r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji -Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU -98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS ECC Root CA 2022" -# Serial: 26605119622390491762507526719404364228 -# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 -# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 -# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 ------BEGIN CERTIFICATE----- -MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT -U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 -MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh -dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm -acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN -SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME -GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW -uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp -15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN -b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA ECC TLS 2021" -# Serial: 81873346711060652204712539181482831616 -# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 -# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd -# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 ------BEGIN CERTIFICATE----- -MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w -LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w -CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 -MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF 
-Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI -zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X -tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 -AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 -KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD -aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu -CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo -9H1/IISpQuQo ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA RSA TLS 2021" -# Serial: 111436099570196163832749341232207667876 -# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 -# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 -# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f ------BEGIN CERTIFICATE----- -MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM -MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx -MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 -MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD -QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN -BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z -4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv -Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ -kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs -GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln -nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh -3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD -0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy -geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 -ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB -c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI -pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS -4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs -o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ -qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw -xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM -rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 -AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR -0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY -o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 -dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE -oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G3" -# Serial: 576386314500428537169965010905813481816650257167 -# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 -# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 -# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 ------BEGIN CERTIFICATE----- -MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM -BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp -ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe -Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw -IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU -cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC -DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS -T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK -AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 -nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep -qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA -yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs -hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX -zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv -kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT -f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA -uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB -o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih -MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E -BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 -wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 -XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 -JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j -ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV -VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx -xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on -AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d -7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj -gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV -+Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo -FGWsJwt0ivKH ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G4" -# Serial: 451799571007117016466790293371524403291602933463 -# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb -# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a -# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c ------BEGIN CERTIFICATE----- -MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw -WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs -IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y -MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD -VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz -dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx -s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw -LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij -YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD -pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE -AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR -UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj -/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope -# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope -# Label: "CommScope Public Trust ECC Root-01" -# Serial: 385011430473757362783587124273108818652468453534 -# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 -# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d -# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b ------BEGIN CERTIFICATE----- -MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw -TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t -bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa -Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv -cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C -flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE -hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq -hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg -2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS -Um9poIyNStDuiw7LR47QjRE= ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope -# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope -# Label: "CommScope Public Trust ECC Root-02" -# Serial: 234015080301808452132356021271193974922492992893 -# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 -# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 -# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a ------BEGIN CERTIFICATE----- -MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw -TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t -bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa -Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv -cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL -j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
-v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq -hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n -ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV -mkzw5l4lIhVtwodZ0LKOag== ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope -# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope -# Label: "CommScope Public Trust RSA Root-01" -# Serial: 354030733275608256394402989253558293562031411421 -# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 -# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 -# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL -BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi -Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 -NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t -U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt -MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk -YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh -suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al -DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj -WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl -P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 -KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p -UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ -kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO -Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB -Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U -CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ -KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 -NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ -nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ -QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v -trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a -aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD -j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 -Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w -lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn -YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc -icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope -# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope -# Label: "CommScope Public Trust RSA Root-02" -# Serial: 480062499834624527752716769107743131258796508494 -# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa -# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae -# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL -BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi -Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
-NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t -U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt -MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE -NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 -kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C -rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz -hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 -LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs -n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku -FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 -kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 -wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v -wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs -5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ -KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB -KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 -+VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme -APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq -pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT -6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF -sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt -PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d -lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 -v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O -rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS ECC Root 2020" -# Serial: 72082518505882327255703894282316633856 -# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd -# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec -# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 ------BEGIN CERTIFICATE----- -MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw -CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH -bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw -MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx -JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE -AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O -tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP -f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA -MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di -z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn -27iQ7t0l ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS RSA Root 2023" -# Serial: 44676229530606711399881795178081572759 -# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 -# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 -# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj -MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 -eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy -MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC -REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG -A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 -cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV -cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA -U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 -Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug -BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy -8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J -co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg -8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 -rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 -mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg -+y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX -gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 -p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ -pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm -9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw -M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd -GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ -CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t -xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ -w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK -L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj -X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q -ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm -dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= ------END CERTIFICATE----- - -# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" -# Serial: 65916896770016886708751106294915943533 -# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 -# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 -# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a ------BEGIN CERTIFICATE----- -MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf -e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C -cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O -BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO -PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw 
-hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG -XSaQpYXFuXqUPoeovQA= ------END CERTIFICATE----- - -# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA CYBER Root CA" -# Serial: 85076849864375384482682434040119489222 -# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 -# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 -# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ -MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 -IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 -WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO -LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg -Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P -40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF -avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ -34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i -JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu -j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf -Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP -2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA -S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA -oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC -kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW -5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd -BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB -AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t -tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn -68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn -TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t -RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx -f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI -Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz -8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 -NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX -xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 -t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
-# Label: "SecureSign Root CA12" -# Serial: 587887345431707215246142177076162061960426065942 -# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 -# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 -# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw -NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF -KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt -p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd -J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur -FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J -hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K -h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF -AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld -mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ -mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA -8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV -55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ -yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA14" -# Serial: 575790784512929437950770173562378038616896959179 -# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 -# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f -# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw -NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ -FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg -vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy -6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo -/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J -kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ -0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib -y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac -18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs -0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB -SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL -ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk -86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E -rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib -ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
-zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS -DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 -2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo -FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy -K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 -dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl -Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB -365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c -JRNItX+S ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA15" -# Serial: 126083514594751269499665114766174399806381178503 -# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 -# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d -# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a ------BEGIN CERTIFICATE----- -MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw -UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM -dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy -NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl -cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 -IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 -wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR -ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT -9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp -4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 -bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH -# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH -# Label: "D-TRUST BR Root CA 2 2023" -# Serial: 153168538924886464690566649552453098598 -# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 -# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 -# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 ------BEGIN CERTIFICATE----- -MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI -MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE -LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw -OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi -MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr -i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE -gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 -k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT -Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl -2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U -cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP -/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS -uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ -0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N -DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ -XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 
-GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG -OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y -XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI -FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n -riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR -VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc -LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn -4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD -hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG -koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 -ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS -Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 -knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ -hJ65bvspmZDogNOfJA== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. -# Label: "TrustAsia TLS ECC Root CA" -# Serial: 310892014698942880364840003424242768478804666567 -# MD5 Fingerprint: 09:48:04:77:d2:fc:65:93:71:66:b1:11:95:4f:06:8c -# SHA1 Fingerprint: b5:ec:39:f3:a1:66:37:ae:c3:05:94:57:e2:be:11:be:b7:a1:7f:36 -# SHA256 Fingerprint: c0:07:6b:9e:f0:53:1f:b1:a6:56:d6:7c:4e:be:97:cd:5d:ba:a4:1e:f4:45:98:ac:c2:48:98:78:c9:2d:87:11 ------BEGIN CERTIFICATE----- -MIICMTCCAbegAwIBAgIUNnThTXxlE8msg1UloD5Sfi9QaMcwCgYIKoZIzj0EAwMw -WDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs -IEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgRUNDIFJvb3QgQ0EwHhcNMjQw -NTE1MDU0MTU2WhcNNDQwNTE1MDU0MTU1WjBYMQswCQYDVQQGEwJDTjElMCMGA1UE -ChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1c3RB -c2lhIFRMUyBFQ0MgUm9vdCBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLh/pVs/ -AT598IhtrimY4ZtcU5nb9wj/1WrgjstEpvDBjL1P1M7UiFPoXlfXTr4sP/MSpwDp -guMqWzJ8S5sUKZ74LYO1644xST0mYekdcouJtgq7nDM1D9rs3qlKH8kzsaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQULIVTu7FDzTLqnqOH/qKYqKaT6RAw -DgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2gAMGUCMFRH18MtYYZI9HlaVQ01 -L18N9mdsd0AaRuf4aFtOJx24mH1/k78ITcTaRTChD15KeAIxAKORh/IRM4PDwYqR -OkwrULG9IpRdNYlzg8WbGf60oenUoWa2AaU2+dhoYSi3dOGiMQ== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia TLS RSA Root CA" -# Serial: 160405846464868906657516898462547310235378010780 -# MD5 Fingerprint: 3b:9e:c3:86:0f:34:3c:6b:c5:46:c4:8e:1d:e7:19:12 -# SHA1 Fingerprint: a5:46:50:c5:62:ea:95:9a:1a:a7:04:6f:17:58:c7:29:53:3d:03:fa -# SHA256 Fingerprint: 06:c0:8d:7d:af:d8:76:97:1e:b1:12:4f:e6:7f:84:7e:c0:c7:a1:58:d3:ea:53:cb:e9:40:e2:ea:97:91:f4:c3 ------BEGIN CERTIFICATE----- -MIIFgDCCA2igAwIBAgIUHBjYz+VTPyI1RlNUJDxsR9FcSpwwDQYJKoZIhvcNAQEM -BQAwWDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dp -ZXMsIEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgUlNBIFJvb3QgQ0EwHhcN -MjQwNTE1MDU0MTU3WhcNNDQwNTE1MDU0MTU2WjBYMQswCQYDVQQGEwJDTjElMCMG -A1UEChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1 -c3RBc2lhIFRMUyBSU0EgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC -AgoCggIBAMMWuBtqpERz5dZO9LnPWwvB0ZqB9WOwj0PBuwhaGnrhB3YmH49pVr7+ -NmDQDIPNlOrnxS1cLwUWAp4KqC/lYCZUlviYQB2srp10Zy9U+5RjmOMmSoPGlbYJ -Q1DNDX3eRA5gEk9bNb2/mThtfWza4mhzH/kxpRkQcwUqwzIZheo0qt1CHjCNP561 -HmHVb70AcnKtEj+qpklz8oYVlQwQX1Fkzv93uMltrOXVmPGZLmzjyUT5tUMnCE32 -ft5EebuyjBza00tsLtbDeLdM1aTk2tyKjg7/D8OmYCYozza/+lcK7Fs/6TAWe8Tb -xNRkoDD75f0dcZLdKY9BWN4ArTr9PXwaqLEX8E40eFgl1oUh63kd0Nyrz2I8sMeX -i9bQn9P+PN7F4/w6g3CEIR0JwqH8uyghZVNgepBtljhb//HXeltt08lwSUq6HTrQ -UNoyIBnkiz/r1RYmNzz7dZ6wB3C4FGB33PYPXFIKvF1tjVEK2sUYyJtt3LCDs3+j -TnhMmCWr8n4uIF6CFabW2I+s5c0yhsj55NqJ4js+k8UTav/H9xj8Z7XvGCxUq0DT -bE3txci3OE9kxJRMT6DNrqXGJyV1J23G2pyOsAWZ1SgRxSHUuPzHlqtKZFlhaxP8 -S8ySpg+kUb8OWJDZgoM5pl+z+m6Ss80zDoWo8SnTq1mt1tve1CuBAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLgHkXlcBvRG/XtZylomkadFK/hT -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQwFAAOCAgEAIZtqBSBdGBanEqT3 -Rz/NyjuujsCCztxIJXgXbODgcMTWltnZ9r96nBO7U5WS/8+S4PPFJzVXqDuiGev4 -iqME3mmL5Dw8veWv0BIb5Ylrc5tvJQJLkIKvQMKtuppgJFqBTQUYo+IzeXoLH5Pt -7DlK9RME7I10nYEKqG/odv6LTytpEoYKNDbdgptvT+Bz3Ul/KD7JO6NXBNiT2Twp -2xIQaOHEibgGIOcberyxk2GaGUARtWqFVwHxtlotJnMnlvm5P1vQiJ3koP26TpUJ -g3933FEFlJ0gcXax7PqJtZwuhfG5WyRasQmr2soaB82G39tp27RIGAAtvKLEiUUj -pQ7hRGU+isFqMB3iYPg6qocJQrmBktwliJiJ8Xw18WLK7nn4GS/+X/jbh87qqA8M -pugLoDzga5SYnH+tBuYc6kIQX+ImFTw3OffXvO645e8D7r0i+yiGNFjEWn9hongP -XvPKnbwbPKfILfanIhHKA9jnZwqKDss1jjQ52MjqjZ9k4DewbNfFj8GQYSbbJIwe -SsCI3zWQzj8C9GRh3sfIB5XeMhg6j6JCQCTl1jNdfK7vsU1P1FeQNWrcrgSXSYk0 -ly4wBOeY99sLAZDBHwo/+ML+TvrbmnNzFrwFuHnYWa8G5z9nODmxfKuU4CkUpijy -323imttUQ/hHWKNddBWcwauwxzQ= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH -# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH -# Label: "D-TRUST EV Root CA 2 2023" -# Serial: 139766439402180512324132425437959641711 -# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 -# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b -# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce ------BEGIN CERTIFICATE----- -MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI -MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE -LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw -OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi -MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK -F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE -7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe -EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 -lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb 
-RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV -jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc -jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx -TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ -ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk -hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF -NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH -kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG -OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y -XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 -QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 -pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q -3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU -t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX -cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 -ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT -2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs -7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP -gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst -Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh -XBxvWHZks/wCuPWdCg== ------END CERTIFICATE----- - -# Issuer: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG -# Subject: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG -# Label: "SwissSign RSA TLS Root CA 2022 - 1" -# Serial: 388078645722908516278762308316089881486363258315 -# MD5 Fingerprint: 16:2e:e4:19:76:81:85:ba:8e:91:58:f1:15:ef:72:39 -# SHA1 Fingerprint: 81:34:0a:be:4c:cd:ce:cc:e7:7d:cc:8a:d4:57:e2:45:a0:77:5d:ce -# SHA256 Fingerprint: 19:31:44:f4:31:e0:fd:db:74:07:17:d4:de:92:6a:57:11:33:88:4b:43:60:d3:0e:27:29:13:cb:e6:60:ce:41 ------BEGIN CERTIFICATE----- -MIIFkzCCA3ugAwIBAgIUQ/oMX04bgBhE79G0TzUfRPSA7cswDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzErMCkGA1UE -AxMiU3dpc3NTaWduIFJTQSBUTFMgUm9vdCBDQSAyMDIyIC0gMTAeFw0yMjA2MDgx -MTA4MjJaFw00NzA2MDgxMTA4MjJaMFExCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxT -d2lzc1NpZ24gQUcxKzApBgNVBAMTIlN3aXNzU2lnbiBSU0EgVExTIFJvb3QgQ0Eg -MjAyMiAtIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDLKmjiC8NX -vDVjvHClO/OMPE5Xlm7DTjak9gLKHqquuN6orx122ro10JFwB9+zBvKK8i5VUXu7 -LCTLf5ImgKO0lPaCoaTo+nUdWfMHamFk4saMla+ju45vVs9xzF6BYQ1t8qsCLqSX -5XH8irCRIFucdFJtrhUnWXjyCcplDn/L9Ovn3KlMd/YrFgSVrpxxpT8q2kFC5zyE -EPThPYxr4iuRR1VPuFa+Rd4iUU1OKNlfGUEGjw5NBuBwQCMBauTLE5tzrE0USJIt -/m2n+IdreXXhvhCxqohAWVTXz8TQm0SzOGlkjIHRI36qOTw7D59Ke4LKa2/KIj4x -0LDQKhySio/YGZxH5D4MucLNvkEM+KRHBdvBFzA4OmnczcNpI/2aDwLOEGrOyvi5 -KaM2iYauC8BPY7kGWUleDsFpswrzd34unYyzJ5jSmY0lpx+Gs6ZUcDj8fV3oT4MM -0ZPlEuRU2j7yrTrePjxF8CgPBrnh25d7mUWe3f6VWQQvdT/TromZhqwUtKiE+shd -OxtYk8EXlFXIC+OCeYSf8wCENO7cMdWP8vpPlkwGqnj73mSiI80fPsWMvDdUDrta -clXvyFu1cvh43zcgTFeRc5JzrBh3Q4IgaezprClG5QtO+DdziZaKHG29777YtvTK -wP1H8K4LWCDFyB02rpeNUIMmJCn3nTsPBQIDAQABo2MwYTAPBgNVHRMBAf8EBTAD -AQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBRvjmKLk0Ow4UD2p8P98Q+4 -DxU4pTAdBgNVHQ4EFgQUb45ii5NDsOFA9qfD/fEPuA8VOKUwDQYJKoZIhvcNAQEL -BQADggIBAKwsKUF9+lz1GpUYvyypiqkkVHX1uECry6gkUSsYP2OprphWKwVDIqO3 -10aewCoSPY6WlkDfDDOLazeROpW7OSltwAJsipQLBwJNGD77+3v1dj2b9l4wBlgz -Hqp41eZUBDqyggmNzhYzWUUo8aWjlw5DI/0LIICQ/+Mmz7hkkeUFjxOgdg3XNwwQ -iJb0Pr6VvfHDffCjw3lHC1ySFWPtUnWK50Zpy1FVCypM9fJkT6lc/2cyjlUtMoIc -gC9qkfjLvH4YoiaoLqNTKIftV+Vlek4ASltOU8liNr3CjlvrzG4ngRhZi0Rjn9UM -ZfQpZX+RLOV/fuiJz48gy20HQhFRJjKKLjpHE7iNvUcNCfAWpO2Whi4Z2L6MOuhF 
-LhG6rlrnub+xzI/goP+4s9GFe3lmozm1O2bYQL7Pt2eLSMkZJVX8vY3PXtpOpvJp -zv1/THfQwUY1mFwjmwJFQ5Ra3bxHrSL+ul4vkSkphnsh3m5kt8sNjzdbowhq6/Td -Ao9QAwKxuDdollDruF/UKIqlIgyKhPBZLtU30WHlQnNYKoH3dtvi4k0NX/a3vgW0 -rk4N3hY9A4GzJl5LuEsAz/+MF7psYC0nhzck5npgL7XTgwSqT0N1osGDsieYK7EO -gLrAhV5Cud+xYJHT6xh+cHiudoO+cVrQkOPKwRYlZ0rwtnu64ZzZ ------END CERTIFICATE----- diff --git a/venv/lib/python3.10/site-packages/certifi/core.py b/venv/lib/python3.10/site-packages/certifi/core.py deleted file mode 100644 index 1c9661c..0000000 --- a/venv/lib/python3.10/site-packages/certifi/core.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem or its contents. -""" -import sys -import atexit - -def exit_cacert_ctx() -> None: - _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] - - -if sys.version_info >= (3, 11): - - from importlib.resources import as_file, files - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the file - # in cases where we're inside of a zipimport situation until someone - # actually calls where(), but we don't want to re-extract the file - # on every call of where(), so we'll do it once then store it in a - # global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you to - # manage the cleanup of this file, so it doesn't actually return a - # path, it returns a context manager that will give you the path - # when you enter it and will do any cleanup when you leave it. In - # the common case of not needing a temporary file, it will just - # return the file system location and the __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. - _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") - -else: - - from importlib.resources import path as get_path, read_text - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the - # file in cases where we're inside of a zipimport situation until - # someone actually calls where(), but we don't want to re-extract - # the file on every call of where(), so we'll do it once then store - # it in a global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you - # to manage the cleanup of this file, so it doesn't actually - # return a path, it returns a context manager that will give - # you the path when you enter it and will do any cleanup when - # you leave it. In the common case of not needing a temporary - # file, it will just return the file system location and the - # __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. 
- _CACERT_CTX = get_path("certifi", "cacert.pem") - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/venv/lib/python3.10/site-packages/certifi/py.typed b/venv/lib/python3.10/site-packages/certifi/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/LICENSE b/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/LICENSE deleted file mode 100644 index 29225ee..0000000 --- a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/LICENSE +++ /dev/null @@ -1,26 +0,0 @@ - -Except when otherwise stated (look for LICENSE files in directories or -information at the beginning of each file) all software and -documentation is licensed as follows: - - The MIT License - - Permission is hereby granted, free of charge, to any person - obtaining a copy of this software and associated documentation - files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, sublicense, and/or - sell copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included - in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. - diff --git a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/METADATA deleted file mode 100644 index 60b0779..0000000 --- a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/METADATA +++ /dev/null @@ -1,40 +0,0 @@ -Metadata-Version: 2.1 -Name: cffi -Version: 1.17.1 -Summary: Foreign Function Interface for Python calling C code. 
-Home-page: http://cffi.readthedocs.org -Author: Armin Rigo, Maciej Fijalkowski -Author-email: python-cffi@googlegroups.com -License: MIT -Project-URL: Documentation, http://cffi.readthedocs.org/ -Project-URL: Source Code, https://github.com/python-cffi/cffi -Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues -Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html -Project-URL: Downloads, https://github.com/python-cffi/cffi/releases -Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: License :: OSI Approved :: MIT License -Requires-Python: >=3.8 -License-File: LICENSE -Requires-Dist: pycparser - - -CFFI -==== - -Foreign Function Interface for Python calling C code. -Please see the `Documentation `_. - -Contact -------- - -`Mailing list `_ diff --git a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/RECORD deleted file mode 100644 index 9e7aa9f..0000000 --- a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/RECORD +++ /dev/null @@ -1,48 +0,0 @@ -_cffi_backend.cpython-310-x86_64-linux-gnu.so,sha256=pciUVwDoiYkGtuoos7gi5U2TSTeBHVoDkneECMzaObI,985520 -cffi-1.17.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -cffi-1.17.1.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294 -cffi-1.17.1.dist-info/METADATA,sha256=u6nuvP_qPJKu2zvIbi2zkGzVu7KjnnRIYUFyIrOY3j4,1531 -cffi-1.17.1.dist-info/RECORD,, -cffi-1.17.1.dist-info/WHEEL,sha256=AxiTY2sz_GcPOsKDeggQV_FGgAhpyJSKs70WYTq6kog,151 -cffi-1.17.1.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75 -cffi-1.17.1.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 -cffi/__init__.py,sha256=H6t_ebva6EeHpUuItFLW1gbRp94eZRNJODLaWKdbx1I,513 -cffi/__pycache__/__init__.cpython-310.pyc,, -cffi/__pycache__/_imp_emulation.cpython-310.pyc,, -cffi/__pycache__/_shimmed_dist_utils.cpython-310.pyc,, -cffi/__pycache__/api.cpython-310.pyc,, -cffi/__pycache__/backend_ctypes.cpython-310.pyc,, -cffi/__pycache__/cffi_opcode.cpython-310.pyc,, -cffi/__pycache__/commontypes.cpython-310.pyc,, -cffi/__pycache__/cparser.cpython-310.pyc,, -cffi/__pycache__/error.cpython-310.pyc,, -cffi/__pycache__/ffiplatform.cpython-310.pyc,, -cffi/__pycache__/lock.cpython-310.pyc,, -cffi/__pycache__/model.cpython-310.pyc,, -cffi/__pycache__/pkgconfig.cpython-310.pyc,, -cffi/__pycache__/recompiler.cpython-310.pyc,, -cffi/__pycache__/setuptools_ext.cpython-310.pyc,, -cffi/__pycache__/vengine_cpy.cpython-310.pyc,, -cffi/__pycache__/vengine_gen.cpython-310.pyc,, -cffi/__pycache__/verifier.cpython-310.pyc,, -cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908 -cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055 -cffi/_embedding.h,sha256=EDKw5QrLvQoe3uosXB3H1xPVTYxsn33eV3A43zsA_Fw,18787 -cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960 
-cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230 -cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169 -cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454 -cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731 -cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805 -cffi/cparser.py,sha256=0qI3mEzZSNVcCangoyXOoAcL-RhpQL08eG8798T024s,44789 -cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 -cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584 -cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 -cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797 -cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 -cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 -cffi/recompiler.py,sha256=sim4Tm7lamt2Jn8uzKN0wMYp6ODByk3g7of47-h9LD4,65367 -cffi/setuptools_ext.py,sha256=-ebj79lO2_AUH-kRcaja2pKY1Z_5tloGwsJgzK8P3Cc,8871 -cffi/vengine_cpy.py,sha256=8UagT6ZEOZf6Dju7_CfNulue8CnsHLEzJYhnqUhoF04,43752 -cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939 -cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182 diff --git a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/WHEEL deleted file mode 100644 index 59abe6e..0000000 --- a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (74.1.1) -Root-Is-Purelib: false -Tag: cp310-cp310-manylinux_2_17_x86_64 -Tag: cp310-cp310-manylinux2014_x86_64 - diff --git a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/entry_points.txt deleted file mode 100644 index 4b0274f..0000000 --- a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[distutils.setup_keywords] -cffi_modules = cffi.setuptools_ext:cffi_modules diff --git a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/top_level.txt deleted file mode 100644 index f645779..0000000 --- a/venv/lib/python3.10/site-packages/cffi-1.17.1.dist-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -_cffi_backend -cffi diff --git a/venv/lib/python3.10/site-packages/cffi/__init__.py b/venv/lib/python3.10/site-packages/cffi/__init__.py deleted file mode 100644 index 2e35a38..0000000 --- a/venv/lib/python3.10/site-packages/cffi/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', - 'FFIError'] - -from .api import FFI -from .error import CDefError, FFIError, VerificationError, VerificationMissing -from .error import PkgConfigError - -__version__ = "1.17.1" -__version_info__ = (1, 17, 1) - -# The verifier module file names are based on the CRC32 of a string that -# contains the following version number. It may be older than __version__ -# if nothing is clearly incompatible. 
-__version_verifier_modules__ = "0.8.6" diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 3a0d8d6..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/_imp_emulation.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/_imp_emulation.cpython-310.pyc deleted file mode 100644 index 20f0cdc..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/_imp_emulation.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-310.pyc deleted file mode 100644 index 5083250..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/api.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/api.cpython-310.pyc deleted file mode 100644 index ac3be2b..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/api.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/backend_ctypes.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/backend_ctypes.cpython-310.pyc deleted file mode 100644 index ce8e23c..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/backend_ctypes.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/cffi_opcode.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/cffi_opcode.cpython-310.pyc deleted file mode 100644 index cfd0fb7..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/cffi_opcode.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/commontypes.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/commontypes.cpython-310.pyc deleted file mode 100644 index bd80389..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/commontypes.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/cparser.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/cparser.cpython-310.pyc deleted file mode 100644 index b997cf5..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/cparser.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/error.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/error.cpython-310.pyc deleted file mode 100644 index ae1e852..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/error.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/ffiplatform.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/ffiplatform.cpython-310.pyc deleted file mode 100644 index 2e9e22f..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/ffiplatform.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/lock.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/lock.cpython-310.pyc deleted file 
mode 100644 index 0181b99..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/lock.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/model.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/model.cpython-310.pyc deleted file mode 100644 index 65eccb0..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/model.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/pkgconfig.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/pkgconfig.cpython-310.pyc deleted file mode 100644 index adca330..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/pkgconfig.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/recompiler.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/recompiler.cpython-310.pyc deleted file mode 100644 index 4d17d1c..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/recompiler.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/setuptools_ext.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/setuptools_ext.cpython-310.pyc deleted file mode 100644 index a416742..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/setuptools_ext.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/vengine_cpy.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/vengine_cpy.cpython-310.pyc deleted file mode 100644 index 3dd119c..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/vengine_cpy.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/vengine_gen.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/vengine_gen.cpython-310.pyc deleted file mode 100644 index 8f5d5dd..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/vengine_gen.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/__pycache__/verifier.cpython-310.pyc b/venv/lib/python3.10/site-packages/cffi/__pycache__/verifier.cpython-310.pyc deleted file mode 100644 index 4977228..0000000 Binary files a/venv/lib/python3.10/site-packages/cffi/__pycache__/verifier.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cffi/_cffi_errors.h b/venv/lib/python3.10/site-packages/cffi/_cffi_errors.h deleted file mode 100644 index 158e059..0000000 --- a/venv/lib/python3.10/site-packages/cffi/_cffi_errors.h +++ /dev/null @@ -1,149 +0,0 @@ -#ifndef CFFI_MESSAGEBOX -# ifdef _MSC_VER -# define CFFI_MESSAGEBOX 1 -# else -# define CFFI_MESSAGEBOX 0 -# endif -#endif - - -#if CFFI_MESSAGEBOX -/* Windows only: logic to take the Python-CFFI embedding logic - initialization errors and display them in a background thread - with MessageBox. The idea is that if the whole program closes - as a result of this problem, then likely it is already a console - program and you can read the stderr output in the console too. - If it is not a console program, then it will likely show its own - dialog to complain, or generally not abruptly close, and for this - case the background thread should stay alive. 
-*/ -static void *volatile _cffi_bootstrap_text; - -static PyObject *_cffi_start_error_capture(void) -{ - PyObject *result = NULL; - PyObject *x, *m, *bi; - - if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, - (void *)1, NULL) != NULL) - return (PyObject *)1; - - m = PyImport_AddModule("_cffi_error_capture"); - if (m == NULL) - goto error; - - result = PyModule_GetDict(m); - if (result == NULL) - goto error; - -#if PY_MAJOR_VERSION >= 3 - bi = PyImport_ImportModule("builtins"); -#else - bi = PyImport_ImportModule("__builtin__"); -#endif - if (bi == NULL) - goto error; - PyDict_SetItemString(result, "__builtins__", bi); - Py_DECREF(bi); - - x = PyRun_String( - "import sys\n" - "class FileLike:\n" - " def write(self, x):\n" - " try:\n" - " of.write(x)\n" - " except: pass\n" - " self.buf += x\n" - " def flush(self):\n" - " pass\n" - "fl = FileLike()\n" - "fl.buf = ''\n" - "of = sys.stderr\n" - "sys.stderr = fl\n" - "def done():\n" - " sys.stderr = of\n" - " return fl.buf\n", /* make sure the returned value stays alive */ - Py_file_input, - result, result); - Py_XDECREF(x); - - error: - if (PyErr_Occurred()) - { - PyErr_WriteUnraisable(Py_None); - PyErr_Clear(); - } - return result; -} - -#pragma comment(lib, "user32.lib") - -static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) -{ - Sleep(666); /* may be interrupted if the whole process is closing */ -#if PY_MAJOR_VERSION >= 3 - MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, - L"Python-CFFI error", - MB_OK | MB_ICONERROR); -#else - MessageBoxA(NULL, (char *)_cffi_bootstrap_text, - "Python-CFFI error", - MB_OK | MB_ICONERROR); -#endif - _cffi_bootstrap_text = NULL; - return 0; -} - -static void _cffi_stop_error_capture(PyObject *ecap) -{ - PyObject *s; - void *text; - - if (ecap == (PyObject *)1) - return; - - if (ecap == NULL) - goto error; - - s = PyRun_String("done()", Py_eval_input, ecap, ecap); - if (s == NULL) - goto error; - - /* Show a dialog box, but in a background thread, and - never show multiple dialog boxes at once. */ -#if PY_MAJOR_VERSION >= 3 - text = PyUnicode_AsWideCharString(s, NULL); -#else - text = PyString_AsString(s); -#endif - - _cffi_bootstrap_text = text; - - if (text != NULL) - { - HANDLE h; - h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, - NULL, 0, NULL); - if (h != NULL) - CloseHandle(h); - } - /* decref the string, but it should stay alive as 'fl.buf' - in the small module above. It will really be freed only if - we later get another similar error. So it's a leak of at - most one copy of the small module. That's fine for this - situation which is usually a "fatal error" anyway. */ - Py_DECREF(s); - PyErr_Clear(); - return; - - error: - _cffi_bootstrap_text = NULL; - PyErr_Clear(); -} - -#else - -static PyObject *_cffi_start_error_capture(void) { return NULL; } -static void _cffi_stop_error_capture(PyObject *ecap) { } - -#endif diff --git a/venv/lib/python3.10/site-packages/cffi/_cffi_include.h b/venv/lib/python3.10/site-packages/cffi/_cffi_include.h deleted file mode 100644 index 908a1d7..0000000 --- a/venv/lib/python3.10/site-packages/cffi/_cffi_include.h +++ /dev/null @@ -1,389 +0,0 @@ -#define _CFFI_ - -/* We try to define Py_LIMITED_API before including Python.h. - - Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and - Py_REF_DEBUG are not defined. This is a best-effort approximation: - we can learn about Py_DEBUG from pyconfig.h, but it is unclear if - the same works for the other two macros. Py_DEBUG implies them, - but not the other way around. 
- - The implementation is messy (issue #350): on Windows, with _MSC_VER, - we have to define Py_LIMITED_API even before including pyconfig.h. - In that case, we guess what pyconfig.h will do to the macros above, - and check our guess after the #include. - - Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv - version >= 16.0.0. With older versions of either, you don't get a - copy of PYTHON3.DLL in the virtualenv. We can't check the version of - CPython *before* we even include pyconfig.h. ffi.set_source() puts - a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is - running on Windows < 3.5, as an attempt at fixing it, but that's - arguably wrong because it may not be the target version of Python. - Still better than nothing I guess. As another workaround, you can - remove the definition of Py_LIMITED_API here. - - See also 'py_limited_api' in cffi/setuptools_ext.py. -*/ -#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) -# ifdef _MSC_VER -# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) -# define Py_LIMITED_API -# endif -# include - /* sanity-check: Py_LIMITED_API will cause crashes if any of these - are also defined. Normally, the Python file PC/pyconfig.h does not - cause any of these to be defined, with the exception that _DEBUG - causes Py_DEBUG. Double-check that. */ -# ifdef Py_LIMITED_API -# if defined(Py_DEBUG) -# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set" -# endif -# if defined(Py_TRACE_REFS) -# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set" -# endif -# if defined(Py_REF_DEBUG) -# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set" -# endif -# endif -# else -# include -# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) -# define Py_LIMITED_API -# endif -# endif -#endif - -#include -#ifdef __cplusplus -extern "C" { -#endif -#include -#include "parse_c_type.h" - -/* this block of #ifs should be kept exactly identical between - c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py - and cffi/_cffi_include.h */ -#if defined(_MSC_VER) -# include /* for alloca() */ -# if _MSC_VER < 1600 /* MSVC < 2010 */ - typedef __int8 int8_t; - typedef __int16 int16_t; - typedef __int32 int32_t; - typedef __int64 int64_t; - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - typedef unsigned __int64 uint64_t; - typedef __int8 int_least8_t; - typedef __int16 int_least16_t; - typedef __int32 int_least32_t; - typedef __int64 int_least64_t; - typedef unsigned __int8 uint_least8_t; - typedef unsigned __int16 uint_least16_t; - typedef unsigned __int32 uint_least32_t; - typedef unsigned __int64 uint_least64_t; - typedef __int8 int_fast8_t; - typedef __int16 int_fast16_t; - typedef __int32 int_fast32_t; - typedef __int64 int_fast64_t; - typedef unsigned __int8 uint_fast8_t; - typedef unsigned __int16 uint_fast16_t; - typedef unsigned __int32 uint_fast32_t; - typedef unsigned __int64 uint_fast64_t; - typedef __int64 intmax_t; - typedef unsigned __int64 uintmax_t; -# else -# include -# endif -# if _MSC_VER < 1800 /* MSVC < 2013 */ -# ifndef __cplusplus - typedef unsigned char _Bool; -# endif -# endif -# define _cffi_float_complex_t _Fcomplex /* include for it */ -# define _cffi_double_complex_t _Dcomplex /* include for it */ -#else -# include -# if (defined (__SVR4) 
&& defined (__sun)) || defined(_AIX) || defined(__hpux) -# include -# endif -# define _cffi_float_complex_t float _Complex -# define _cffi_double_complex_t double _Complex -#endif - -#ifdef __GNUC__ -# define _CFFI_UNUSED_FN __attribute__((unused)) -#else -# define _CFFI_UNUSED_FN /* nothing */ -#endif - -#ifdef __cplusplus -# ifndef _Bool - typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ -# endif -#endif - -/********** CPython-specific section **********/ -#ifndef PYPY_VERSION - - -#if PY_MAJOR_VERSION >= 3 -# define PyInt_FromLong PyLong_FromLong -#endif - -#define _cffi_from_c_double PyFloat_FromDouble -#define _cffi_from_c_float PyFloat_FromDouble -#define _cffi_from_c_long PyInt_FromLong -#define _cffi_from_c_ulong PyLong_FromUnsignedLong -#define _cffi_from_c_longlong PyLong_FromLongLong -#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong -#define _cffi_from_c__Bool PyBool_FromLong - -#define _cffi_to_c_double PyFloat_AsDouble -#define _cffi_to_c_float PyFloat_AsDouble - -#define _cffi_from_c_int(x, type) \ - (((type)-1) > 0 ? /* unsigned */ \ - (sizeof(type) < sizeof(long) ? \ - PyInt_FromLong((long)x) : \ - sizeof(type) == sizeof(long) ? \ - PyLong_FromUnsignedLong((unsigned long)x) : \ - PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ - (sizeof(type) <= sizeof(long) ? \ - PyInt_FromLong((long)x) : \ - PyLong_FromLongLong((long long)x))) - -#define _cffi_to_c_int(o, type) \ - ((type)( \ - sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ - : (type)_cffi_to_c_i8(o)) : \ - sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ - : (type)_cffi_to_c_i16(o)) : \ - sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ - : (type)_cffi_to_c_i32(o)) : \ - sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ - : (type)_cffi_to_c_i64(o)) : \ - (Py_FatalError("unsupported size for type " #type), (type)0))) - -#define _cffi_to_c_i8 \ - ((int(*)(PyObject *))_cffi_exports[1]) -#define _cffi_to_c_u8 \ - ((int(*)(PyObject *))_cffi_exports[2]) -#define _cffi_to_c_i16 \ - ((int(*)(PyObject *))_cffi_exports[3]) -#define _cffi_to_c_u16 \ - ((int(*)(PyObject *))_cffi_exports[4]) -#define _cffi_to_c_i32 \ - ((int(*)(PyObject *))_cffi_exports[5]) -#define _cffi_to_c_u32 \ - ((unsigned int(*)(PyObject *))_cffi_exports[6]) -#define _cffi_to_c_i64 \ - ((long long(*)(PyObject *))_cffi_exports[7]) -#define _cffi_to_c_u64 \ - ((unsigned long long(*)(PyObject *))_cffi_exports[8]) -#define _cffi_to_c_char \ - ((int(*)(PyObject *))_cffi_exports[9]) -#define _cffi_from_c_pointer \ - ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) -#define _cffi_to_c_pointer \ - ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) -#define _cffi_get_struct_layout \ - not used any more -#define _cffi_restore_errno \ - ((void(*)(void))_cffi_exports[13]) -#define _cffi_save_errno \ - ((void(*)(void))_cffi_exports[14]) -#define _cffi_from_c_char \ - ((PyObject *(*)(char))_cffi_exports[15]) -#define _cffi_from_c_deref \ - ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) -#define _cffi_to_c \ - ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) -#define _cffi_from_c_struct \ - ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) -#define _cffi_to_c_wchar_t \ - ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) -#define _cffi_from_c_wchar_t \ - ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) -#define _cffi_to_c_long_double \ - ((long double(*)(PyObject *))_cffi_exports[21]) -#define _cffi_to_c__Bool \ - ((_Bool(*)(PyObject *))_cffi_exports[22]) -#define _cffi_prepare_pointer_call_argument \ - ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ - PyObject *, char **))_cffi_exports[23]) -#define _cffi_convert_array_from_object \ - ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) -#define _CFFI_CPIDX 25 -#define _cffi_call_python \ - ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) -#define _cffi_to_c_wchar3216_t \ - ((int(*)(PyObject *))_cffi_exports[26]) -#define _cffi_from_c_wchar3216_t \ - ((PyObject *(*)(int))_cffi_exports[27]) -#define _CFFI_NUM_EXPORTS 28 - -struct _cffi_ctypedescr; - -static void *_cffi_exports[_CFFI_NUM_EXPORTS]; - -#define _cffi_type(index) ( \ - assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ - (struct _cffi_ctypedescr *)_cffi_types[index]) - -static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, - const struct _cffi_type_context_s *ctx) -{ - PyObject *module, *o_arg, *new_module; - void *raw[] = { - (void *)module_name, - (void *)version, - (void *)_cffi_exports, - (void *)ctx, - }; - - module = PyImport_ImportModule("_cffi_backend"); - if (module == NULL) - goto failure; - - o_arg = PyLong_FromVoidPtr((void *)raw); - if (o_arg == NULL) - goto failure; - - new_module = PyObject_CallMethod( - module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); - - Py_DECREF(o_arg); - Py_DECREF(module); - return new_module; - - failure: - Py_XDECREF(module); - return NULL; -} - - -#ifdef HAVE_WCHAR_H -typedef wchar_t _cffi_wchar_t; -#else -typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ -#endif - -_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) -{ - if (sizeof(_cffi_wchar_t) == 2) 
- return (uint16_t)_cffi_to_c_wchar_t(o); - else - return (uint16_t)_cffi_to_c_wchar3216_t(o); -} - -_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) -{ - if (sizeof(_cffi_wchar_t) == 2) - return _cffi_from_c_wchar_t((_cffi_wchar_t)x); - else - return _cffi_from_c_wchar3216_t((int)x); -} - -_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) -{ - if (sizeof(_cffi_wchar_t) == 4) - return (int)_cffi_to_c_wchar_t(o); - else - return (int)_cffi_to_c_wchar3216_t(o); -} - -_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) -{ - if (sizeof(_cffi_wchar_t) == 4) - return _cffi_from_c_wchar_t((_cffi_wchar_t)x); - else - return _cffi_from_c_wchar3216_t((int)x); -} - -union _cffi_union_alignment_u { - unsigned char m_char; - unsigned short m_short; - unsigned int m_int; - unsigned long m_long; - unsigned long long m_longlong; - float m_float; - double m_double; - long double m_longdouble; -}; - -struct _cffi_freeme_s { - struct _cffi_freeme_s *next; - union _cffi_union_alignment_u alignment; -}; - -_CFFI_UNUSED_FN static int -_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, - char **output_data, Py_ssize_t datasize, - struct _cffi_freeme_s **freeme) -{ - char *p; - if (datasize < 0) - return -1; - - p = *output_data; - if (p == NULL) { - struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( - offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); - if (fp == NULL) - return -1; - fp->next = *freeme; - *freeme = fp; - p = *output_data = (char *)&fp->alignment; - } - memset((void *)p, 0, (size_t)datasize); - return _cffi_convert_array_from_object(p, ctptr, arg); -} - -_CFFI_UNUSED_FN static void -_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) -{ - do { - void *p = (void *)freeme; - freeme = freeme->next; - PyObject_Free(p); - } while (freeme != NULL); -} - -/********** end CPython-specific section **********/ -#else -_CFFI_UNUSED_FN -static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); -# define _cffi_call_python _cffi_call_python_org -#endif - - -#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) - -#define _cffi_prim_int(size, sign) \ - ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ - (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ - (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ - (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ - _CFFI__UNKNOWN_PRIM) - -#define _cffi_prim_float(size) \ - ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ - (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ - (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE : \ - _CFFI__UNKNOWN_FLOAT_PRIM) - -#define _cffi_check_int(got, got_nonpos, expected) \ - ((got_nonpos) == (expected <= 0) && \ - (got) == (unsigned long long)expected) - -#ifdef MS_WIN32 -# define _cffi_stdcall __stdcall -#else -# define _cffi_stdcall /* nothing */ -#endif - -#ifdef __cplusplus -} -#endif diff --git a/venv/lib/python3.10/site-packages/cffi/_embedding.h b/venv/lib/python3.10/site-packages/cffi/_embedding.h deleted file mode 100644 index 94d8b30..0000000 --- a/venv/lib/python3.10/site-packages/cffi/_embedding.h +++ /dev/null @@ -1,550 +0,0 @@ - -/***** Support code for embedding *****/ - -#ifdef __cplusplus -extern "C" { -#endif - - -#if defined(_WIN32) -# define CFFI_DLLEXPORT __declspec(dllexport) -#elif defined(__GNUC__) -# define CFFI_DLLEXPORT __attribute__((visibility("default"))) -#else -# define CFFI_DLLEXPORT /* nothing */ -#endif - - -/* There are two global variables of type _cffi_call_python_fnptr: - - * _cffi_call_python, which we declare just below, is the one called - by ``extern "Python"`` implementations. - - * _cffi_call_python_org, which on CPython is actually part of the - _cffi_exports[] array, is the function pointer copied from - _cffi_backend. If _cffi_start_python() fails, then this is set - to NULL; otherwise, it should never be NULL. - - After initialization is complete, both are equal. However, the - first one remains equal to &_cffi_start_and_call_python until the - very end of initialization, when we are (or should be) sure that - concurrent threads also see a completely initialized world, and - only then is it changed. -*/ -#undef _cffi_call_python -typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); -static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); -static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; - - -#ifndef _MSC_VER - /* --- Assuming a GCC not infinitely old --- */ -# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) -# define cffi_write_barrier() __sync_synchronize() -# if !defined(__amd64__) && !defined(__x86_64__) && \ - !defined(__i386__) && !defined(__i386) -# define cffi_read_barrier() __sync_synchronize() -# else -# define cffi_read_barrier() (void)0 -# endif -#else - /* --- Windows threads version --- */ -# include -# define cffi_compare_and_swap(l,o,n) \ - (InterlockedCompareExchangePointer(l,n,o) == (o)) -# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) -# define cffi_read_barrier() (void)0 -static volatile LONG _cffi_dummy; -#endif - -#ifdef WITH_THREAD -# ifndef _MSC_VER -# include - static pthread_mutex_t _cffi_embed_startup_lock; -# else - static CRITICAL_SECTION _cffi_embed_startup_lock; -# endif - static char _cffi_embed_startup_lock_ready = 0; -#endif - -static void _cffi_acquire_reentrant_mutex(void) -{ - static void *volatile lock = NULL; - - while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { - /* should ideally do a spin loop instruction here, but - hard to do it portably and doesn't really matter I - think: pthread_mutex_init() should be very fast, and - this is only run at start-up anyway. 
*/ - } - -#ifdef WITH_THREAD - if (!_cffi_embed_startup_lock_ready) { -# ifndef _MSC_VER - pthread_mutexattr_t attr; - pthread_mutexattr_init(&attr); - pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); - pthread_mutex_init(&_cffi_embed_startup_lock, &attr); -# else - InitializeCriticalSection(&_cffi_embed_startup_lock); -# endif - _cffi_embed_startup_lock_ready = 1; - } -#endif - - while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) - ; - -#ifndef _MSC_VER - pthread_mutex_lock(&_cffi_embed_startup_lock); -#else - EnterCriticalSection(&_cffi_embed_startup_lock); -#endif -} - -static void _cffi_release_reentrant_mutex(void) -{ -#ifndef _MSC_VER - pthread_mutex_unlock(&_cffi_embed_startup_lock); -#else - LeaveCriticalSection(&_cffi_embed_startup_lock); -#endif -} - - -/********** CPython-specific section **********/ -#ifndef PYPY_VERSION - -#include "_cffi_errors.h" - - -#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] - -PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ - -static void _cffi_py_initialize(void) -{ - /* XXX use initsigs=0, which "skips initialization registration of - signal handlers, which might be useful when Python is - embedded" according to the Python docs. But review and think - if it should be a user-controllable setting. - - XXX we should also give a way to write errors to a buffer - instead of to stderr. - - XXX if importing 'site' fails, CPython (any version) calls - exit(). Should we try to work around this behavior here? - */ - Py_InitializeEx(0); -} - -static int _cffi_initialize_python(void) -{ - /* This initializes Python, imports _cffi_backend, and then the - present .dll/.so is set up as a CPython C extension module. - */ - int result; - PyGILState_STATE state; - PyObject *pycode=NULL, *global_dict=NULL, *x; - PyObject *builtins; - - state = PyGILState_Ensure(); - - /* Call the initxxx() function from the present module. It will - create and initialize us as a CPython extension module, instead - of letting the startup Python code do it---it might reimport - the same .dll/.so and get maybe confused on some platforms. - It might also have troubles locating the .dll/.so again for all - I know. - */ - (void)_CFFI_PYTHON_STARTUP_FUNC(); - if (PyErr_Occurred()) - goto error; - - /* Now run the Python code provided to ffi.embedding_init_code(). - */ - pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, - "", - Py_file_input); - if (pycode == NULL) - goto error; - global_dict = PyDict_New(); - if (global_dict == NULL) - goto error; - builtins = PyEval_GetBuiltins(); - if (builtins == NULL) - goto error; - if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) - goto error; - x = PyEval_EvalCode( -#if PY_MAJOR_VERSION < 3 - (PyCodeObject *) -#endif - pycode, global_dict, global_dict); - if (x == NULL) - goto error; - Py_DECREF(x); - - /* Done! Now if we've been called from - _cffi_start_and_call_python() in an ``extern "Python"``, we can - only hope that the Python code did correctly set up the - corresponding @ffi.def_extern() function. Otherwise, the - general logic of ``extern "Python"`` functions (inside the - _cffi_backend module) will find that the reference is still - missing and print an error. - */ - result = 0; - done: - Py_XDECREF(pycode); - Py_XDECREF(global_dict); - PyGILState_Release(state); - return result; - - error:; - { - /* Print as much information as potentially useful. 
- Debugging load-time failures with embedding is not fun - */ - PyObject *ecap; - PyObject *exception, *v, *tb, *f, *modules, *mod; - PyErr_Fetch(&exception, &v, &tb); - ecap = _cffi_start_error_capture(); - f = PySys_GetObject((char *)"stderr"); - if (f != NULL && f != Py_None) { - PyFile_WriteString( - "Failed to initialize the Python-CFFI embedding logic:\n\n", f); - } - - if (exception != NULL) { - PyErr_NormalizeException(&exception, &v, &tb); - PyErr_Display(exception, v, tb); - } - Py_XDECREF(exception); - Py_XDECREF(v); - Py_XDECREF(tb); - - if (f != NULL && f != Py_None) { - PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.17.1" - "\n_cffi_backend module: ", f); - modules = PyImport_GetModuleDict(); - mod = PyDict_GetItemString(modules, "_cffi_backend"); - if (mod == NULL) { - PyFile_WriteString("not loaded", f); - } - else { - v = PyObject_GetAttrString(mod, "__file__"); - PyFile_WriteObject(v, f, 0); - Py_XDECREF(v); - } - PyFile_WriteString("\nsys.path: ", f); - PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); - PyFile_WriteString("\n\n", f); - } - _cffi_stop_error_capture(ecap); - } - result = -1; - goto done; -} - -#if PY_VERSION_HEX < 0x03080000 -PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ -#endif - -static int _cffi_carefully_make_gil(void) -{ - /* This does the basic initialization of Python. It can be called - completely concurrently from unrelated threads. It assumes - that we don't hold the GIL before (if it exists), and we don't - hold it afterwards. - - (What it really does used to be completely different in Python 2 - and Python 3, with the Python 2 solution avoiding the spin-lock - around the Py_InitializeEx() call. However, after recent changes - to CPython 2.7 (issue #358) it no longer works. So we use the - Python 3 solution everywhere.) - - This initializes Python by calling Py_InitializeEx(). - Important: this must not be called concurrently at all. - So we use a global variable as a simple spin lock. This global - variable must be from 'libpythonX.Y.so', not from this - cffi-based extension module, because it must be shared from - different cffi-based extension modules. - - In Python < 3.8, we choose - _PyParser_TokenNames[0] as a completely arbitrary pointer value - that is never written to. The default is to point to the - string "ENDMARKER". We change it temporarily to point to the - next character in that string. (Yes, I know it's REALLY - obscure.) - - In Python >= 3.8, this string array is no longer writable, so - instead we pick PyCapsuleType.tp_version_tag. We can't change - Python < 3.8 because someone might use a mixture of cffi - embedded modules, some of which were compiled before this file - changed. - - In Python >= 3.12, this stopped working because that particular - tp_version_tag gets modified during interpreter startup. It's - arguably a bad idea before 3.12 too, but again we can't change - that because someone might use a mixture of cffi embedded - modules, and no-one reported a bug so far. In Python >= 3.12 - we go instead for PyCapsuleType.tp_as_buffer, which is supposed - to always be NULL. We write to it temporarily a pointer to - a struct full of NULLs, which is semantically the same. 
- */ - -#ifdef WITH_THREAD -# if PY_VERSION_HEX < 0x03080000 - char *volatile *lock = (char *volatile *)_PyParser_TokenNames; - char *old_value, *locked_value; - - while (1) { /* spin loop */ - old_value = *lock; - locked_value = old_value + 1; - if (old_value[0] == 'E') { - assert(old_value[1] == 'N'); - if (cffi_compare_and_swap(lock, old_value, locked_value)) - break; - } - else { - assert(old_value[0] == 'N'); - /* should ideally do a spin loop instruction here, but - hard to do it portably and doesn't really matter I - think: PyEval_InitThreads() should be very fast, and - this is only run at start-up anyway. */ - } - } -# else -# if PY_VERSION_HEX < 0x030C0000 - int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; - int old_value, locked_value = -42; - assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); -# else - static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs; - empty_buffer_procs.mark = -42; - PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *) - &PyCapsule_Type.tp_as_buffer; - PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf; -# endif - - while (1) { /* spin loop */ - old_value = *lock; - if (old_value == 0) { - if (cffi_compare_and_swap(lock, old_value, locked_value)) - break; - } - else { -# if PY_VERSION_HEX < 0x030C0000 - assert(old_value == locked_value); -# else - /* The pointer should point to a possibly different - empty_buffer_procs from another C extension module */ - assert(((struct ebp_s *)old_value)->mark == -42); -# endif - /* should ideally do a spin loop instruction here, but - hard to do it portably and doesn't really matter I - think: PyEval_InitThreads() should be very fast, and - this is only run at start-up anyway. */ - } - } -# endif -#endif - - /* call Py_InitializeEx() */ - if (!Py_IsInitialized()) { - _cffi_py_initialize(); -#if PY_VERSION_HEX < 0x03070000 - PyEval_InitThreads(); -#endif - PyEval_SaveThread(); /* release the GIL */ - /* the returned tstate must be the one that has been stored into the - autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ - } - else { -#if PY_VERSION_HEX < 0x03070000 - /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ - PyGILState_STATE state = PyGILState_Ensure(); - PyEval_InitThreads(); - PyGILState_Release(state); -#endif - } - -#ifdef WITH_THREAD - /* release the lock */ - while (!cffi_compare_and_swap(lock, locked_value, old_value)) - ; -#endif - - return 0; -} - -/********** end CPython-specific section **********/ - - -#else - - -/********** PyPy-specific section **********/ - -PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ - -static struct _cffi_pypy_init_s { - const char *name; - void *func; /* function pointer */ - const char *code; -} _cffi_pypy_init = { - _CFFI_MODULE_NAME, - _CFFI_PYTHON_STARTUP_FUNC, - _CFFI_PYTHON_STARTUP_CODE, -}; - -extern int pypy_carefully_make_gil(const char *); -extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); - -static int _cffi_carefully_make_gil(void) -{ - return pypy_carefully_make_gil(_CFFI_MODULE_NAME); -} - -static int _cffi_initialize_python(void) -{ - return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); -} - -/********** end PyPy-specific section **********/ - - -#endif - - -#ifdef __GNUC__ -__attribute__((noinline)) -#endif -static _cffi_call_python_fnptr _cffi_start_python(void) -{ - /* Delicate logic to initialize Python. This function can be - called multiple times concurrently, e.g. 
when the process calls - its first ``extern "Python"`` functions in multiple threads at - once. It can also be called recursively, in which case we must - ignore it. We also have to consider what occurs if several - different cffi-based extensions reach this code in parallel - threads---it is a different copy of the code, then, and we - can't have any shared global variable unless it comes from - 'libpythonX.Y.so'. - - Idea: - - * _cffi_carefully_make_gil(): "carefully" call - PyEval_InitThreads() (possibly with Py_InitializeEx() first). - - * then we use a (local) custom lock to make sure that a call to this - cffi-based extension will wait if another call to the *same* - extension is running the initialization in another thread. - It is reentrant, so that a recursive call will not block, but - only one from a different thread. - - * then we grab the GIL and (Python 2) we call Py_InitializeEx(). - At this point, concurrent calls to Py_InitializeEx() are not - possible: we have the GIL. - - * do the rest of the specific initialization, which may - temporarily release the GIL but not the custom lock. - Only release the custom lock when we are done. - */ - static char called = 0; - - if (_cffi_carefully_make_gil() != 0) - return NULL; - - _cffi_acquire_reentrant_mutex(); - - /* Here the GIL exists, but we don't have it. We're only protected - from concurrency by the reentrant mutex. */ - - /* This file only initializes the embedded module once, the first - time this is called, even if there are subinterpreters. */ - if (!called) { - called = 1; /* invoke _cffi_initialize_python() only once, - but don't set '_cffi_call_python' right now, - otherwise concurrent threads won't call - this function at all (we need them to wait) */ - if (_cffi_initialize_python() == 0) { - /* now initialization is finished. Switch to the fast-path. */ - - /* We would like nobody to see the new value of - '_cffi_call_python' without also seeing the rest of the - data initialized. However, this is not possible. But - the new value of '_cffi_call_python' is the function - 'cffi_call_python()' from _cffi_backend. So: */ - cffi_write_barrier(); - /* ^^^ we put a write barrier here, and a corresponding - read barrier at the start of cffi_call_python(). This - ensures that after that read barrier, we see everything - done here before the write barrier. - */ - - assert(_cffi_call_python_org != NULL); - _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; - } - else { - /* initialization failed. Reset this to NULL, even if it was - already set to some other value. Future calls to - _cffi_start_python() are still forced to occur, and will - always return NULL from now on. */ - _cffi_call_python_org = NULL; - } - } - - _cffi_release_reentrant_mutex(); - - return (_cffi_call_python_fnptr)_cffi_call_python_org; -} - -static -void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) -{ - _cffi_call_python_fnptr fnptr; - int current_err = errno; -#ifdef _MSC_VER - int current_lasterr = GetLastError(); -#endif - fnptr = _cffi_start_python(); - if (fnptr == NULL) { - fprintf(stderr, "function %s() called, but initialization code " - "failed. Returning 0.\n", externpy->name); - memset(args, 0, externpy->size_of_result); - } -#ifdef _MSC_VER - SetLastError(current_lasterr); -#endif - errno = current_err; - - if (fnptr != NULL) - fnptr(externpy, args); -} - - -/* The cffi_start_python() function makes sure Python is initialized - and our cffi module is set up. 
It can be called manually from the - user C code. The same effect is obtained automatically from any - dll-exported ``extern "Python"`` function. This function returns - -1 if initialization failed, 0 if all is OK. */ -_CFFI_UNUSED_FN -static int cffi_start_python(void) -{ - if (_cffi_call_python == &_cffi_start_and_call_python) { - if (_cffi_start_python() == NULL) - return -1; - } - cffi_read_barrier(); - return 0; -} - -#undef cffi_compare_and_swap -#undef cffi_write_barrier -#undef cffi_read_barrier - -#ifdef __cplusplus -} -#endif diff --git a/venv/lib/python3.10/site-packages/cffi/_imp_emulation.py b/venv/lib/python3.10/site-packages/cffi/_imp_emulation.py deleted file mode 100644 index 136abdd..0000000 --- a/venv/lib/python3.10/site-packages/cffi/_imp_emulation.py +++ /dev/null @@ -1,83 +0,0 @@ - -try: - # this works on Python < 3.12 - from imp import * - -except ImportError: - # this is a limited emulation for Python >= 3.12. - # Note that this is used only for tests or for the old ffi.verify(). - # This is copied from the source code of Python 3.11. - - from _imp import (acquire_lock, release_lock, - is_builtin, is_frozen) - - from importlib._bootstrap import _load - - from importlib import machinery - import os - import sys - import tokenize - - SEARCH_ERROR = 0 - PY_SOURCE = 1 - PY_COMPILED = 2 - C_EXTENSION = 3 - PY_RESOURCE = 4 - PKG_DIRECTORY = 5 - C_BUILTIN = 6 - PY_FROZEN = 7 - PY_CODERESOURCE = 8 - IMP_HOOK = 9 - - def get_suffixes(): - extensions = [(s, 'rb', C_EXTENSION) - for s in machinery.EXTENSION_SUFFIXES] - source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] - bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] - return extensions + source + bytecode - - def find_module(name, path=None): - if not isinstance(name, str): - raise TypeError("'name' must be a str, not {}".format(type(name))) - elif not isinstance(path, (type(None), list)): - # Backwards-compatibility - raise RuntimeError("'path' must be None or a list, " - "not {}".format(type(path))) - - if path is None: - if is_builtin(name): - return None, None, ('', '', C_BUILTIN) - elif is_frozen(name): - return None, None, ('', '', PY_FROZEN) - else: - path = sys.path - - for entry in path: - package_directory = os.path.join(entry, name) - for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: - package_file_name = '__init__' + suffix - file_path = os.path.join(package_directory, package_file_name) - if os.path.isfile(file_path): - return None, package_directory, ('', '', PKG_DIRECTORY) - for suffix, mode, type_ in get_suffixes(): - file_name = name + suffix - file_path = os.path.join(entry, file_name) - if os.path.isfile(file_path): - break - else: - continue - break # Break out of outer loop when breaking out of inner loop. 
- else: - raise ImportError(name, name=name) - - encoding = None - if 'b' not in mode: - with open(file_path, 'rb') as file: - encoding = tokenize.detect_encoding(file.readline)[0] - file = open(file_path, mode, encoding=encoding) - return file, file_path, (suffix, mode, type_) - - def load_dynamic(name, path, file=None): - loader = machinery.ExtensionFileLoader(name, path) - spec = machinery.ModuleSpec(name=name, loader=loader, origin=path) - return _load(spec) diff --git a/venv/lib/python3.10/site-packages/cffi/_shimmed_dist_utils.py b/venv/lib/python3.10/site-packages/cffi/_shimmed_dist_utils.py deleted file mode 100644 index c3d2312..0000000 --- a/venv/lib/python3.10/site-packages/cffi/_shimmed_dist_utils.py +++ /dev/null @@ -1,45 +0,0 @@ -""" -Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful -error messages beyond `No module named 'distutils' on Python >= 3.12, or when setuptools' vendored distutils is broken. - -This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI. -""" -import sys - -try: - # import setuptools first; this is the most robust way to ensure its embedded distutils is available - # (the .pth shim should usually work, but this is even more robust) - import setuptools -except Exception as ex: - if sys.version_info >= (3, 12): - # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal - raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex - - # silently ignore on older Pythons (support fallback to stdlib distutils where available) -else: - del setuptools - -try: - # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils - from distutils import log, sysconfig - from distutils.ccompiler import CCompiler - from distutils.command.build_ext import build_ext - from distutils.core import Distribution, Extension - from distutils.dir_util import mkpath - from distutils.errors import DistutilsSetupError, CompileError, LinkError - from distutils.log import set_threshold, set_verbosity - - if sys.platform == 'win32': - try: - # FUTURE: msvc9compiler module was removed in setuptools 74; consider removing, as it's only used by an ancient patch in `recompiler` - from distutils.msvc9compiler import MSVCCompiler - except ImportError: - MSVCCompiler = None -except Exception as ex: - if sys.version_info >= (3, 12): - raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex - - # anything older, just let the underlying distutils import error fly - raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex - -del sys diff --git a/venv/lib/python3.10/site-packages/cffi/api.py b/venv/lib/python3.10/site-packages/cffi/api.py deleted file mode 100644 index 5a474f3..0000000 --- a/venv/lib/python3.10/site-packages/cffi/api.py +++ /dev/null @@ -1,967 +0,0 @@ -import sys, types -from .lock import allocate_lock -from .error import CDefError -from . 
import model - -try: - callable -except NameError: - # Python 3.1 - from collections import Callable - callable = lambda x: isinstance(x, Callable) - -try: - basestring -except NameError: - # Python 3.x - basestring = str - -_unspecified = object() - - - -class FFI(object): - r''' - The main top-level class that you instantiate once, or once per module. - - Example usage: - - ffi = FFI() - ffi.cdef(""" - int printf(const char *, ...); - """) - - C = ffi.dlopen(None) # standard library - -or- - C = ffi.verify() # use a C compiler: verify the decl above is right - - C.printf("hello, %s!\n", ffi.new("char[]", "world")) - ''' - - def __init__(self, backend=None): - """Create an FFI instance. The 'backend' argument is used to - select a non-default backend, mostly for tests. - """ - if backend is None: - # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with - # _cffi_backend.so compiled. - import _cffi_backend as backend - from . import __version__ - if backend.__version__ != __version__: - # bad version! Try to be as explicit as possible. - if hasattr(backend, '__file__'): - # CPython - raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( - __version__, __file__, - backend.__version__, backend.__file__)) - else: - # PyPy - raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( - __version__, __file__, backend.__version__)) - # (If you insist you can also try to pass the option - # 'backend=backend_ctypes.CTypesBackend()', but don't - # rely on it! It's probably not going to work well.) - - from . import cparser - self._backend = backend - self._lock = allocate_lock() - self._parser = cparser.Parser() - self._cached_btypes = {} - self._parsed_types = types.ModuleType('parsed_types').__dict__ - self._new_types = types.ModuleType('new_types').__dict__ - self._function_caches = [] - self._libraries = [] - self._cdefsources = [] - self._included_ffis = [] - self._windows_unicode = None - self._init_once_cache = {} - self._cdef_version = None - self._embedding = None - self._typecache = model.get_typecache(backend) - if hasattr(backend, 'set_ffi'): - backend.set_ffi(self) - for name in list(backend.__dict__): - if name.startswith('RTLD_'): - setattr(self, name, getattr(backend, name)) - # - with self._lock: - self.BVoidP = self._get_cached_btype(model.voidp_type) - self.BCharA = self._get_cached_btype(model.char_array_type) - if isinstance(backend, types.ModuleType): - # _cffi_backend: attach these constants to the class - if not hasattr(FFI, 'NULL'): - FFI.NULL = self.cast(self.BVoidP, 0) - FFI.CData, FFI.CType = backend._get_types() - else: - # ctypes backend: attach these constants to the instance - self.NULL = self.cast(self.BVoidP, 0) - self.CData, self.CType = backend._get_types() - self.buffer = backend.buffer - - def cdef(self, csource, override=False, packed=False, pack=None): - """Parse the given C source. This registers all declared functions, - types, and global variables. The functions and global variables can - then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. - The types can be used in 'ffi.new()' and other functions. 
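As a minimal sketch of the cdef()-then-new() flow this docstring describes (the point_t typedef below is made up for illustration, not taken from this file):

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("typedef struct { int x, y; } point_t;")
    p = ffi.new("point_t *")          # zero-initialized; memory owned by 'p'
    p.x, p.y = 3, 4
    assert ffi.sizeof("point_t") == ffi.sizeof(p[0])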
- If 'packed' is specified as True, all structs declared inside this - cdef are packed, i.e. laid out without any field alignment at all. - Alternatively, 'pack' can be a small integer, and requests for - alignment greater than that are ignored (pack=1 is equivalent to - packed=True). - """ - self._cdef(csource, override=override, packed=packed, pack=pack) - - def embedding_api(self, csource, packed=False, pack=None): - self._cdef(csource, packed=packed, pack=pack, dllexport=True) - if self._embedding is None: - self._embedding = '' - - def _cdef(self, csource, override=False, **options): - if not isinstance(csource, str): # unicode, on Python 2 - if not isinstance(csource, basestring): - raise TypeError("cdef() argument must be a string") - csource = csource.encode('ascii') - with self._lock: - self._cdef_version = object() - self._parser.parse(csource, override=override, **options) - self._cdefsources.append(csource) - if override: - for cache in self._function_caches: - cache.clear() - finishlist = self._parser._recomplete - if finishlist: - self._parser._recomplete = [] - for tp in finishlist: - tp.finish_backend_type(self, finishlist) - - def dlopen(self, name, flags=0): - """Load and return a dynamic library identified by 'name'. - The standard C library can be loaded by passing None. - Note that functions and types declared by 'ffi.cdef()' are not - linked to a particular library, just like C headers; in the - library we only look for the actual (untyped) symbols. - """ - if not (isinstance(name, basestring) or - name is None or - isinstance(name, self.CData)): - raise TypeError("dlopen(name): name must be a file name, None, " - "or an already-opened 'void *' handle") - with self._lock: - lib, function_cache = _make_ffi_library(self, name, flags) - self._function_caches.append(function_cache) - self._libraries.append(lib) - return lib - - def dlclose(self, lib): - """Close a library obtained with ffi.dlopen(). After this call, - access to functions or variables from the library will fail - (possibly with a segmentation fault). - """ - type(lib).__cffi_close__(lib) - - def _typeof_locked(self, cdecl): - # call me with the lock! - key = cdecl - if key in self._parsed_types: - return self._parsed_types[key] - # - if not isinstance(cdecl, str): # unicode, on Python 2 - cdecl = cdecl.encode('ascii') - # - type = self._parser.parse_type(cdecl) - really_a_function_type = type.is_raw_function - if really_a_function_type: - type = type.as_function_pointer() - btype = self._get_cached_btype(type) - result = btype, really_a_function_type - self._parsed_types[key] = result - return result - - def _typeof(self, cdecl, consider_function_as_funcptr=False): - # string -> ctype object - try: - result = self._parsed_types[cdecl] - except KeyError: - with self._lock: - result = self._typeof_locked(cdecl) - # - btype, really_a_function_type = result - if really_a_function_type and not consider_function_as_funcptr: - raise CDefError("the type %r is a function type, not a " - "pointer-to-function type" % (cdecl,)) - return btype - - def typeof(self, cdecl): - """Parse the C type given as a string and return the - corresponding object. - It can also be used on 'cdata' instance to get its C type. 
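A small sketch of typeof() as described just above, using a hypothetical typedef rather than anything declared in this file:

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("typedef struct { int a; } foo_t;")
    print(ffi.typeof("foo_t"))        # ctype object for 'foo_t'
    x = ffi.new("foo_t *")
    print(ffi.typeof(x))              # ctype object for 'foo_t *'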
- """ - if isinstance(cdecl, basestring): - return self._typeof(cdecl) - if isinstance(cdecl, self.CData): - return self._backend.typeof(cdecl) - if isinstance(cdecl, types.BuiltinFunctionType): - res = _builtin_function_type(cdecl) - if res is not None: - return res - if (isinstance(cdecl, types.FunctionType) - and hasattr(cdecl, '_cffi_base_type')): - with self._lock: - return self._get_cached_btype(cdecl._cffi_base_type) - raise TypeError(type(cdecl)) - - def sizeof(self, cdecl): - """Return the size in bytes of the argument. It can be a - string naming a C type, or a 'cdata' instance. - """ - if isinstance(cdecl, basestring): - BType = self._typeof(cdecl) - return self._backend.sizeof(BType) - else: - return self._backend.sizeof(cdecl) - - def alignof(self, cdecl): - """Return the natural alignment size in bytes of the C type - given as a string. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.alignof(cdecl) - - def offsetof(self, cdecl, *fields_or_indexes): - """Return the offset of the named field inside the given - structure or array, which must be given as a C type name. - You can give several field names in case of nested structures. - You can also give numeric values which correspond to array - items, in case of an array type. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._typeoffsetof(cdecl, *fields_or_indexes)[1] - - def new(self, cdecl, init=None): - """Allocate an instance according to the specified C type and - return a pointer to it. The specified C type must be either a - pointer or an array: ``new('X *')`` allocates an X and returns - a pointer to it, whereas ``new('X[n]')`` allocates an array of - n X'es and returns an array referencing it (which works - mostly like a pointer, like in C). You can also use - ``new('X[]', n)`` to allocate an array of a non-constant - length n. - - The memory is initialized following the rules of declaring a - global variable in C: by default it is zero-initialized, but - an explicit initializer can be given which can be used to - fill all or part of the memory. - - When the returned object goes out of scope, the memory - is freed. In other words the returned object has - ownership of the value of type 'cdecl' that it points to. This - means that the raw data can be used as long as this object is - kept alive, but must not be used for a longer time. Be careful - about that when copying the pointer to the memory somewhere - else, e.g. into another structure. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.newp(cdecl, init) - - def new_allocator(self, alloc=None, free=None, - should_clear_after_alloc=True): - """Return a new allocator, i.e. a function that behaves like ffi.new() - but uses the provided low-level 'alloc' and 'free' functions. - - 'alloc' is called with the size as argument. If it returns NULL, a - MemoryError is raised. 'free' is called with the result of 'alloc' - as argument. Both can be either Python function or directly C - functions. If 'free' is None, then no free function is called. - If both 'alloc' and 'free' are None, the default is used. - - If 'should_clear_after_alloc' is set to False, then the memory - returned by 'alloc' is assumed to be already cleared (or you are - fine with garbage); otherwise CFFI will clear it. 
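The new_allocator() docstring above mentions both custom alloc/free pairs and the should_clear_after_alloc flag; a rough sketch of both uses follows (dlopen(None) for libc assumes a non-Windows build):

    from cffi import FFI

    ffi = FFI()

    # 1) keep the default allocator but skip the zero-fill
    new_nonzero = ffi.new_allocator(should_clear_after_alloc=False)
    buf = new_nonzero("unsigned char[256]")       # contents are uninitialized

    # 2) route allocations through libc's malloc/free
    ffi.cdef("void *malloc(size_t size); void free(void *ptr);")
    libc = ffi.dlopen(None)
    new_malloced = ffi.new_allocator(alloc=libc.malloc, free=libc.free)
    arr = new_malloced("int[16]")                 # released via libc.free along with 'arr'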
- """ - compiled_ffi = self._backend.FFI() - allocator = compiled_ffi.new_allocator(alloc, free, - should_clear_after_alloc) - def allocate(cdecl, init=None): - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return allocator(cdecl, init) - return allocate - - def cast(self, cdecl, source): - """Similar to a C cast: returns an instance of the named C - type initialized with the given 'source'. The source is - casted between integers or pointers of any type. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.cast(cdecl, source) - - def string(self, cdata, maxlen=-1): - """Return a Python string (or unicode string) from the 'cdata'. - If 'cdata' is a pointer or array of characters or bytes, returns - the null-terminated string. The returned string extends until - the first null character, or at most 'maxlen' characters. If - 'cdata' is an array then 'maxlen' defaults to its length. - - If 'cdata' is a pointer or array of wchar_t, returns a unicode - string following the same rules. - - If 'cdata' is a single character or byte or a wchar_t, returns - it as a string or unicode string. - - If 'cdata' is an enum, returns the value of the enumerator as a - string, or 'NUMBER' if the value is out of range. - """ - return self._backend.string(cdata, maxlen) - - def unpack(self, cdata, length): - """Unpack an array of C data of the given length, - returning a Python string/unicode/list. - - If 'cdata' is a pointer to 'char', returns a byte string. - It does not stop at the first null. This is equivalent to: - ffi.buffer(cdata, length)[:] - - If 'cdata' is a pointer to 'wchar_t', returns a unicode string. - 'length' is measured in wchar_t's; it is not the size in bytes. - - If 'cdata' is a pointer to anything else, returns a list of - 'length' items. This is a faster equivalent to: - [cdata[i] for i in range(length)] - """ - return self._backend.unpack(cdata, length) - - #def buffer(self, cdata, size=-1): - # """Return a read-write buffer object that references the raw C data - # pointed to by the given 'cdata'. The 'cdata' must be a pointer or - # an array. Can be passed to functions expecting a buffer, or directly - # manipulated with: - # - # buf[:] get a copy of it in a regular string, or - # buf[idx] as a single character - # buf[:] = ... - # buf[idx] = ... change the content - # """ - # note that 'buffer' is a type, set on this instance by __init__ - - def from_buffer(self, cdecl, python_buffer=_unspecified, - require_writable=False): - """Return a cdata of the given type pointing to the data of the - given Python object, which must support the buffer interface. - Note that this is not meant to be used on the built-in types - str or unicode (you can build 'char[]' arrays explicitly) - but only on objects containing large quantities of raw data - in some other format, like 'array.array' or numpy arrays. - - The first argument is optional and default to 'char[]'. - """ - if python_buffer is _unspecified: - cdecl, python_buffer = self.BCharA, cdecl - elif isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.from_buffer(cdecl, python_buffer, - require_writable) - - def memmove(self, dest, src, n): - """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. - - Like the C function memmove(), the memory areas may overlap; - apart from that it behaves like the C function memcpy(). - - 'src' can be any cdata ptr or array, or any Python buffer object. 
- 'dest' can be any cdata ptr or array, or a writable Python buffer - object. The size to copy, 'n', is always measured in bytes. - - Unlike other methods, this one supports all Python buffer including - byte strings and bytearrays---but it still does not support - non-contiguous buffers. - """ - return self._backend.memmove(dest, src, n) - - def callback(self, cdecl, python_callable=None, error=None, onerror=None): - """Return a callback object or a decorator making such a - callback object. 'cdecl' must name a C function pointer type. - The callback invokes the specified 'python_callable' (which may - be provided either directly or via a decorator). Important: the - callback object must be manually kept alive for as long as the - callback may be invoked from the C level. - """ - def callback_decorator_wrap(python_callable): - if not callable(python_callable): - raise TypeError("the 'python_callable' argument " - "is not callable") - return self._backend.callback(cdecl, python_callable, - error, onerror) - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) - if python_callable is None: - return callback_decorator_wrap # decorator mode - else: - return callback_decorator_wrap(python_callable) # direct mode - - def getctype(self, cdecl, replace_with=''): - """Return a string giving the C type 'cdecl', which may be itself - a string or a object. If 'replace_with' is given, it gives - extra text to append (or insert for more complicated C types), like - a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - replace_with = replace_with.strip() - if (replace_with.startswith('*') - and '&[' in self._backend.getcname(cdecl, '&')): - replace_with = '(%s)' % replace_with - elif replace_with and not replace_with[0] in '[(': - replace_with = ' ' + replace_with - return self._backend.getcname(cdecl, replace_with) - - def gc(self, cdata, destructor, size=0): - """Return a new cdata object that points to the same - data. Later, when this new cdata object is garbage-collected, - 'destructor(old_cdata_object)' will be called. - - The optional 'size' gives an estimate of the size, used to - trigger the garbage collection more eagerly. So far only used - on PyPy. It tells the GC that the returned object keeps alive - roughly 'size' bytes of external memory. - """ - return self._backend.gcp(cdata, destructor, size) - - def _get_cached_btype(self, type): - assert self._lock.acquire(False) is False - # call me with the lock! - try: - BType = self._cached_btypes[type] - except KeyError: - finishlist = [] - BType = type.get_cached_btype(self, finishlist) - for type in finishlist: - type.finish_backend_type(self, finishlist) - return BType - - def verify(self, source='', tmpdir=None, **kwargs): - """Verify that the current ffi signatures compile on this - machine, and return a dynamic library object. The dynamic - library can be used to call functions and access global - variables declared in this 'ffi'. The library is compiled - by the C compiler: it gives you C-level API compatibility - (including calling macros). This is unlike 'ffi.dlopen()', - which requires binary compatibility in the signatures. 
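The verify() docstring above is about compiling the cdef()ed declarations with a real C compiler for API-level access (as opposed to the binary-level matching of dlopen()); the classic shape of that call, assuming a compiler and the m math library are available, is roughly:

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("double sin(double x);")
    lib = ffi.verify("#include <math.h>", libraries=["m"])
    print(lib.sin(1.0))               # calls the compiled wrapper around sin()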
- """ - from .verifier import Verifier, _caller_dir_pycache - # - # If set_unicode(True) was called, insert the UNICODE and - # _UNICODE macro declarations - if self._windows_unicode: - self._apply_windows_unicode(kwargs) - # - # Set the tmpdir here, and not in Verifier.__init__: it picks - # up the caller's directory, which we want to be the caller of - # ffi.verify(), as opposed to the caller of Veritier(). - tmpdir = tmpdir or _caller_dir_pycache() - # - # Make a Verifier() and use it to load the library. - self.verifier = Verifier(self, source, tmpdir, **kwargs) - lib = self.verifier.load_library() - # - # Save the loaded library for keep-alive purposes, even - # if the caller doesn't keep it alive itself (it should). - self._libraries.append(lib) - return lib - - def _get_errno(self): - return self._backend.get_errno() - def _set_errno(self, errno): - self._backend.set_errno(errno) - errno = property(_get_errno, _set_errno, None, - "the value of 'errno' from/to the C calls") - - def getwinerror(self, code=-1): - return self._backend.getwinerror(code) - - def _pointer_to(self, ctype): - with self._lock: - return model.pointer_cache(self, ctype) - - def addressof(self, cdata, *fields_or_indexes): - """Return the address of a . - If 'fields_or_indexes' are given, returns the address of that - field or array item in the structure or array, recursively in - case of nested structures. - """ - try: - ctype = self._backend.typeof(cdata) - except TypeError: - if '__addressof__' in type(cdata).__dict__: - return type(cdata).__addressof__(cdata, *fields_or_indexes) - raise - if fields_or_indexes: - ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) - else: - if ctype.kind == "pointer": - raise TypeError("addressof(pointer)") - offset = 0 - ctypeptr = self._pointer_to(ctype) - return self._backend.rawaddressof(ctypeptr, cdata, offset) - - def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): - ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) - for field1 in fields_or_indexes: - ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) - offset += offset1 - return ctype, offset - - def include(self, ffi_to_include): - """Includes the typedefs, structs, unions and enums defined - in another FFI instance. Usage is similar to a #include in C, - where a part of the program might include types defined in - another part for its own usage. Note that the include() - method has no effect on functions, constants and global - variables, which must anyway be accessed directly from the - lib object returned by the original FFI instance. - """ - if not isinstance(ffi_to_include, FFI): - raise TypeError("ffi.include() expects an argument that is also of" - " type cffi.FFI, not %r" % ( - type(ffi_to_include).__name__,)) - if ffi_to_include is self: - raise ValueError("self.include(self)") - with ffi_to_include._lock: - with self._lock: - self._parser.include(ffi_to_include._parser) - self._cdefsources.append('[') - self._cdefsources.extend(ffi_to_include._cdefsources) - self._cdefsources.append(']') - self._included_ffis.append(ffi_to_include) - - def new_handle(self, x): - return self._backend.newp_handle(self.BVoidP, x) - - def from_handle(self, x): - return self._backend.from_handle(x) - - def release(self, x): - self._backend.release(x) - - def set_unicode(self, enabled_flag): - """Windows: if 'enabled_flag' is True, enable the UNICODE and - _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR - to be (pointers to) wchar_t. 
If 'enabled_flag' is False, - declare these types to be (pointers to) plain 8-bit characters. - This is mostly for backward compatibility; you usually want True. - """ - if self._windows_unicode is not None: - raise ValueError("set_unicode() can only be called once") - enabled_flag = bool(enabled_flag) - if enabled_flag: - self.cdef("typedef wchar_t TBYTE;" - "typedef wchar_t TCHAR;" - "typedef const wchar_t *LPCTSTR;" - "typedef const wchar_t *PCTSTR;" - "typedef wchar_t *LPTSTR;" - "typedef wchar_t *PTSTR;" - "typedef TBYTE *PTBYTE;" - "typedef TCHAR *PTCHAR;") - else: - self.cdef("typedef char TBYTE;" - "typedef char TCHAR;" - "typedef const char *LPCTSTR;" - "typedef const char *PCTSTR;" - "typedef char *LPTSTR;" - "typedef char *PTSTR;" - "typedef TBYTE *PTBYTE;" - "typedef TCHAR *PTCHAR;") - self._windows_unicode = enabled_flag - - def _apply_windows_unicode(self, kwds): - defmacros = kwds.get('define_macros', ()) - if not isinstance(defmacros, (list, tuple)): - raise TypeError("'define_macros' must be a list or tuple") - defmacros = list(defmacros) + [('UNICODE', '1'), - ('_UNICODE', '1')] - kwds['define_macros'] = defmacros - - def _apply_embedding_fix(self, kwds): - # must include an argument like "-lpython2.7" for the compiler - def ensure(key, value): - lst = kwds.setdefault(key, []) - if value not in lst: - lst.append(value) - # - if '__pypy__' in sys.builtin_module_names: - import os - if sys.platform == "win32": - # we need 'libpypy-c.lib'. Current distributions of - # pypy (>= 4.1) contain it as 'libs/python27.lib'. - pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) - if hasattr(sys, 'prefix'): - ensure('library_dirs', os.path.join(sys.prefix, 'libs')) - else: - # we need 'libpypy-c.{so,dylib}', which should be by - # default located in 'sys.prefix/bin' for installed - # systems. - if sys.version_info < (3,): - pythonlib = "pypy-c" - else: - pythonlib = "pypy3-c" - if hasattr(sys, 'prefix'): - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - # On uninstalled pypy's, the libpypy-c is typically found in - # .../pypy/goal/. - if hasattr(sys, 'prefix'): - ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) - else: - if sys.platform == "win32": - template = "python%d%d" - if hasattr(sys, 'gettotalrefcount'): - template += '_d' - else: - try: - import sysconfig - except ImportError: # 2.6 - from cffi._shimmed_dist_utils import sysconfig - template = "python%d.%d" - if sysconfig.get_config_var('DEBUG_EXT'): - template += sysconfig.get_config_var('DEBUG_EXT') - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) - if hasattr(sys, 'abiflags'): - pythonlib += sys.abiflags - ensure('libraries', pythonlib) - if sys.platform == "win32": - ensure('extra_link_args', '/MANIFEST') - - def set_source(self, module_name, source, source_extension='.c', **kwds): - import os - if hasattr(self, '_assigned_source'): - raise ValueError("set_source() cannot be called several times " - "per ffi object") - if not isinstance(module_name, basestring): - raise TypeError("'module_name' must be a string") - if os.sep in module_name or (os.altsep and os.altsep in module_name): - raise ValueError("'module_name' must not contain '/': use a dotted " - "name to make a 'package.module' location") - self._assigned_source = (str(module_name), source, - source_extension, kwds) - - def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, - source_extension='.c', **kwds): - from . 
import pkgconfig - if not isinstance(pkgconfig_libs, list): - raise TypeError("the pkgconfig_libs argument must be a list " - "of package names") - kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) - pkgconfig.merge_flags(kwds, kwds2) - self.set_source(module_name, source, source_extension, **kwds) - - def distutils_extension(self, tmpdir='build', verbose=True): - from cffi._shimmed_dist_utils import mkpath - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored - return self.verifier.get_extension() - raise ValueError("set_source() must be called before" - " distutils_extension()") - module_name, source, source_extension, kwds = self._assigned_source - if source is None: - raise TypeError("distutils_extension() is only for C extension " - "modules, not for dlopen()-style pure Python " - "modules") - mkpath(tmpdir) - ext, updated = recompile(self, module_name, - source, tmpdir=tmpdir, extradir=tmpdir, - source_extension=source_extension, - call_c_compiler=False, **kwds) - if verbose: - if updated: - sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) - else: - sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) - return ext - - def emit_c_code(self, filename): - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - raise ValueError("set_source() must be called before emit_c_code()") - module_name, source, source_extension, kwds = self._assigned_source - if source is None: - raise TypeError("emit_c_code() is only for C extension modules, " - "not for dlopen()-style pure Python modules") - recompile(self, module_name, source, - c_file=filename, call_c_compiler=False, - uses_ffiplatform=False, **kwds) - - def emit_python_code(self, filename): - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - raise ValueError("set_source() must be called before emit_c_code()") - module_name, source, source_extension, kwds = self._assigned_source - if source is not None: - raise TypeError("emit_python_code() is only for dlopen()-style " - "pure Python modules, not for C extension modules") - recompile(self, module_name, source, - c_file=filename, call_c_compiler=False, - uses_ffiplatform=False, **kwds) - - def compile(self, tmpdir='.', verbose=0, target=None, debug=None): - """The 'target' argument gives the final file name of the - compiled DLL. Use '*' to force distutils' choice, suitable for - regular CPython C API modules. Use a file name ending in '.*' - to ask for the system's default extension for dynamic libraries - (.so/.dll/.dylib). - - The default is '*' when building a non-embedded C API extension, - and (module_name + '.*') when building an embedded library. - """ - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - raise ValueError("set_source() must be called before compile()") - module_name, source, source_extension, kwds = self._assigned_source - return recompile(self, module_name, source, tmpdir=tmpdir, - target=target, source_extension=source_extension, - compiler_verbose=verbose, debug=debug, **kwds) - - def init_once(self, func, tag): - # Read _init_once_cache[tag], which is either (False, lock) if - # we're calling the function now in some thread, or (True, result). - # Don't call setdefault() in most cases, to avoid allocating and - # immediately freeing a lock; but still use setdefaut() to avoid - # races. 
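The comment above explains the internal (done, value-or-lock) protocol; from the caller's point of view init_once() is simply a run-at-most-once helper, sketched here with a made-up loader function:

    from cffi import FFI

    ffi = FFI()

    def _load_site_config():
        # pretend this is expensive; it runs once per ffi instance,
        # even if several threads ask for the same tag concurrently
        return {"debug": False}

    cfg = ffi.init_once(_load_site_config, "site-config")
    again = ffi.init_once(_load_site_config, "site-config")   # cached result
    assert cfg is again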
- try: - x = self._init_once_cache[tag] - except KeyError: - x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) - # Common case: we got (True, result), so we return the result. - if x[0]: - return x[1] - # Else, it's a lock. Acquire it to serialize the following tests. - with x[1]: - # Read again from _init_once_cache the current status. - x = self._init_once_cache[tag] - if x[0]: - return x[1] - # Call the function and store the result back. - result = func() - self._init_once_cache[tag] = (True, result) - return result - - def embedding_init_code(self, pysource): - if self._embedding: - raise ValueError("embedding_init_code() can only be called once") - # fix 'pysource' before it gets dumped into the C file: - # - remove empty lines at the beginning, so it starts at "line 1" - # - dedent, if all non-empty lines are indented - # - check for SyntaxErrors - import re - match = re.match(r'\s*\n', pysource) - if match: - pysource = pysource[match.end():] - lines = pysource.splitlines() or [''] - prefix = re.match(r'\s*', lines[0]).group() - for i in range(1, len(lines)): - line = lines[i] - if line.rstrip(): - while not line.startswith(prefix): - prefix = prefix[:-1] - i = len(prefix) - lines = [line[i:]+'\n' for line in lines] - pysource = ''.join(lines) - # - compile(pysource, "cffi_init", "exec") - # - self._embedding = pysource - - def def_extern(self, *args, **kwds): - raise ValueError("ffi.def_extern() is only available on API-mode FFI " - "objects") - - def list_types(self): - """Returns the user type names known to this FFI instance. - This returns a tuple containing three lists of names: - (typedef_names, names_of_structs, names_of_unions) - """ - typedefs = [] - structs = [] - unions = [] - for key in self._parser._declarations: - if key.startswith('typedef '): - typedefs.append(key[8:]) - elif key.startswith('struct '): - structs.append(key[7:]) - elif key.startswith('union '): - unions.append(key[6:]) - typedefs.sort() - structs.sort() - unions.sort() - return (typedefs, structs, unions) - - -def _load_backend_lib(backend, name, flags): - import os - if not isinstance(name, basestring): - if sys.platform != "win32" or name is not None: - return backend.load_library(name, flags) - name = "c" # Windows: load_library(None) fails, but this works - # on Python 2 (backward compatibility hack only) - first_error = None - if '.' in name or '/' in name or os.sep in name: - try: - return backend.load_library(name, flags) - except OSError as e: - first_error = e - import ctypes.util - path = ctypes.util.find_library(name) - if path is None: - if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): - raise OSError("dlopen(None) cannot work on Windows for Python 3 " - "(see http://bugs.python.org/issue23606)") - msg = ("ctypes.util.find_library() did not manage " - "to locate a library called %r" % (name,)) - if first_error is not None: - msg = "%s. 
Additionally, %s" % (first_error, msg) - raise OSError(msg) - return backend.load_library(path, flags) - -def _make_ffi_library(ffi, libname, flags): - backend = ffi._backend - backendlib = _load_backend_lib(backend, libname, flags) - # - def accessor_function(name): - key = 'function ' + name - tp, _ = ffi._parser._declarations[key] - BType = ffi._get_cached_btype(tp) - value = backendlib.load_function(BType, name) - library.__dict__[name] = value - # - def accessor_variable(name): - key = 'variable ' + name - tp, _ = ffi._parser._declarations[key] - BType = ffi._get_cached_btype(tp) - read_variable = backendlib.read_variable - write_variable = backendlib.write_variable - setattr(FFILibrary, name, property( - lambda self: read_variable(BType, name), - lambda self, value: write_variable(BType, name, value))) - # - def addressof_var(name): - try: - return addr_variables[name] - except KeyError: - with ffi._lock: - if name not in addr_variables: - key = 'variable ' + name - tp, _ = ffi._parser._declarations[key] - BType = ffi._get_cached_btype(tp) - if BType.kind != 'array': - BType = model.pointer_cache(ffi, BType) - p = backendlib.load_function(BType, name) - addr_variables[name] = p - return addr_variables[name] - # - def accessor_constant(name): - raise NotImplementedError("non-integer constant '%s' cannot be " - "accessed from a dlopen() library" % (name,)) - # - def accessor_int_constant(name): - library.__dict__[name] = ffi._parser._int_constants[name] - # - accessors = {} - accessors_version = [False] - addr_variables = {} - # - def update_accessors(): - if accessors_version[0] is ffi._cdef_version: - return - # - for key, (tp, _) in ffi._parser._declarations.items(): - if not isinstance(tp, model.EnumType): - tag, name = key.split(' ', 1) - if tag == 'function': - accessors[name] = accessor_function - elif tag == 'variable': - accessors[name] = accessor_variable - elif tag == 'constant': - accessors[name] = accessor_constant - else: - for i, enumname in enumerate(tp.enumerators): - def accessor_enum(name, tp=tp, i=i): - tp.check_not_partial() - library.__dict__[name] = tp.enumvalues[i] - accessors[enumname] = accessor_enum - for name in ffi._parser._int_constants: - accessors.setdefault(name, accessor_int_constant) - accessors_version[0] = ffi._cdef_version - # - def make_accessor(name): - with ffi._lock: - if name in library.__dict__ or name in FFILibrary.__dict__: - return # added by another thread while waiting for the lock - if name not in accessors: - update_accessors() - if name not in accessors: - raise AttributeError(name) - accessors[name](name) - # - class FFILibrary(object): - def __getattr__(self, name): - make_accessor(name) - return getattr(self, name) - def __setattr__(self, name, value): - try: - property = getattr(self.__class__, name) - except AttributeError: - make_accessor(name) - setattr(self, name, value) - else: - property.__set__(self, value) - def __dir__(self): - with ffi._lock: - update_accessors() - return accessors.keys() - def __addressof__(self, name): - if name in library.__dict__: - return library.__dict__[name] - if name in FFILibrary.__dict__: - return addressof_var(name) - make_accessor(name) - if name in library.__dict__: - return library.__dict__[name] - if name in FFILibrary.__dict__: - return addressof_var(name) - raise AttributeError("cffi library has no function or " - "global variable named '%s'" % (name,)) - def __cffi_close__(self): - backendlib.close_lib() - self.__dict__.clear() - # - if isinstance(libname, basestring): - try: - if not 
isinstance(libname, str): # unicode, on Python 2 - libname = libname.encode('utf-8') - FFILibrary.__name__ = 'FFILibrary_%s' % libname - except UnicodeError: - pass - library = FFILibrary() - return library, library.__dict__ - -def _builtin_function_type(func): - # a hack to make at least ffi.typeof(builtin_function) work, - # if the builtin function was obtained by 'vengine_cpy'. - import sys - try: - module = sys.modules[func.__module__] - ffi = module._cffi_original_ffi - types_of_builtin_funcs = module._cffi_types_of_builtin_funcs - tp = types_of_builtin_funcs[func] - except (KeyError, AttributeError, TypeError): - return None - else: - with ffi._lock: - return ffi._get_cached_btype(tp) diff --git a/venv/lib/python3.10/site-packages/cffi/backend_ctypes.py b/venv/lib/python3.10/site-packages/cffi/backend_ctypes.py deleted file mode 100644 index e7956a7..0000000 --- a/venv/lib/python3.10/site-packages/cffi/backend_ctypes.py +++ /dev/null @@ -1,1121 +0,0 @@ -import ctypes, ctypes.util, operator, sys -from . import model - -if sys.version_info < (3,): - bytechr = chr -else: - unicode = str - long = int - xrange = range - bytechr = lambda num: bytes([num]) - -class CTypesType(type): - pass - -class CTypesData(object): - __metaclass__ = CTypesType - __slots__ = ['__weakref__'] - __name__ = '' - - def __init__(self, *args): - raise TypeError("cannot instantiate %r" % (self.__class__,)) - - @classmethod - def _newp(cls, init): - raise TypeError("expected a pointer or array ctype, got '%s'" - % (cls._get_c_name(),)) - - @staticmethod - def _to_ctypes(value): - raise TypeError - - @classmethod - def _arg_to_ctypes(cls, *value): - try: - ctype = cls._ctype - except AttributeError: - raise TypeError("cannot create an instance of %r" % (cls,)) - if value: - res = cls._to_ctypes(*value) - if not isinstance(res, ctype): - res = cls._ctype(res) - else: - res = cls._ctype() - return res - - @classmethod - def _create_ctype_obj(cls, init): - if init is None: - return cls._arg_to_ctypes() - else: - return cls._arg_to_ctypes(init) - - @staticmethod - def _from_ctypes(ctypes_value): - raise TypeError - - @classmethod - def _get_c_name(cls, replace_with=''): - return cls._reftypename.replace(' &', replace_with) - - @classmethod - def _fix_class(cls): - cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) - cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) - cls.__module__ = 'ffi' - - def _get_own_repr(self): - raise NotImplementedError - - def _addr_repr(self, address): - if address == 0: - return 'NULL' - else: - if address < 0: - address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) - return '0x%x' % address - - def __repr__(self, c_name=None): - own = self._get_own_repr() - return '' % (c_name or self._get_c_name(), own) - - def _convert_to_address(self, BClass): - if BClass is None: - raise TypeError("cannot convert %r to an address" % ( - self._get_c_name(),)) - else: - raise TypeError("cannot convert %r to %r" % ( - self._get_c_name(), BClass._get_c_name())) - - @classmethod - def _get_size(cls): - return ctypes.sizeof(cls._ctype) - - def _get_size_of_instance(self): - return ctypes.sizeof(self._ctype) - - @classmethod - def _cast_from(cls, source): - raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) - - def _cast_to_integer(self): - return self._convert_to_address(None) - - @classmethod - def _alignment(cls): - return ctypes.alignment(cls._ctype) - - def __iter__(self): - raise TypeError("cdata %r does not support iteration" % ( - self._get_c_name()),) - - def _make_cmp(name): - cmpfunc = 
getattr(operator, name) - def cmp(self, other): - v_is_ptr = not isinstance(self, CTypesGenericPrimitive) - w_is_ptr = (isinstance(other, CTypesData) and - not isinstance(other, CTypesGenericPrimitive)) - if v_is_ptr and w_is_ptr: - return cmpfunc(self._convert_to_address(None), - other._convert_to_address(None)) - elif v_is_ptr or w_is_ptr: - return NotImplemented - else: - if isinstance(self, CTypesGenericPrimitive): - self = self._value - if isinstance(other, CTypesGenericPrimitive): - other = other._value - return cmpfunc(self, other) - cmp.func_name = name - return cmp - - __eq__ = _make_cmp('__eq__') - __ne__ = _make_cmp('__ne__') - __lt__ = _make_cmp('__lt__') - __le__ = _make_cmp('__le__') - __gt__ = _make_cmp('__gt__') - __ge__ = _make_cmp('__ge__') - - def __hash__(self): - return hash(self._convert_to_address(None)) - - def _to_string(self, maxlen): - raise TypeError("string(): %r" % (self,)) - - -class CTypesGenericPrimitive(CTypesData): - __slots__ = [] - - def __hash__(self): - return hash(self._value) - - def _get_own_repr(self): - return repr(self._from_ctypes(self._value)) - - -class CTypesGenericArray(CTypesData): - __slots__ = [] - - @classmethod - def _newp(cls, init): - return cls(init) - - def __iter__(self): - for i in xrange(len(self)): - yield self[i] - - def _get_own_repr(self): - return self._addr_repr(ctypes.addressof(self._blob)) - - -class CTypesGenericPtr(CTypesData): - __slots__ = ['_address', '_as_ctype_ptr'] - _automatic_casts = False - kind = "pointer" - - @classmethod - def _newp(cls, init): - return cls(init) - - @classmethod - def _cast_from(cls, source): - if source is None: - address = 0 - elif isinstance(source, CTypesData): - address = source._cast_to_integer() - elif isinstance(source, (int, long)): - address = source - else: - raise TypeError("bad type for cast to %r: %r" % - (cls, type(source).__name__)) - return cls._new_pointer_at(address) - - @classmethod - def _new_pointer_at(cls, address): - self = cls.__new__(cls) - self._address = address - self._as_ctype_ptr = ctypes.cast(address, cls._ctype) - return self - - def _get_own_repr(self): - try: - return self._addr_repr(self._address) - except AttributeError: - return '???' 
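This ctypes-based backend is the pure-Python fallback that api.py mentions as an option mostly for tests; it can be plugged in roughly like this (library lookup shown for a Unix-like system, and not something to rely on for real work):

    from cffi import FFI
    from cffi.backend_ctypes import CTypesBackend

    ffi = FFI(backend=CTypesBackend())
    ffi.cdef("double sin(double x);")
    m = ffi.dlopen("m")               # resolved via ctypes.util.find_library("m")
    print(m.sin(0.0))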
- - def _cast_to_integer(self): - return self._address - - def __nonzero__(self): - return bool(self._address) - __bool__ = __nonzero__ - - @classmethod - def _to_ctypes(cls, value): - if not isinstance(value, CTypesData): - raise TypeError("unexpected %s object" % type(value).__name__) - address = value._convert_to_address(cls) - return ctypes.cast(address, cls._ctype) - - @classmethod - def _from_ctypes(cls, ctypes_ptr): - address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 - return cls._new_pointer_at(address) - - @classmethod - def _initialize(cls, ctypes_ptr, value): - if value: - ctypes_ptr.contents = cls._to_ctypes(value).contents - - def _convert_to_address(self, BClass): - if (BClass in (self.__class__, None) or BClass._automatic_casts - or self._automatic_casts): - return self._address - else: - return CTypesData._convert_to_address(self, BClass) - - -class CTypesBaseStructOrUnion(CTypesData): - __slots__ = ['_blob'] - - @classmethod - def _create_ctype_obj(cls, init): - # may be overridden - raise TypeError("cannot instantiate opaque type %s" % (cls,)) - - def _get_own_repr(self): - return self._addr_repr(ctypes.addressof(self._blob)) - - @classmethod - def _offsetof(cls, fieldname): - return getattr(cls._ctype, fieldname).offset - - def _convert_to_address(self, BClass): - if getattr(BClass, '_BItem', None) is self.__class__: - return ctypes.addressof(self._blob) - else: - return CTypesData._convert_to_address(self, BClass) - - @classmethod - def _from_ctypes(cls, ctypes_struct_or_union): - self = cls.__new__(cls) - self._blob = ctypes_struct_or_union - return self - - @classmethod - def _to_ctypes(cls, value): - return value._blob - - def __repr__(self, c_name=None): - return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) - - -class CTypesBackend(object): - - PRIMITIVE_TYPES = { - 'char': ctypes.c_char, - 'short': ctypes.c_short, - 'int': ctypes.c_int, - 'long': ctypes.c_long, - 'long long': ctypes.c_longlong, - 'signed char': ctypes.c_byte, - 'unsigned char': ctypes.c_ubyte, - 'unsigned short': ctypes.c_ushort, - 'unsigned int': ctypes.c_uint, - 'unsigned long': ctypes.c_ulong, - 'unsigned long long': ctypes.c_ulonglong, - 'float': ctypes.c_float, - 'double': ctypes.c_double, - '_Bool': ctypes.c_bool, - } - - for _name in ['unsigned long long', 'unsigned long', - 'unsigned int', 'unsigned short', 'unsigned char']: - _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) - PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_void_p): - PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_size_t): - PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] - - for _name in ['long long', 'long', 'int', 'short', 'signed char']: - _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) - PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_void_p): - PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] - PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_size_t): - PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] - - - def __init__(self): - self.RTLD_LAZY = 0 # not supported anyway by ctypes - self.RTLD_NOW = 0 - self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL - self.RTLD_LOCAL = ctypes.RTLD_LOCAL - - def set_ffi(self, ffi): - self.ffi = ffi - - def _get_types(self): - return CTypesData, CTypesType - - def load_library(self, path, flags=0): - cdll = ctypes.CDLL(path, flags) - return CTypesLibrary(self, cdll) - - def 
new_void_type(self): - class CTypesVoid(CTypesData): - __slots__ = [] - _reftypename = 'void &' - @staticmethod - def _from_ctypes(novalue): - return None - @staticmethod - def _to_ctypes(novalue): - if novalue is not None: - raise TypeError("None expected, got %s object" % - (type(novalue).__name__,)) - return None - CTypesVoid._fix_class() - return CTypesVoid - - def new_primitive_type(self, name): - if name == 'wchar_t': - raise NotImplementedError(name) - ctype = self.PRIMITIVE_TYPES[name] - if name == 'char': - kind = 'char' - elif name in ('float', 'double'): - kind = 'float' - else: - if name in ('signed char', 'unsigned char'): - kind = 'byte' - elif name == '_Bool': - kind = 'bool' - else: - kind = 'int' - is_signed = (ctype(-1).value == -1) - # - def _cast_source_to_int(source): - if isinstance(source, (int, long, float)): - source = int(source) - elif isinstance(source, CTypesData): - source = source._cast_to_integer() - elif isinstance(source, bytes): - source = ord(source) - elif source is None: - source = 0 - else: - raise TypeError("bad type for cast to %r: %r" % - (CTypesPrimitive, type(source).__name__)) - return source - # - kind1 = kind - class CTypesPrimitive(CTypesGenericPrimitive): - __slots__ = ['_value'] - _ctype = ctype - _reftypename = '%s &' % name - kind = kind1 - - def __init__(self, value): - self._value = value - - @staticmethod - def _create_ctype_obj(init): - if init is None: - return ctype() - return ctype(CTypesPrimitive._to_ctypes(init)) - - if kind == 'int' or kind == 'byte': - @classmethod - def _cast_from(cls, source): - source = _cast_source_to_int(source) - source = ctype(source).value # cast within range - return cls(source) - def __int__(self): - return self._value - - if kind == 'bool': - @classmethod - def _cast_from(cls, source): - if not isinstance(source, (int, long, float)): - source = _cast_source_to_int(source) - return cls(bool(source)) - def __int__(self): - return int(self._value) - - if kind == 'char': - @classmethod - def _cast_from(cls, source): - source = _cast_source_to_int(source) - source = bytechr(source & 0xFF) - return cls(source) - def __int__(self): - return ord(self._value) - - if kind == 'float': - @classmethod - def _cast_from(cls, source): - if isinstance(source, float): - pass - elif isinstance(source, CTypesGenericPrimitive): - if hasattr(source, '__float__'): - source = float(source) - else: - source = int(source) - else: - source = _cast_source_to_int(source) - source = ctype(source).value # fix precision - return cls(source) - def __int__(self): - return int(self._value) - def __float__(self): - return self._value - - _cast_to_integer = __int__ - - if kind == 'int' or kind == 'byte' or kind == 'bool': - @staticmethod - def _to_ctypes(x): - if not isinstance(x, (int, long)): - if isinstance(x, CTypesData): - x = int(x) - else: - raise TypeError("integer expected, got %s" % - type(x).__name__) - if ctype(x).value != x: - if not is_signed and x < 0: - raise OverflowError("%s: negative integer" % name) - else: - raise OverflowError("%s: integer out of bounds" - % name) - return x - - if kind == 'char': - @staticmethod - def _to_ctypes(x): - if isinstance(x, bytes) and len(x) == 1: - return x - if isinstance(x, CTypesPrimitive): # > - return x._value - raise TypeError("character expected, got %s" % - type(x).__name__) - def __nonzero__(self): - return ord(self._value) != 0 - else: - def __nonzero__(self): - return self._value != 0 - __bool__ = __nonzero__ - - if kind == 'float': - @staticmethod - def _to_ctypes(x): - if 
not isinstance(x, (int, long, float, CTypesData)): - raise TypeError("float expected, got %s" % - type(x).__name__) - return ctype(x).value - - @staticmethod - def _from_ctypes(value): - return getattr(value, 'value', value) - - @staticmethod - def _initialize(blob, init): - blob.value = CTypesPrimitive._to_ctypes(init) - - if kind == 'char': - def _to_string(self, maxlen): - return self._value - if kind == 'byte': - def _to_string(self, maxlen): - return chr(self._value & 0xff) - # - CTypesPrimitive._fix_class() - return CTypesPrimitive - - def new_pointer_type(self, BItem): - getbtype = self.ffi._get_cached_btype - if BItem is getbtype(model.PrimitiveType('char')): - kind = 'charp' - elif BItem in (getbtype(model.PrimitiveType('signed char')), - getbtype(model.PrimitiveType('unsigned char'))): - kind = 'bytep' - elif BItem is getbtype(model.void_type): - kind = 'voidp' - else: - kind = 'generic' - # - class CTypesPtr(CTypesGenericPtr): - __slots__ = ['_own'] - if kind == 'charp': - __slots__ += ['__as_strbuf'] - _BItem = BItem - if hasattr(BItem, '_ctype'): - _ctype = ctypes.POINTER(BItem._ctype) - _bitem_size = ctypes.sizeof(BItem._ctype) - else: - _ctype = ctypes.c_void_p - if issubclass(BItem, CTypesGenericArray): - _reftypename = BItem._get_c_name('(* &)') - else: - _reftypename = BItem._get_c_name(' * &') - - def __init__(self, init): - ctypeobj = BItem._create_ctype_obj(init) - if kind == 'charp': - self.__as_strbuf = ctypes.create_string_buffer( - ctypeobj.value + b'\x00') - self._as_ctype_ptr = ctypes.cast( - self.__as_strbuf, self._ctype) - else: - self._as_ctype_ptr = ctypes.pointer(ctypeobj) - self._address = ctypes.cast(self._as_ctype_ptr, - ctypes.c_void_p).value - self._own = True - - def __add__(self, other): - if isinstance(other, (int, long)): - return self._new_pointer_at(self._address + - other * self._bitem_size) - else: - return NotImplemented - - def __sub__(self, other): - if isinstance(other, (int, long)): - return self._new_pointer_at(self._address - - other * self._bitem_size) - elif type(self) is type(other): - return (self._address - other._address) // self._bitem_size - else: - return NotImplemented - - def __getitem__(self, index): - if getattr(self, '_own', False) and index != 0: - raise IndexError - return BItem._from_ctypes(self._as_ctype_ptr[index]) - - def __setitem__(self, index, value): - self._as_ctype_ptr[index] = BItem._to_ctypes(value) - - if kind == 'charp' or kind == 'voidp': - @classmethod - def _arg_to_ctypes(cls, *value): - if value and isinstance(value[0], bytes): - return ctypes.c_char_p(value[0]) - else: - return super(CTypesPtr, cls)._arg_to_ctypes(*value) - - if kind == 'charp' or kind == 'bytep': - def _to_string(self, maxlen): - if maxlen < 0: - maxlen = sys.maxsize - p = ctypes.cast(self._as_ctype_ptr, - ctypes.POINTER(ctypes.c_char)) - n = 0 - while n < maxlen and p[n] != b'\x00': - n += 1 - return b''.join([p[i] for i in range(n)]) - - def _get_own_repr(self): - if getattr(self, '_own', False): - return 'owning %d bytes' % ( - ctypes.sizeof(self._as_ctype_ptr.contents),) - return super(CTypesPtr, self)._get_own_repr() - # - if (BItem is self.ffi._get_cached_btype(model.void_type) or - BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): - CTypesPtr._automatic_casts = True - # - CTypesPtr._fix_class() - return CTypesPtr - - def new_array_type(self, CTypesPtr, length): - if length is None: - brackets = ' &[]' - else: - brackets = ' &[%d]' % length - BItem = CTypesPtr._BItem - getbtype = self.ffi._get_cached_btype - if 
BItem is getbtype(model.PrimitiveType('char')): - kind = 'char' - elif BItem in (getbtype(model.PrimitiveType('signed char')), - getbtype(model.PrimitiveType('unsigned char'))): - kind = 'byte' - else: - kind = 'generic' - # - class CTypesArray(CTypesGenericArray): - __slots__ = ['_blob', '_own'] - if length is not None: - _ctype = BItem._ctype * length - else: - __slots__.append('_ctype') - _reftypename = BItem._get_c_name(brackets) - _declared_length = length - _CTPtr = CTypesPtr - - def __init__(self, init): - if length is None: - if isinstance(init, (int, long)): - len1 = init - init = None - elif kind == 'char' and isinstance(init, bytes): - len1 = len(init) + 1 # extra null - else: - init = tuple(init) - len1 = len(init) - self._ctype = BItem._ctype * len1 - self._blob = self._ctype() - self._own = True - if init is not None: - self._initialize(self._blob, init) - - @staticmethod - def _initialize(blob, init): - if isinstance(init, bytes): - init = [init[i:i+1] for i in range(len(init))] - else: - if isinstance(init, CTypesGenericArray): - if (len(init) != len(blob) or - not isinstance(init, CTypesArray)): - raise TypeError("length/type mismatch: %s" % (init,)) - init = tuple(init) - if len(init) > len(blob): - raise IndexError("too many initializers") - addr = ctypes.cast(blob, ctypes.c_void_p).value - PTR = ctypes.POINTER(BItem._ctype) - itemsize = ctypes.sizeof(BItem._ctype) - for i, value in enumerate(init): - p = ctypes.cast(addr + i * itemsize, PTR) - BItem._initialize(p.contents, value) - - def __len__(self): - return len(self._blob) - - def __getitem__(self, index): - if not (0 <= index < len(self._blob)): - raise IndexError - return BItem._from_ctypes(self._blob[index]) - - def __setitem__(self, index, value): - if not (0 <= index < len(self._blob)): - raise IndexError - self._blob[index] = BItem._to_ctypes(value) - - if kind == 'char' or kind == 'byte': - def _to_string(self, maxlen): - if maxlen < 0: - maxlen = len(self._blob) - p = ctypes.cast(self._blob, - ctypes.POINTER(ctypes.c_char)) - n = 0 - while n < maxlen and p[n] != b'\x00': - n += 1 - return b''.join([p[i] for i in range(n)]) - - def _get_own_repr(self): - if getattr(self, '_own', False): - return 'owning %d bytes' % (ctypes.sizeof(self._blob),) - return super(CTypesArray, self)._get_own_repr() - - def _convert_to_address(self, BClass): - if BClass in (CTypesPtr, None) or BClass._automatic_casts: - return ctypes.addressof(self._blob) - else: - return CTypesData._convert_to_address(self, BClass) - - @staticmethod - def _from_ctypes(ctypes_array): - self = CTypesArray.__new__(CTypesArray) - self._blob = ctypes_array - return self - - @staticmethod - def _arg_to_ctypes(value): - return CTypesPtr._arg_to_ctypes(value) - - def __add__(self, other): - if isinstance(other, (int, long)): - return CTypesPtr._new_pointer_at( - ctypes.addressof(self._blob) + - other * ctypes.sizeof(BItem._ctype)) - else: - return NotImplemented - - @classmethod - def _cast_from(cls, source): - raise NotImplementedError("casting to %r" % ( - cls._get_c_name(),)) - # - CTypesArray._fix_class() - return CTypesArray - - def _new_struct_or_union(self, kind, name, base_ctypes_class): - # - class struct_or_union(base_ctypes_class): - pass - struct_or_union.__name__ = '%s_%s' % (kind, name) - kind1 = kind - # - class CTypesStructOrUnion(CTypesBaseStructOrUnion): - __slots__ = ['_blob'] - _ctype = struct_or_union - _reftypename = '%s &' % (name,) - _kind = kind = kind1 - # - CTypesStructOrUnion._fix_class() - return CTypesStructOrUnion - - def 
new_struct_type(self, name): - return self._new_struct_or_union('struct', name, ctypes.Structure) - - def new_union_type(self, name): - return self._new_struct_or_union('union', name, ctypes.Union) - - def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, - totalsize=-1, totalalignment=-1, sflags=0, - pack=0): - if totalsize >= 0 or totalalignment >= 0: - raise NotImplementedError("the ctypes backend of CFFI does not support " - "structures completed by verify(); please " - "compile and install the _cffi_backend module.") - struct_or_union = CTypesStructOrUnion._ctype - fnames = [fname for (fname, BField, bitsize) in fields] - btypes = [BField for (fname, BField, bitsize) in fields] - bitfields = [bitsize for (fname, BField, bitsize) in fields] - # - bfield_types = {} - cfields = [] - for (fname, BField, bitsize) in fields: - if bitsize < 0: - cfields.append((fname, BField._ctype)) - bfield_types[fname] = BField - else: - cfields.append((fname, BField._ctype, bitsize)) - bfield_types[fname] = Ellipsis - if sflags & 8: - struct_or_union._pack_ = 1 - elif pack: - struct_or_union._pack_ = pack - struct_or_union._fields_ = cfields - CTypesStructOrUnion._bfield_types = bfield_types - # - @staticmethod - def _create_ctype_obj(init): - result = struct_or_union() - if init is not None: - initialize(result, init) - return result - CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj - # - def initialize(blob, init): - if is_union: - if len(init) > 1: - raise ValueError("union initializer: %d items given, but " - "only one supported (use a dict if needed)" - % (len(init),)) - if not isinstance(init, dict): - if isinstance(init, (bytes, unicode)): - raise TypeError("union initializer: got a str") - init = tuple(init) - if len(init) > len(fnames): - raise ValueError("too many values for %s initializer" % - CTypesStructOrUnion._get_c_name()) - init = dict(zip(fnames, init)) - addr = ctypes.addressof(blob) - for fname, value in init.items(): - BField, bitsize = name2fieldtype[fname] - assert bitsize < 0, \ - "not implemented: initializer with bit fields" - offset = CTypesStructOrUnion._offsetof(fname) - PTR = ctypes.POINTER(BField._ctype) - p = ctypes.cast(addr + offset, PTR) - BField._initialize(p.contents, value) - is_union = CTypesStructOrUnion._kind == 'union' - name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) - # - for fname, BField, bitsize in fields: - if fname == '': - raise NotImplementedError("nested anonymous structs/unions") - if hasattr(CTypesStructOrUnion, fname): - raise ValueError("the field name %r conflicts in " - "the ctypes backend" % fname) - if bitsize < 0: - def getter(self, fname=fname, BField=BField, - offset=CTypesStructOrUnion._offsetof(fname), - PTR=ctypes.POINTER(BField._ctype)): - addr = ctypes.addressof(self._blob) - p = ctypes.cast(addr + offset, PTR) - return BField._from_ctypes(p.contents) - def setter(self, value, fname=fname, BField=BField): - setattr(self._blob, fname, BField._to_ctypes(value)) - # - if issubclass(BField, CTypesGenericArray): - setter = None - if BField._declared_length == 0: - def getter(self, fname=fname, BFieldPtr=BField._CTPtr, - offset=CTypesStructOrUnion._offsetof(fname), - PTR=ctypes.POINTER(BField._ctype)): - addr = ctypes.addressof(self._blob) - p = ctypes.cast(addr + offset, PTR) - return BFieldPtr._from_ctypes(p) - # - else: - def getter(self, fname=fname, BField=BField): - return BField._from_ctypes(getattr(self._blob, fname)) - def setter(self, value, fname=fname, BField=BField): - # xxx obscure workaround - 
value = BField._to_ctypes(value) - oldvalue = getattr(self._blob, fname) - setattr(self._blob, fname, value) - if value != getattr(self._blob, fname): - setattr(self._blob, fname, oldvalue) - raise OverflowError("value too large for bitfield") - setattr(CTypesStructOrUnion, fname, property(getter, setter)) - # - CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) - for fname in fnames: - if hasattr(CTypesPtr, fname): - raise ValueError("the field name %r conflicts in " - "the ctypes backend" % fname) - def getter(self, fname=fname): - return getattr(self[0], fname) - def setter(self, value, fname=fname): - setattr(self[0], fname, value) - setattr(CTypesPtr, fname, property(getter, setter)) - - def new_function_type(self, BArgs, BResult, has_varargs): - nameargs = [BArg._get_c_name() for BArg in BArgs] - if has_varargs: - nameargs.append('...') - nameargs = ', '.join(nameargs) - # - class CTypesFunctionPtr(CTypesGenericPtr): - __slots__ = ['_own_callback', '_name'] - _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), - *[BArg._ctype for BArg in BArgs], - use_errno=True) - _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) - - def __init__(self, init, error=None): - # create a callback to the Python callable init() - import traceback - assert not has_varargs, "varargs not supported for callbacks" - if getattr(BResult, '_ctype', None) is not None: - error = BResult._from_ctypes( - BResult._create_ctype_obj(error)) - else: - error = None - def callback(*args): - args2 = [] - for arg, BArg in zip(args, BArgs): - args2.append(BArg._from_ctypes(arg)) - try: - res2 = init(*args2) - res2 = BResult._to_ctypes(res2) - except: - traceback.print_exc() - res2 = error - if issubclass(BResult, CTypesGenericPtr): - if res2: - res2 = ctypes.cast(res2, ctypes.c_void_p).value - # .value: http://bugs.python.org/issue1574593 - else: - res2 = None - #print repr(res2) - return res2 - if issubclass(BResult, CTypesGenericPtr): - # The only pointers callbacks can return are void*s: - # http://bugs.python.org/issue5710 - callback_ctype = ctypes.CFUNCTYPE( - ctypes.c_void_p, - *[BArg._ctype for BArg in BArgs], - use_errno=True) - else: - callback_ctype = CTypesFunctionPtr._ctype - self._as_ctype_ptr = callback_ctype(callback) - self._address = ctypes.cast(self._as_ctype_ptr, - ctypes.c_void_p).value - self._own_callback = init - - @staticmethod - def _initialize(ctypes_ptr, value): - if value: - raise NotImplementedError("ctypes backend: not supported: " - "initializers for function pointers") - - def __repr__(self): - c_name = getattr(self, '_name', None) - if c_name: - i = self._reftypename.index('(* &)') - if self._reftypename[i-1] not in ' )*': - c_name = ' ' + c_name - c_name = self._reftypename.replace('(* &)', c_name) - return CTypesData.__repr__(self, c_name) - - def _get_own_repr(self): - if getattr(self, '_own_callback', None) is not None: - return 'calling %r' % (self._own_callback,) - return super(CTypesFunctionPtr, self)._get_own_repr() - - def __call__(self, *args): - if has_varargs: - assert len(args) >= len(BArgs) - extraargs = args[len(BArgs):] - args = args[:len(BArgs)] - else: - assert len(args) == len(BArgs) - ctypes_args = [] - for arg, BArg in zip(args, BArgs): - ctypes_args.append(BArg._arg_to_ctypes(arg)) - if has_varargs: - for i, arg in enumerate(extraargs): - if arg is None: - ctypes_args.append(ctypes.c_void_p(0)) # NULL - continue - if not isinstance(arg, CTypesData): - raise TypeError( - "argument %d passed in the variadic part " - "needs to be a cdata object 
(got %s)" % - (1 + len(BArgs) + i, type(arg).__name__)) - ctypes_args.append(arg._arg_to_ctypes(arg)) - result = self._as_ctype_ptr(*ctypes_args) - return BResult._from_ctypes(result) - # - CTypesFunctionPtr._fix_class() - return CTypesFunctionPtr - - def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): - assert isinstance(name, str) - reverse_mapping = dict(zip(reversed(enumvalues), - reversed(enumerators))) - # - class CTypesEnum(CTypesInt): - __slots__ = [] - _reftypename = '%s &' % name - - def _get_own_repr(self): - value = self._value - try: - return '%d: %s' % (value, reverse_mapping[value]) - except KeyError: - return str(value) - - def _to_string(self, maxlen): - value = self._value - try: - return reverse_mapping[value] - except KeyError: - return str(value) - # - CTypesEnum._fix_class() - return CTypesEnum - - def get_errno(self): - return ctypes.get_errno() - - def set_errno(self, value): - ctypes.set_errno(value) - - def string(self, b, maxlen=-1): - return b._to_string(maxlen) - - def buffer(self, bptr, size=-1): - raise NotImplementedError("buffer() with ctypes backend") - - def sizeof(self, cdata_or_BType): - if isinstance(cdata_or_BType, CTypesData): - return cdata_or_BType._get_size_of_instance() - else: - assert issubclass(cdata_or_BType, CTypesData) - return cdata_or_BType._get_size() - - def alignof(self, BType): - assert issubclass(BType, CTypesData) - return BType._alignment() - - def newp(self, BType, source): - if not issubclass(BType, CTypesData): - raise TypeError - return BType._newp(source) - - def cast(self, BType, source): - return BType._cast_from(source) - - def callback(self, BType, source, error, onerror): - assert onerror is None # XXX not implemented - return BType(source, error) - - _weakref_cache_ref = None - - def gcp(self, cdata, destructor, size=0): - if self._weakref_cache_ref is None: - import weakref - class MyRef(weakref.ref): - def __eq__(self, other): - myref = self() - return self is other or ( - myref is not None and myref is other()) - def __ne__(self, other): - return not (self == other) - def __hash__(self): - try: - return self._hash - except AttributeError: - self._hash = hash(self()) - return self._hash - self._weakref_cache_ref = {}, MyRef - weak_cache, MyRef = self._weakref_cache_ref - - if destructor is None: - try: - del weak_cache[MyRef(cdata)] - except KeyError: - raise TypeError("Can remove destructor only on a object " - "previously returned by ffi.gc()") - return None - - def remove(k): - cdata, destructor = weak_cache.pop(k, (None, None)) - if destructor is not None: - destructor(cdata) - - new_cdata = self.cast(self.typeof(cdata), cdata) - assert new_cdata is not cdata - weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) - return new_cdata - - typeof = type - - def getcname(self, BType, replace_with): - return BType._get_c_name(replace_with) - - def typeoffsetof(self, BType, fieldname, num=0): - if isinstance(fieldname, str): - if num == 0 and issubclass(BType, CTypesGenericPtr): - BType = BType._BItem - if not issubclass(BType, CTypesBaseStructOrUnion): - raise TypeError("expected a struct or union ctype") - BField = BType._bfield_types[fieldname] - if BField is Ellipsis: - raise TypeError("not supported for bitfields") - return (BField, BType._offsetof(fieldname)) - elif isinstance(fieldname, (int, long)): - if issubclass(BType, CTypesGenericArray): - BType = BType._CTPtr - if not issubclass(BType, CTypesGenericPtr): - raise TypeError("expected an array or ptr ctype") - BItem = BType._BItem - offset 
= BItem._get_size() * fieldname - if offset > sys.maxsize: - raise OverflowError - return (BItem, offset) - else: - raise TypeError(type(fieldname)) - - def rawaddressof(self, BTypePtr, cdata, offset=None): - if isinstance(cdata, CTypesBaseStructOrUnion): - ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) - elif isinstance(cdata, CTypesGenericPtr): - if offset is None or not issubclass(type(cdata)._BItem, - CTypesBaseStructOrUnion): - raise TypeError("unexpected cdata type") - ptr = type(cdata)._to_ctypes(cdata) - elif isinstance(cdata, CTypesGenericArray): - ptr = type(cdata)._to_ctypes(cdata) - else: - raise TypeError("expected a <cdata>") - if offset: - ptr = ctypes.cast( - ctypes.c_void_p( - ctypes.cast(ptr, ctypes.c_void_p).value + offset), - type(ptr)) - return BTypePtr._from_ctypes(ptr) - - -class CTypesLibrary(object): - - def __init__(self, backend, cdll): - self.backend = backend - self.cdll = cdll - - def load_function(self, BType, name): - c_func = getattr(self.cdll, name) - funcobj = BType._from_ctypes(c_func) - funcobj._name = name - return funcobj - - def read_variable(self, BType, name): - try: - ctypes_obj = BType._ctype.in_dll(self.cdll, name) - except AttributeError as e: - raise NotImplementedError(e) - return BType._from_ctypes(ctypes_obj) - - def write_variable(self, BType, name, value): - new_ctypes_obj = BType._to_ctypes(value) - ctypes_obj = BType._ctype.in_dll(self.cdll, name) - ctypes.memmove(ctypes.addressof(ctypes_obj), - ctypes.addressof(new_ctypes_obj), - ctypes.sizeof(BType._ctype)) diff --git a/venv/lib/python3.10/site-packages/cffi/cffi_opcode.py b/venv/lib/python3.10/site-packages/cffi/cffi_opcode.py deleted file mode 100644 index 6421df6..0000000 --- a/venv/lib/python3.10/site-packages/cffi/cffi_opcode.py +++ /dev/null @@ -1,187 +0,0 @@ -from .error import VerificationError - -class CffiOp(object): - def __init__(self, op, arg): - self.op = op - self.arg = arg - - def as_c_expr(self): - if self.op is None: - assert isinstance(self.arg, str) - return '(_cffi_opcode_t)(%s)' % (self.arg,) - classname = CLASS_NAME[self.op] - return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) - - def as_python_bytes(self): - if self.op is None and self.arg.isdigit(): - value = int(self.arg) # non-negative: '-' not in self.arg - if value >= 2**31: - raise OverflowError("cannot emit %r: limited to 2**31-1" - % (self.arg,)) - return format_four_bytes(value) - if isinstance(self.arg, str): - raise VerificationError("cannot emit to Python: %r" % (self.arg,)) - return format_four_bytes((self.arg << 8) | self.op) - - def __str__(self): - classname = CLASS_NAME.get(self.op, self.op) - return '(%s %s)' % (classname, self.arg) - -def format_four_bytes(num): - return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( - (num >> 24) & 0xFF, - (num >> 16) & 0xFF, - (num >> 8) & 0xFF, - (num ) & 0xFF) - -OP_PRIMITIVE = 1 -OP_POINTER = 3 -OP_ARRAY = 5 -OP_OPEN_ARRAY = 7 -OP_STRUCT_UNION = 9 -OP_ENUM = 11 -OP_FUNCTION = 13 -OP_FUNCTION_END = 15 -OP_NOOP = 17 -OP_BITFIELD = 19 -OP_TYPENAME = 21 -OP_CPYTHON_BLTN_V = 23 # varargs -OP_CPYTHON_BLTN_N = 25 # noargs -OP_CPYTHON_BLTN_O = 27 # O (i.e. 
a single arg) -OP_CONSTANT = 29 -OP_CONSTANT_INT = 31 -OP_GLOBAL_VAR = 33 -OP_DLOPEN_FUNC = 35 -OP_DLOPEN_CONST = 37 -OP_GLOBAL_VAR_F = 39 -OP_EXTERN_PYTHON = 41 - -PRIM_VOID = 0 -PRIM_BOOL = 1 -PRIM_CHAR = 2 -PRIM_SCHAR = 3 -PRIM_UCHAR = 4 -PRIM_SHORT = 5 -PRIM_USHORT = 6 -PRIM_INT = 7 -PRIM_UINT = 8 -PRIM_LONG = 9 -PRIM_ULONG = 10 -PRIM_LONGLONG = 11 -PRIM_ULONGLONG = 12 -PRIM_FLOAT = 13 -PRIM_DOUBLE = 14 -PRIM_LONGDOUBLE = 15 - -PRIM_WCHAR = 16 -PRIM_INT8 = 17 -PRIM_UINT8 = 18 -PRIM_INT16 = 19 -PRIM_UINT16 = 20 -PRIM_INT32 = 21 -PRIM_UINT32 = 22 -PRIM_INT64 = 23 -PRIM_UINT64 = 24 -PRIM_INTPTR = 25 -PRIM_UINTPTR = 26 -PRIM_PTRDIFF = 27 -PRIM_SIZE = 28 -PRIM_SSIZE = 29 -PRIM_INT_LEAST8 = 30 -PRIM_UINT_LEAST8 = 31 -PRIM_INT_LEAST16 = 32 -PRIM_UINT_LEAST16 = 33 -PRIM_INT_LEAST32 = 34 -PRIM_UINT_LEAST32 = 35 -PRIM_INT_LEAST64 = 36 -PRIM_UINT_LEAST64 = 37 -PRIM_INT_FAST8 = 38 -PRIM_UINT_FAST8 = 39 -PRIM_INT_FAST16 = 40 -PRIM_UINT_FAST16 = 41 -PRIM_INT_FAST32 = 42 -PRIM_UINT_FAST32 = 43 -PRIM_INT_FAST64 = 44 -PRIM_UINT_FAST64 = 45 -PRIM_INTMAX = 46 -PRIM_UINTMAX = 47 -PRIM_FLOATCOMPLEX = 48 -PRIM_DOUBLECOMPLEX = 49 -PRIM_CHAR16 = 50 -PRIM_CHAR32 = 51 - -_NUM_PRIM = 52 -_UNKNOWN_PRIM = -1 -_UNKNOWN_FLOAT_PRIM = -2 -_UNKNOWN_LONG_DOUBLE = -3 - -_IO_FILE_STRUCT = -1 - -PRIMITIVE_TO_INDEX = { - 'char': PRIM_CHAR, - 'short': PRIM_SHORT, - 'int': PRIM_INT, - 'long': PRIM_LONG, - 'long long': PRIM_LONGLONG, - 'signed char': PRIM_SCHAR, - 'unsigned char': PRIM_UCHAR, - 'unsigned short': PRIM_USHORT, - 'unsigned int': PRIM_UINT, - 'unsigned long': PRIM_ULONG, - 'unsigned long long': PRIM_ULONGLONG, - 'float': PRIM_FLOAT, - 'double': PRIM_DOUBLE, - 'long double': PRIM_LONGDOUBLE, - '_cffi_float_complex_t': PRIM_FLOATCOMPLEX, - '_cffi_double_complex_t': PRIM_DOUBLECOMPLEX, - '_Bool': PRIM_BOOL, - 'wchar_t': PRIM_WCHAR, - 'char16_t': PRIM_CHAR16, - 'char32_t': PRIM_CHAR32, - 'int8_t': PRIM_INT8, - 'uint8_t': PRIM_UINT8, - 'int16_t': PRIM_INT16, - 'uint16_t': PRIM_UINT16, - 'int32_t': PRIM_INT32, - 'uint32_t': PRIM_UINT32, - 'int64_t': PRIM_INT64, - 'uint64_t': PRIM_UINT64, - 'intptr_t': PRIM_INTPTR, - 'uintptr_t': PRIM_UINTPTR, - 'ptrdiff_t': PRIM_PTRDIFF, - 'size_t': PRIM_SIZE, - 'ssize_t': PRIM_SSIZE, - 'int_least8_t': PRIM_INT_LEAST8, - 'uint_least8_t': PRIM_UINT_LEAST8, - 'int_least16_t': PRIM_INT_LEAST16, - 'uint_least16_t': PRIM_UINT_LEAST16, - 'int_least32_t': PRIM_INT_LEAST32, - 'uint_least32_t': PRIM_UINT_LEAST32, - 'int_least64_t': PRIM_INT_LEAST64, - 'uint_least64_t': PRIM_UINT_LEAST64, - 'int_fast8_t': PRIM_INT_FAST8, - 'uint_fast8_t': PRIM_UINT_FAST8, - 'int_fast16_t': PRIM_INT_FAST16, - 'uint_fast16_t': PRIM_UINT_FAST16, - 'int_fast32_t': PRIM_INT_FAST32, - 'uint_fast32_t': PRIM_UINT_FAST32, - 'int_fast64_t': PRIM_INT_FAST64, - 'uint_fast64_t': PRIM_UINT_FAST64, - 'intmax_t': PRIM_INTMAX, - 'uintmax_t': PRIM_UINTMAX, - } - -F_UNION = 0x01 -F_CHECK_FIELDS = 0x02 -F_PACKED = 0x04 -F_EXTERNAL = 0x08 -F_OPAQUE = 0x10 - -G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) - for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', - 'F_EXTERNAL', 'F_OPAQUE']]) - -CLASS_NAME = {} -for _name, _value in list(globals().items()): - if _name.startswith('OP_') and isinstance(_value, int): - CLASS_NAME[_value] = _name[3:] diff --git a/venv/lib/python3.10/site-packages/cffi/commontypes.py b/venv/lib/python3.10/site-packages/cffi/commontypes.py deleted file mode 100644 index d4dae35..0000000 --- a/venv/lib/python3.10/site-packages/cffi/commontypes.py +++ /dev/null @@ -1,82 +0,0 @@ -import sys -from . 
import model -from .error import FFIError - - -COMMON_TYPES = {} - -try: - # fetch "bool" and all simple Windows types - from _cffi_backend import _get_common_types - _get_common_types(COMMON_TYPES) -except ImportError: - pass - -COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') -COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above -COMMON_TYPES['float _Complex'] = '_cffi_float_complex_t' -COMMON_TYPES['double _Complex'] = '_cffi_double_complex_t' - -for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: - if _type.endswith('_t'): - COMMON_TYPES[_type] = _type -del _type - -_CACHE = {} - -def resolve_common_type(parser, commontype): - try: - return _CACHE[commontype] - except KeyError: - cdecl = COMMON_TYPES.get(commontype, commontype) - if not isinstance(cdecl, str): - result, quals = cdecl, 0 # cdecl is already a BaseType - elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: - result, quals = model.PrimitiveType(cdecl), 0 - elif cdecl == 'set-unicode-needed': - raise FFIError("The Windows type %r is only available after " - "you call ffi.set_unicode()" % (commontype,)) - else: - if commontype == cdecl: - raise FFIError( - "Unsupported type: %r. Please look at " - "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " - "and file an issue if you think this type should really " - "be supported." % (commontype,)) - result, quals = parser.parse_type_and_quals(cdecl) # recursive - - assert isinstance(result, model.BaseTypeByIdentity) - _CACHE[commontype] = result, quals - return result, quals - - -# ____________________________________________________________ -# extra types for Windows (most of them are in commontypes.c) - - -def win_common_types(): - return { - "UNICODE_STRING": model.StructType( - "_UNICODE_STRING", - ["Length", - "MaximumLength", - "Buffer"], - [model.PrimitiveType("unsigned short"), - model.PrimitiveType("unsigned short"), - model.PointerType(model.PrimitiveType("wchar_t"))], - [-1, -1, -1]), - "PUNICODE_STRING": "UNICODE_STRING *", - "PCUNICODE_STRING": "const UNICODE_STRING *", - - "TBYTE": "set-unicode-needed", - "TCHAR": "set-unicode-needed", - "LPCTSTR": "set-unicode-needed", - "PCTSTR": "set-unicode-needed", - "LPTSTR": "set-unicode-needed", - "PTSTR": "set-unicode-needed", - "PTBYTE": "set-unicode-needed", - "PTCHAR": "set-unicode-needed", - } - -if sys.platform == 'win32': - COMMON_TYPES.update(win_common_types()) diff --git a/venv/lib/python3.10/site-packages/cffi/cparser.py b/venv/lib/python3.10/site-packages/cffi/cparser.py deleted file mode 100644 index eee83ca..0000000 --- a/venv/lib/python3.10/site-packages/cffi/cparser.py +++ /dev/null @@ -1,1015 +0,0 @@ -from . import model -from .commontypes import COMMON_TYPES, resolve_common_type -from .error import FFIError, CDefError -try: - from . import _pycparser as pycparser -except ImportError: - import pycparser -import weakref, re, sys - -try: - if sys.version_info < (3,): - import thread as _thread - else: - import _thread - lock = _thread.allocate_lock() -except ImportError: - lock = None - -def _workaround_for_static_import_finders(): - # Issue #392: packaging tools like cx_Freeze can not find these - # because pycparser uses exec dynamic import. This is an obscure - # workaround. This function is never called. 
- import pycparser.yacctab - import pycparser.lextab - -CDEF_SOURCE_STRING = "<cdef source string>" -_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", - re.DOTALL | re.MULTILINE) -_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" - r"\b((?:[^\n\\]|\\.)*?)$", - re.DOTALL | re.MULTILINE) -_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE) -_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") -_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") -_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") -_r_words = re.compile(r"\w+|\S") -_parser_cache = None -_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) -_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") -_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") -_r_cdecl = re.compile(r"\b__cdecl\b") -_r_extern_python = re.compile(r'\bextern\s*"' - r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') -_r_star_const_space = re.compile( # matches "* const " - r"[*]\s*((const|volatile|restrict)\b\s*)+") -_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" - r"\.\.\.") -_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") - -def _get_parser(): - global _parser_cache - if _parser_cache is None: - _parser_cache = pycparser.CParser() - return _parser_cache - -def _workaround_for_old_pycparser(csource): - # Workaround for a pycparser issue (fixed between pycparser 2.10 and - # 2.14): "char*const***" gives us a wrong syntax tree, the same as - # for "char***(*const)". This means we can't tell the difference - # afterwards. But "char(*const(***))" gives us the right syntax - # tree. The issue only occurs if there are several stars in - # sequence with no parenthesis inbetween, just possibly qualifiers. - # Attempt to fix it by adding some parentheses in the source: each - # time we see "* const" or "* const *", we add an opening - # parenthesis before each star---the hard part is figuring out where - # to close them. - parts = [] - while True: - match = _r_star_const_space.search(csource) - if not match: - break - #print repr(''.join(parts)+csource), '=>', - parts.append(csource[:match.start()]) - parts.append('('); closing = ')' - parts.append(match.group()) # e.g. 
"* const " - endpos = match.end() - if csource.startswith('*', endpos): - parts.append('('); closing += ')' - level = 0 - i = endpos - while i < len(csource): - c = csource[i] - if c == '(': - level += 1 - elif c == ')': - if level == 0: - break - level -= 1 - elif c in ',;=': - if level == 0: - break - i += 1 - csource = csource[endpos:i] + closing + csource[i:] - #print repr(''.join(parts)+csource) - parts.append(csource) - return ''.join(parts) - -def _preprocess_extern_python(csource): - # input: `extern "Python" int foo(int);` or - # `extern "Python" { int foo(int); }` - # output: - # void __cffi_extern_python_start; - # int foo(int); - # void __cffi_extern_python_stop; - # - # input: `extern "Python+C" int foo(int);` - # output: - # void __cffi_extern_python_plus_c_start; - # int foo(int); - # void __cffi_extern_python_stop; - parts = [] - while True: - match = _r_extern_python.search(csource) - if not match: - break - endpos = match.end() - 1 - #print - #print ''.join(parts)+csource - #print '=>' - parts.append(csource[:match.start()]) - if 'C' in match.group(1): - parts.append('void __cffi_extern_python_plus_c_start; ') - else: - parts.append('void __cffi_extern_python_start; ') - if csource[endpos] == '{': - # grouping variant - closing = csource.find('}', endpos) - if closing < 0: - raise CDefError("'extern \"Python\" {': no '}' found") - if csource.find('{', endpos + 1, closing) >= 0: - raise NotImplementedError("cannot use { } inside a block " - "'extern \"Python\" { ... }'") - parts.append(csource[endpos+1:closing]) - csource = csource[closing+1:] - else: - # non-grouping variant - semicolon = csource.find(';', endpos) - if semicolon < 0: - raise CDefError("'extern \"Python\": no ';' found") - parts.append(csource[endpos:semicolon+1]) - csource = csource[semicolon+1:] - parts.append(' void __cffi_extern_python_stop;') - #print ''.join(parts)+csource - #print - parts.append(csource) - return ''.join(parts) - -def _warn_for_string_literal(csource): - if '"' not in csource: - return - for line in csource.splitlines(): - if '"' in line and not line.lstrip().startswith('#'): - import warnings - warnings.warn("String literal found in cdef() or type source. " - "String literals are ignored here, but you should " - "remove them anyway because some character sequences " - "confuse pre-parsing.") - break - -def _warn_for_non_extern_non_static_global_variable(decl): - if not decl.storage: - import warnings - warnings.warn("Global variable '%s' in cdef(): for consistency " - "with C it should have a storage class specifier " - "(usually 'extern')" % (decl.name,)) - -def _remove_line_directives(csource): - # _r_line_directive matches whole lines, without the final \n, if they - # start with '#line' with some spacing allowed, or '#NUMBER'. This - # function stores them away and replaces them with exactly the string - # '#line@N', where N is the index in the list 'line_directives'. 
- line_directives = [] - def replace(m): - i = len(line_directives) - line_directives.append(m.group()) - return '#line@%d' % i - csource = _r_line_directive.sub(replace, csource) - return csource, line_directives - -def _put_back_line_directives(csource, line_directives): - def replace(m): - s = m.group() - if not s.startswith('#line@'): - raise AssertionError("unexpected #line directive " - "(should have been processed and removed") - return line_directives[int(s[6:])] - return _r_line_directive.sub(replace, csource) - -def _preprocess(csource): - # First, remove the lines of the form '#line N "filename"' because - # the "filename" part could confuse the rest - csource, line_directives = _remove_line_directives(csource) - # Remove comments. NOTE: this only work because the cdef() section - # should not contain any string literals (except in line directives)! - def replace_keeping_newlines(m): - return ' ' + m.group().count('\n') * '\n' - csource = _r_comment.sub(replace_keeping_newlines, csource) - # Remove the "#define FOO x" lines - macros = {} - for match in _r_define.finditer(csource): - macroname, macrovalue = match.groups() - macrovalue = macrovalue.replace('\\\n', '').strip() - macros[macroname] = macrovalue - csource = _r_define.sub('', csource) - # - if pycparser.__version__ < '2.14': - csource = _workaround_for_old_pycparser(csource) - # - # BIG HACK: replace WINAPI or __stdcall with "volatile const". - # It doesn't make sense for the return type of a function to be - # "volatile volatile const", so we abuse it to detect __stdcall... - # Hack number 2 is that "int(volatile *fptr)();" is not valid C - # syntax, so we place the "volatile" before the opening parenthesis. - csource = _r_stdcall2.sub(' volatile volatile const(', csource) - csource = _r_stdcall1.sub(' volatile volatile const ', csource) - csource = _r_cdecl.sub(' ', csource) - # - # Replace `extern "Python"` with start/end markers - csource = _preprocess_extern_python(csource) - # - # Now there should not be any string literal left; warn if we get one - _warn_for_string_literal(csource) - # - # Replace "[...]" with "[__dotdotdotarray__]" - csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) - # - # Replace "...}" with "__dotdotdotNUM__}". This construction should - # occur only at the end of enums; at the end of structs we have "...;}" - # and at the end of vararg functions "...);". Also replace "=...[,}]" - # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when - # giving an unknown value. - matches = list(_r_partial_enum.finditer(csource)) - for number, match in enumerate(reversed(matches)): - p = match.start() - if csource[p] == '=': - p2 = csource.find('...', p, match.end()) - assert p2 > p - csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, - csource[p2+3:]) - else: - assert csource[p:p+3] == '...' - csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, - csource[p+3:]) - # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" - csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) - # Replace "float ..." or "double..." with "__dotdotdotfloat__" - csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) - # Replace all remaining "..." with the same name, "__dotdotdot__", - # which is declared with a typedef for the purpose of C parsing. 
- csource = csource.replace('...', ' __dotdotdot__ ') - # Finally, put back the line directives - csource = _put_back_line_directives(csource, line_directives) - return csource, macros - -def _common_type_names(csource): - # Look in the source for what looks like usages of types from the - # list of common types. A "usage" is approximated here as the - # appearance of the word, minus a "definition" of the type, which - # is the last word in a "typedef" statement. Approximative only - # but should be fine for all the common types. - look_for_words = set(COMMON_TYPES) - look_for_words.add(';') - look_for_words.add(',') - look_for_words.add('(') - look_for_words.add(')') - look_for_words.add('typedef') - words_used = set() - is_typedef = False - paren = 0 - previous_word = '' - for word in _r_words.findall(csource): - if word in look_for_words: - if word == ';': - if is_typedef: - words_used.discard(previous_word) - look_for_words.discard(previous_word) - is_typedef = False - elif word == 'typedef': - is_typedef = True - paren = 0 - elif word == '(': - paren += 1 - elif word == ')': - paren -= 1 - elif word == ',': - if is_typedef and paren == 0: - words_used.discard(previous_word) - look_for_words.discard(previous_word) - else: # word in COMMON_TYPES - words_used.add(word) - previous_word = word - return words_used - - -class Parser(object): - - def __init__(self): - self._declarations = {} - self._included_declarations = set() - self._anonymous_counter = 0 - self._structnode2type = weakref.WeakKeyDictionary() - self._options = {} - self._int_constants = {} - self._recomplete = [] - self._uses_new_feature = None - - def _parse(self, csource): - csource, macros = _preprocess(csource) - # XXX: for more efficiency we would need to poke into the - # internals of CParser... the following registers the - # typedefs, because their presence or absence influences the - # parsing itself (but what they are typedef'ed to plays no role) - ctn = _common_type_names(csource) - typenames = [] - for name in sorted(self._declarations): - if name.startswith('typedef '): - name = name[8:] - typenames.append(name) - ctn.discard(name) - typenames += sorted(ctn) - # - csourcelines = [] - csourcelines.append('# 1 "<cdef automatic initialization code>"') - for typename in typenames: - csourcelines.append('typedef int %s;' % typename) - csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' - ' __dotdotdot__;') - # this forces pycparser to consider the following in the file - # called <cdef source string> from line 1 - csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) - csourcelines.append(csource) - csourcelines.append('') # see test_missing_newline_bug - fullcsource = '\n'.join(csourcelines) - if lock is not None: - lock.acquire() # pycparser is not thread-safe... - try: - ast = _get_parser().parse(fullcsource) - except pycparser.c_parser.ParseError as e: - self.convert_pycparser_error(e, csource) - finally: - if lock is not None: - lock.release() - # csource will be used to find buggy source text - return ast, macros, csource - - def _convert_pycparser_error(self, e, csource): - # xxx look for ":NUM:" at the start of str(e) - # and interpret that as a line number. This will not work if - # the user gives explicit ``# NUM "FILE"`` directives. 
- line = None - msg = str(e) - match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) - if match: - linenum = int(match.group(1), 10) - csourcelines = csource.splitlines() - if 1 <= linenum <= len(csourcelines): - line = csourcelines[linenum-1] - return line - - def convert_pycparser_error(self, e, csource): - line = self._convert_pycparser_error(e, csource) - - msg = str(e) - if line: - msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) - else: - msg = 'parse error\n%s' % (msg,) - raise CDefError(msg) - - def parse(self, csource, override=False, packed=False, pack=None, - dllexport=False): - if packed: - if packed != True: - raise ValueError("'packed' should be False or True; use " - "'pack' to give another value") - if pack: - raise ValueError("cannot give both 'pack' and 'packed'") - pack = 1 - elif pack: - if pack & (pack - 1): - raise ValueError("'pack' must be a power of two, not %r" % - (pack,)) - else: - pack = 0 - prev_options = self._options - try: - self._options = {'override': override, - 'packed': pack, - 'dllexport': dllexport} - self._internal_parse(csource) - finally: - self._options = prev_options - - def _internal_parse(self, csource): - ast, macros, csource = self._parse(csource) - # add the macros - self._process_macros(macros) - # find the first "__dotdotdot__" and use that as a separator - # between the repeated typedefs and the real csource - iterator = iter(ast.ext) - for decl in iterator: - if decl.name == '__dotdotdot__': - break - else: - assert 0 - current_decl = None - # - try: - self._inside_extern_python = '__cffi_extern_python_stop' - for decl in iterator: - current_decl = decl - if isinstance(decl, pycparser.c_ast.Decl): - self._parse_decl(decl) - elif isinstance(decl, pycparser.c_ast.Typedef): - if not decl.name: - raise CDefError("typedef does not declare any name", - decl) - quals = 0 - if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and - decl.type.type.names[-1].startswith('__dotdotdot')): - realtype = self._get_unknown_type(decl) - elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and - isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and - isinstance(decl.type.type.type, - pycparser.c_ast.IdentifierType) and - decl.type.type.type.names[-1].startswith('__dotdotdot')): - realtype = self._get_unknown_ptr_type(decl) - else: - realtype, quals = self._get_type_and_quals( - decl.type, name=decl.name, partial_length_ok=True, - typedef_example="*(%s *)0" % (decl.name,)) - self._declare('typedef ' + decl.name, realtype, quals=quals) - elif decl.__class__.__name__ == 'Pragma': - # skip pragma, only in pycparser 2.15 - import warnings - warnings.warn( - "#pragma in cdef() are entirely ignored. " - "They should be removed for now, otherwise your " - "code might behave differently in a future version " - "of CFFI if #pragma support gets added. 
Note that " - "'#pragma pack' needs to be replaced with the " - "'packed' keyword argument to cdef().") - else: - raise CDefError("unexpected <%s>: this construct is valid " - "C but not valid in cdef()" % - decl.__class__.__name__, decl) - except CDefError as e: - if len(e.args) == 1: - e.args = e.args + (current_decl,) - raise - except FFIError as e: - msg = self._convert_pycparser_error(e, csource) - if msg: - e.args = (e.args[0] + "\n *** Err: %s" % msg,) - raise - - def _add_constants(self, key, val): - if key in self._int_constants: - if self._int_constants[key] == val: - return # ignore identical double declarations - raise FFIError( - "multiple declarations of constant: %s" % (key,)) - self._int_constants[key] = val - - def _add_integer_constant(self, name, int_str): - int_str = int_str.lower().rstrip("ul") - neg = int_str.startswith('-') - if neg: - int_str = int_str[1:] - # "010" is not valid oct in py3 - if (int_str.startswith("0") and int_str != '0' - and not int_str.startswith("0x")): - int_str = "0o" + int_str[1:] - pyvalue = int(int_str, 0) - if neg: - pyvalue = -pyvalue - self._add_constants(name, pyvalue) - self._declare('macro ' + name, pyvalue) - - def _process_macros(self, macros): - for key, value in macros.items(): - value = value.strip() - if _r_int_literal.match(value): - self._add_integer_constant(key, value) - elif value == '...': - self._declare('macro ' + key, value) - else: - raise CDefError( - 'only supports one of the following syntax:\n' - ' #define %s ... (literally dot-dot-dot)\n' - ' #define %s NUMBER (with NUMBER an integer' - ' constant, decimal/hex/octal)\n' - 'got:\n' - ' #define %s %s' - % (key, key, key, value)) - - def _declare_function(self, tp, quals, decl): - tp = self._get_type_pointer(tp, quals) - if self._options.get('dllexport'): - tag = 'dllexport_python ' - elif self._inside_extern_python == '__cffi_extern_python_start': - tag = 'extern_python ' - elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': - tag = 'extern_python_plus_c ' - else: - tag = 'function ' - self._declare(tag + decl.name, tp) - - def _parse_decl(self, decl): - node = decl.type - if isinstance(node, pycparser.c_ast.FuncDecl): - tp, quals = self._get_type_and_quals(node, name=decl.name) - assert isinstance(tp, model.RawFunctionType) - self._declare_function(tp, quals, decl) - else: - if isinstance(node, pycparser.c_ast.Struct): - self._get_struct_union_enum_type('struct', node) - elif isinstance(node, pycparser.c_ast.Union): - self._get_struct_union_enum_type('union', node) - elif isinstance(node, pycparser.c_ast.Enum): - self._get_struct_union_enum_type('enum', node) - elif not decl.name: - raise CDefError("construct does not declare any variable", - decl) - # - if decl.name: - tp, quals = self._get_type_and_quals(node, - partial_length_ok=True) - if tp.is_raw_function: - self._declare_function(tp, quals, decl) - elif (tp.is_integer_type() and - hasattr(decl, 'init') and - hasattr(decl.init, 'value') and - _r_int_literal.match(decl.init.value)): - self._add_integer_constant(decl.name, decl.init.value) - elif (tp.is_integer_type() and - isinstance(decl.init, pycparser.c_ast.UnaryOp) and - decl.init.op == '-' and - hasattr(decl.init.expr, 'value') and - _r_int_literal.match(decl.init.expr.value)): - self._add_integer_constant(decl.name, - '-' + decl.init.expr.value) - elif (tp is model.void_type and - decl.name.startswith('__cffi_extern_python_')): - # hack: `extern "Python"` in the C source is replaced - # with "void __cffi_extern_python_start;" and - # 
"void __cffi_extern_python_stop;" - self._inside_extern_python = decl.name - else: - if self._inside_extern_python !='__cffi_extern_python_stop': - raise CDefError( - "cannot declare constants or " - "variables with 'extern \"Python\"'") - if (quals & model.Q_CONST) and not tp.is_array_type: - self._declare('constant ' + decl.name, tp, quals=quals) - else: - _warn_for_non_extern_non_static_global_variable(decl) - self._declare('variable ' + decl.name, tp, quals=quals) - - def parse_type(self, cdecl): - return self.parse_type_and_quals(cdecl)[0] - - def parse_type_and_quals(self, cdecl): - ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] - assert not macros - exprnode = ast.ext[-1].type.args.params[0] - if isinstance(exprnode, pycparser.c_ast.ID): - raise CDefError("unknown identifier '%s'" % (exprnode.name,)) - return self._get_type_and_quals(exprnode.type) - - def _declare(self, name, obj, included=False, quals=0): - if name in self._declarations: - prevobj, prevquals = self._declarations[name] - if prevobj is obj and prevquals == quals: - return - if not self._options.get('override'): - raise FFIError( - "multiple declarations of %s (for interactive usage, " - "try cdef(xx, override=True))" % (name,)) - assert '__dotdotdot__' not in name.split() - self._declarations[name] = (obj, quals) - if included: - self._included_declarations.add(obj) - - def _extract_quals(self, type): - quals = 0 - if isinstance(type, (pycparser.c_ast.TypeDecl, - pycparser.c_ast.PtrDecl)): - if 'const' in type.quals: - quals |= model.Q_CONST - if 'volatile' in type.quals: - quals |= model.Q_VOLATILE - if 'restrict' in type.quals: - quals |= model.Q_RESTRICT - return quals - - def _get_type_pointer(self, type, quals, declname=None): - if isinstance(type, model.RawFunctionType): - return type.as_function_pointer() - if (isinstance(type, model.StructOrUnionOrEnum) and - type.name.startswith('$') and type.name[1:].isdigit() and - type.forcename is None and declname is not None): - return model.NamedPointerType(type, declname, quals) - return model.PointerType(type, quals) - - def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, - typedef_example=None): - # first, dereference typedefs, if we have it already parsed, we're good - if (isinstance(typenode, pycparser.c_ast.TypeDecl) and - isinstance(typenode.type, pycparser.c_ast.IdentifierType) and - len(typenode.type.names) == 1 and - ('typedef ' + typenode.type.names[0]) in self._declarations): - tp, quals = self._declarations['typedef ' + typenode.type.names[0]] - quals |= self._extract_quals(typenode) - return tp, quals - # - if isinstance(typenode, pycparser.c_ast.ArrayDecl): - # array type - if typenode.dim is None: - length = None - else: - length = self._parse_constant( - typenode.dim, partial_length_ok=partial_length_ok) - # a hack: in 'typedef int foo_t[...][...];', don't use '...' as - # the length but use directly the C expression that would be - # generated by recompiler.py. 
This lets the typedef be used in - # many more places within recompiler.py - if typedef_example is not None: - if length == '...': - length = '_cffi_array_len(%s)' % (typedef_example,) - typedef_example = "*" + typedef_example - # - tp, quals = self._get_type_and_quals(typenode.type, - partial_length_ok=partial_length_ok, - typedef_example=typedef_example) - return model.ArrayType(tp, length), quals - # - if isinstance(typenode, pycparser.c_ast.PtrDecl): - # pointer type - itemtype, itemquals = self._get_type_and_quals(typenode.type) - tp = self._get_type_pointer(itemtype, itemquals, declname=name) - quals = self._extract_quals(typenode) - return tp, quals - # - if isinstance(typenode, pycparser.c_ast.TypeDecl): - quals = self._extract_quals(typenode) - type = typenode.type - if isinstance(type, pycparser.c_ast.IdentifierType): - # assume a primitive type. get it from .names, but reduce - # synonyms to a single chosen combination - names = list(type.names) - if names != ['signed', 'char']: # keep this unmodified - prefixes = {} - while names: - name = names[0] - if name in ('short', 'long', 'signed', 'unsigned'): - prefixes[name] = prefixes.get(name, 0) + 1 - del names[0] - else: - break - # ignore the 'signed' prefix below, and reorder the others - newnames = [] - for prefix in ('unsigned', 'short', 'long'): - for i in range(prefixes.get(prefix, 0)): - newnames.append(prefix) - if not names: - names = ['int'] # implicitly - if names == ['int']: # but kill it if 'short' or 'long' - if 'short' in prefixes or 'long' in prefixes: - names = [] - names = newnames + names - ident = ' '.join(names) - if ident == 'void': - return model.void_type, quals - if ident == '__dotdotdot__': - raise FFIError(':%d: bad usage of "..."' % - typenode.coord.line) - tp0, quals0 = resolve_common_type(self, ident) - return tp0, (quals | quals0) - # - if isinstance(type, pycparser.c_ast.Struct): - # 'struct foobar' - tp = self._get_struct_union_enum_type('struct', type, name) - return tp, quals - # - if isinstance(type, pycparser.c_ast.Union): - # 'union foobar' - tp = self._get_struct_union_enum_type('union', type, name) - return tp, quals - # - if isinstance(type, pycparser.c_ast.Enum): - # 'enum foobar' - tp = self._get_struct_union_enum_type('enum', type, name) - return tp, quals - # - if isinstance(typenode, pycparser.c_ast.FuncDecl): - # a function type - return self._parse_function_type(typenode, name), 0 - # - # nested anonymous structs or unions end up here - if isinstance(typenode, pycparser.c_ast.Struct): - return self._get_struct_union_enum_type('struct', typenode, name, - nested=True), 0 - if isinstance(typenode, pycparser.c_ast.Union): - return self._get_struct_union_enum_type('union', typenode, name, - nested=True), 0 - # - raise FFIError(":%d: bad or unsupported type declaration" % - typenode.coord.line) - - def _parse_function_type(self, typenode, funcname=None): - params = list(getattr(typenode.args, 'params', [])) - for i, arg in enumerate(params): - if not hasattr(arg, 'type'): - raise CDefError("%s arg %d: unknown type '%s'" - " (if you meant to use the old C syntax of giving" - " untyped arguments, it is not supported)" - % (funcname or 'in expression', i + 1, - getattr(arg, 'name', '?'))) - ellipsis = ( - len(params) > 0 and - isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and - isinstance(params[-1].type.type, - pycparser.c_ast.IdentifierType) and - params[-1].type.type.names == ['__dotdotdot__']) - if ellipsis: - params.pop() - if not params: - raise CDefError( - "%s: a function with 
only '(...)' as argument" - " is not correct C" % (funcname or 'in expression')) - args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) - for argdeclnode in params] - if not ellipsis and args == [model.void_type]: - args = [] - result, quals = self._get_type_and_quals(typenode.type) - # the 'quals' on the result type are ignored. HACK: we absure them - # to detect __stdcall functions: we textually replace "__stdcall" - # with "volatile volatile const" above. - abi = None - if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway - if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: - abi = '__stdcall' - return model.RawFunctionType(tuple(args), result, ellipsis, abi) - - def _as_func_arg(self, type, quals): - if isinstance(type, model.ArrayType): - return model.PointerType(type.item, quals) - elif isinstance(type, model.RawFunctionType): - return type.as_function_pointer() - else: - return type - - def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): - # First, a level of caching on the exact 'type' node of the AST. - # This is obscure, but needed because pycparser "unrolls" declarations - # such as "typedef struct { } foo_t, *foo_p" and we end up with - # an AST that is not a tree, but a DAG, with the "type" node of the - # two branches foo_t and foo_p of the trees being the same node. - # It's a bit silly but detecting "DAG-ness" in the AST tree seems - # to be the only way to distinguish this case from two independent - # structs. See test_struct_with_two_usages. - try: - return self._structnode2type[type] - except KeyError: - pass - # - # Note that this must handle parsing "struct foo" any number of - # times and always return the same StructType object. Additionally, - # one of these times (not necessarily the first), the fields of - # the struct can be specified with "struct foo { ...fields... }". - # If no name is given, then we have to create a new anonymous struct - # with no caching; in this case, the fields are either specified - # right now or never. - # - force_name = name - name = type.name - # - # get the type or create it if needed - if name is None: - # 'force_name' is used to guess a more readable name for - # anonymous structs, for the common case "typedef struct { } foo". - if force_name is not None: - explicit_name = '$%s' % force_name - else: - self._anonymous_counter += 1 - explicit_name = '$%d' % self._anonymous_counter - tp = None - else: - explicit_name = name - key = '%s %s' % (kind, name) - tp, _ = self._declarations.get(key, (None, None)) - # - if tp is None: - if kind == 'struct': - tp = model.StructType(explicit_name, None, None, None) - elif kind == 'union': - tp = model.UnionType(explicit_name, None, None, None) - elif kind == 'enum': - if explicit_name == '__dotdotdot__': - raise CDefError("Enums cannot be declared with ...") - tp = self._build_enum_type(explicit_name, type.values) - else: - raise AssertionError("kind = %r" % (kind,)) - if name is not None: - self._declare(key, tp) - else: - if kind == 'enum' and type.values is not None: - raise NotImplementedError( - "enum %s: the '{}' declaration should appear on the first " - "time the enum is mentioned, not later" % explicit_name) - if not tp.forcename: - tp.force_the_name(force_name) - if tp.forcename and '$' in tp.name: - self._declare('anonymous %s' % tp.forcename, tp) - # - self._structnode2type[type] = tp - # - # enums: done here - if kind == 'enum': - return tp - # - # is there a 'type.decls'? 
If yes, then this is the place in the - # C sources that declare the fields. If no, then just return the - # existing type, possibly still incomplete. - if type.decls is None: - return tp - # - if tp.fldnames is not None: - raise CDefError("duplicate declaration of struct %s" % name) - fldnames = [] - fldtypes = [] - fldbitsize = [] - fldquals = [] - for decl in type.decls: - if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and - ''.join(decl.type.names) == '__dotdotdot__'): - # XXX pycparser is inconsistent: 'names' should be a list - # of strings, but is sometimes just one string. Use - # str.join() as a way to cope with both. - self._make_partial(tp, nested) - continue - if decl.bitsize is None: - bitsize = -1 - else: - bitsize = self._parse_constant(decl.bitsize) - self._partial_length = False - type, fqual = self._get_type_and_quals(decl.type, - partial_length_ok=True) - if self._partial_length: - self._make_partial(tp, nested) - if isinstance(type, model.StructType) and type.partial: - self._make_partial(tp, nested) - fldnames.append(decl.name or '') - fldtypes.append(type) - fldbitsize.append(bitsize) - fldquals.append(fqual) - tp.fldnames = tuple(fldnames) - tp.fldtypes = tuple(fldtypes) - tp.fldbitsize = tuple(fldbitsize) - tp.fldquals = tuple(fldquals) - if fldbitsize != [-1] * len(fldbitsize): - if isinstance(tp, model.StructType) and tp.partial: - raise NotImplementedError("%s: using both bitfields and '...;'" - % (tp,)) - tp.packed = self._options.get('packed') - if tp.completed: # must be re-completed: it is not opaque any more - tp.completed = 0 - self._recomplete.append(tp) - return tp - - def _make_partial(self, tp, nested): - if not isinstance(tp, model.StructOrUnion): - raise CDefError("%s cannot be partial" % (tp,)) - if not tp.has_c_name() and not nested: - raise NotImplementedError("%s is partial but has no C name" %(tp,)) - tp.partial = True - - def _parse_constant(self, exprnode, partial_length_ok=False): - # for now, limited to expressions that are an immediate number - # or positive/negative number - if isinstance(exprnode, pycparser.c_ast.Constant): - s = exprnode.value - if '0' <= s[0] <= '9': - s = s.rstrip('uUlL') - try: - if s.startswith('0'): - return int(s, 8) - else: - return int(s, 10) - except ValueError: - if len(s) > 1: - if s.lower()[0:2] == '0x': - return int(s, 16) - elif s.lower()[0:2] == '0b': - return int(s, 2) - raise CDefError("invalid constant %r" % (s,)) - elif s[0] == "'" and s[-1] == "'" and ( - len(s) == 3 or (len(s) == 4 and s[1] == "\\")): - return ord(s[-2]) - else: - raise CDefError("invalid constant %r" % (s,)) - # - if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and - exprnode.op == '+'): - return self._parse_constant(exprnode.expr) - # - if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and - exprnode.op == '-'): - return -self._parse_constant(exprnode.expr) - # load previously defined int constant - if (isinstance(exprnode, pycparser.c_ast.ID) and - exprnode.name in self._int_constants): - return self._int_constants[exprnode.name] - # - if (isinstance(exprnode, pycparser.c_ast.ID) and - exprnode.name == '__dotdotdotarray__'): - if partial_length_ok: - self._partial_length = True - return '...' 
- raise FFIError(":%d: unsupported '[...]' here, cannot derive " - "the actual array length in this context" - % exprnode.coord.line) - # - if isinstance(exprnode, pycparser.c_ast.BinaryOp): - left = self._parse_constant(exprnode.left) - right = self._parse_constant(exprnode.right) - if exprnode.op == '+': - return left + right - elif exprnode.op == '-': - return left - right - elif exprnode.op == '*': - return left * right - elif exprnode.op == '/': - return self._c_div(left, right) - elif exprnode.op == '%': - return left - self._c_div(left, right) * right - elif exprnode.op == '<<': - return left << right - elif exprnode.op == '>>': - return left >> right - elif exprnode.op == '&': - return left & right - elif exprnode.op == '|': - return left | right - elif exprnode.op == '^': - return left ^ right - # - raise FFIError(":%d: unsupported expression: expected a " - "simple numeric constant" % exprnode.coord.line) - - def _c_div(self, a, b): - result = a // b - if ((a < 0) ^ (b < 0)) and (a % b) != 0: - result += 1 - return result - - def _build_enum_type(self, explicit_name, decls): - if decls is not None: - partial = False - enumerators = [] - enumvalues = [] - nextenumvalue = 0 - for enum in decls.enumerators: - if _r_enum_dotdotdot.match(enum.name): - partial = True - continue - if enum.value is not None: - nextenumvalue = self._parse_constant(enum.value) - enumerators.append(enum.name) - enumvalues.append(nextenumvalue) - self._add_constants(enum.name, nextenumvalue) - nextenumvalue += 1 - enumerators = tuple(enumerators) - enumvalues = tuple(enumvalues) - tp = model.EnumType(explicit_name, enumerators, enumvalues) - tp.partial = partial - else: # opaque enum - tp = model.EnumType(explicit_name, (), ()) - return tp - - def include(self, other): - for name, (tp, quals) in other._declarations.items(): - if name.startswith('anonymous $enum_$'): - continue # fix for test_anonymous_enum_include - kind = name.split(' ', 1)[0] - if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): - self._declare(name, tp, included=True, quals=quals) - for k, v in other._int_constants.items(): - self._add_constants(k, v) - - def _get_unknown_type(self, decl): - typenames = decl.type.type.names - if typenames == ['__dotdotdot__']: - return model.unknown_type(decl.name) - - if typenames == ['__dotdotdotint__']: - if self._uses_new_feature is None: - self._uses_new_feature = "'typedef int... %s'" % decl.name - return model.UnknownIntegerType(decl.name) - - if typenames == ['__dotdotdotfloat__']: - # note: not for 'long double' so far - if self._uses_new_feature is None: - self._uses_new_feature = "'typedef float... %s'" % decl.name - return model.UnknownFloatType(decl.name) - - raise FFIError(':%d: unsupported usage of "..." in typedef' - % decl.coord.line) - - def _get_unknown_ptr_type(self, decl): - if decl.type.type.type.names == ['__dotdotdot__']: - return model.unknown_ptr_type(decl.name) - raise FFIError(':%d: unsupported usage of "..." 
in typedef' - % decl.coord.line) diff --git a/venv/lib/python3.10/site-packages/cffi/error.py b/venv/lib/python3.10/site-packages/cffi/error.py deleted file mode 100644 index 0a27247..0000000 --- a/venv/lib/python3.10/site-packages/cffi/error.py +++ /dev/null @@ -1,31 +0,0 @@ - -class FFIError(Exception): - __module__ = 'cffi' - -class CDefError(Exception): - __module__ = 'cffi' - def __str__(self): - try: - current_decl = self.args[1] - filename = current_decl.coord.file - linenum = current_decl.coord.line - prefix = '%s:%d: ' % (filename, linenum) - except (AttributeError, TypeError, IndexError): - prefix = '' - return '%s%s' % (prefix, self.args[0]) - -class VerificationError(Exception): - """ An error raised when verification fails - """ - __module__ = 'cffi' - -class VerificationMissing(Exception): - """ An error raised when incomplete structures are passed into - cdef, but no verification has been done - """ - __module__ = 'cffi' - -class PkgConfigError(Exception): - """ An error raised for missing modules in pkg-config - """ - __module__ = 'cffi' diff --git a/venv/lib/python3.10/site-packages/cffi/ffiplatform.py b/venv/lib/python3.10/site-packages/cffi/ffiplatform.py deleted file mode 100644 index adca28f..0000000 --- a/venv/lib/python3.10/site-packages/cffi/ffiplatform.py +++ /dev/null @@ -1,113 +0,0 @@ -import sys, os -from .error import VerificationError - - -LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', - 'extra_objects', 'depends'] - -def get_extension(srcfilename, modname, sources=(), **kwds): - from cffi._shimmed_dist_utils import Extension - allsources = [srcfilename] - for src in sources: - allsources.append(os.path.normpath(src)) - return Extension(name=modname, sources=allsources, **kwds) - -def compile(tmpdir, ext, compiler_verbose=0, debug=None): - """Compile a C extension module using distutils.""" - - saved_environ = os.environ.copy() - try: - outputfilename = _build(tmpdir, ext, compiler_verbose, debug) - outputfilename = os.path.abspath(outputfilename) - finally: - # workaround for a distutils bugs where some env vars can - # become longer and longer every time it is used - for key, value in saved_environ.items(): - if os.environ.get(key) != value: - os.environ[key] = value - return outputfilename - -def _build(tmpdir, ext, compiler_verbose=0, debug=None): - # XXX compact but horrible :-( - from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity - - dist = Distribution({'ext_modules': [ext]}) - dist.parse_config_files() - options = dist.get_option_dict('build_ext') - if debug is None: - debug = sys.flags.debug - options['debug'] = ('ffiplatform', debug) - options['force'] = ('ffiplatform', True) - options['build_lib'] = ('ffiplatform', tmpdir) - options['build_temp'] = ('ffiplatform', tmpdir) - # - try: - old_level = set_threshold(0) or 0 - try: - set_verbosity(compiler_verbose) - dist.run_command('build_ext') - cmd_obj = dist.get_command_obj('build_ext') - [soname] = cmd_obj.get_outputs() - finally: - set_threshold(old_level) - except (CompileError, LinkError) as e: - raise VerificationError('%s: %s' % (e.__class__.__name__, e)) - # - return soname - -try: - from os.path import samefile -except ImportError: - def samefile(f1, f2): - return os.path.abspath(f1) == os.path.abspath(f2) - -def maybe_relative_path(path): - if not os.path.isabs(path): - return path # already relative - dir = path - names = [] - while True: - prevdir = dir - dir, name = os.path.split(prevdir) - if dir == prevdir or not 
dir: - return path # failed to make it relative - names.append(name) - try: - if samefile(dir, os.curdir): - names.reverse() - return os.path.join(*names) - except OSError: - pass - -# ____________________________________________________________ - -try: - int_or_long = (int, long) - import cStringIO -except NameError: - int_or_long = int # Python 3 - import io as cStringIO - -def _flatten(x, f): - if isinstance(x, str): - f.write('%ds%s' % (len(x), x)) - elif isinstance(x, dict): - keys = sorted(x.keys()) - f.write('%dd' % len(keys)) - for key in keys: - _flatten(key, f) - _flatten(x[key], f) - elif isinstance(x, (list, tuple)): - f.write('%dl' % len(x)) - for value in x: - _flatten(value, f) - elif isinstance(x, int_or_long): - f.write('%di' % (x,)) - else: - raise TypeError( - "the keywords to verify() contains unsupported object %r" % (x,)) - -def flatten(x): - f = cStringIO.StringIO() - _flatten(x, f) - return f.getvalue() diff --git a/venv/lib/python3.10/site-packages/cffi/lock.py b/venv/lib/python3.10/site-packages/cffi/lock.py deleted file mode 100644 index db91b71..0000000 --- a/venv/lib/python3.10/site-packages/cffi/lock.py +++ /dev/null @@ -1,30 +0,0 @@ -import sys - -if sys.version_info < (3,): - try: - from thread import allocate_lock - except ImportError: - from dummy_thread import allocate_lock -else: - try: - from _thread import allocate_lock - except ImportError: - from _dummy_thread import allocate_lock - - -##import sys -##l1 = allocate_lock - -##class allocate_lock(object): -## def __init__(self): -## self._real = l1() -## def __enter__(self): -## for i in range(4, 0, -1): -## print sys._getframe(i).f_code -## print -## return self._real.__enter__() -## def __exit__(self, *args): -## return self._real.__exit__(*args) -## def acquire(self, f): -## assert f is False -## return self._real.acquire(f) diff --git a/venv/lib/python3.10/site-packages/cffi/model.py b/venv/lib/python3.10/site-packages/cffi/model.py deleted file mode 100644 index e5f4cae..0000000 --- a/venv/lib/python3.10/site-packages/cffi/model.py +++ /dev/null @@ -1,618 +0,0 @@ -import types -import weakref - -from .lock import allocate_lock -from .error import CDefError, VerificationError, VerificationMissing - -# type qualifiers -Q_CONST = 0x01 -Q_RESTRICT = 0x02 -Q_VOLATILE = 0x04 - -def qualify(quals, replace_with): - if quals & Q_CONST: - replace_with = ' const ' + replace_with.lstrip() - if quals & Q_VOLATILE: - replace_with = ' volatile ' + replace_with.lstrip() - if quals & Q_RESTRICT: - # It seems that __restrict is supported by gcc and msvc. - # If you hit some different compiler, add a #define in - # _cffi_include.h for it (and in its copies, documented there) - replace_with = ' __restrict ' + replace_with.lstrip() - return replace_with - - -class BaseTypeByIdentity(object): - is_array_type = False - is_raw_function = False - - def get_c_name(self, replace_with='', context='a C file', quals=0): - result = self.c_name_with_marker - assert result.count('&') == 1 - # some logic duplication with ffi.getctype()... 
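The _c_div helper in the parser code above exists because C integer division truncates toward zero while Python's // floors. A minimal standalone sketch (the name c_div and the sample values are illustrative, not part of the diff):

    def c_div(a, b):
        # mirror C semantics: truncate the quotient toward zero
        result = a // b
        if ((a < 0) ^ (b < 0)) and (a % b) != 0:
            result += 1
        return result

    assert -7 // 2 == -4                   # Python floor division
    assert c_div(-7, 2) == -3              # what a C compiler computes
    assert -7 - c_div(-7, 2) * 2 == -1     # the matching C '%' result

This is also why the constant folder above evaluates '%' as left - _c_div(left, right) * right rather than using Python's own modulo.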
:-( - replace_with = replace_with.strip() - if replace_with: - if replace_with.startswith('*') and '&[' in result: - replace_with = '(%s)' % replace_with - elif not replace_with[0] in '[(': - replace_with = ' ' + replace_with - replace_with = qualify(quals, replace_with) - result = result.replace('&', replace_with) - if '$' in result: - raise VerificationError( - "cannot generate '%s' in %s: unknown type name" - % (self._get_c_name(), context)) - return result - - def _get_c_name(self): - return self.c_name_with_marker.replace('&', '') - - def has_c_name(self): - return '$' not in self._get_c_name() - - def is_integer_type(self): - return False - - def get_cached_btype(self, ffi, finishlist, can_delay=False): - try: - BType = ffi._cached_btypes[self] - except KeyError: - BType = self.build_backend_type(ffi, finishlist) - BType2 = ffi._cached_btypes.setdefault(self, BType) - assert BType2 is BType - return BType - - def __repr__(self): - return '<%s>' % (self._get_c_name(),) - - def _get_items(self): - return [(name, getattr(self, name)) for name in self._attrs_] - - -class BaseType(BaseTypeByIdentity): - - def __eq__(self, other): - return (self.__class__ == other.__class__ and - self._get_items() == other._get_items()) - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.__class__, tuple(self._get_items()))) - - -class VoidType(BaseType): - _attrs_ = () - - def __init__(self): - self.c_name_with_marker = 'void&' - - def build_backend_type(self, ffi, finishlist): - return global_cache(self, ffi, 'new_void_type') - -void_type = VoidType() - - -class BasePrimitiveType(BaseType): - def is_complex_type(self): - return False - - -class PrimitiveType(BasePrimitiveType): - _attrs_ = ('name',) - - ALL_PRIMITIVE_TYPES = { - 'char': 'c', - 'short': 'i', - 'int': 'i', - 'long': 'i', - 'long long': 'i', - 'signed char': 'i', - 'unsigned char': 'i', - 'unsigned short': 'i', - 'unsigned int': 'i', - 'unsigned long': 'i', - 'unsigned long long': 'i', - 'float': 'f', - 'double': 'f', - 'long double': 'f', - '_cffi_float_complex_t': 'j', - '_cffi_double_complex_t': 'j', - '_Bool': 'i', - # the following types are not primitive in the C sense - 'wchar_t': 'c', - 'char16_t': 'c', - 'char32_t': 'c', - 'int8_t': 'i', - 'uint8_t': 'i', - 'int16_t': 'i', - 'uint16_t': 'i', - 'int32_t': 'i', - 'uint32_t': 'i', - 'int64_t': 'i', - 'uint64_t': 'i', - 'int_least8_t': 'i', - 'uint_least8_t': 'i', - 'int_least16_t': 'i', - 'uint_least16_t': 'i', - 'int_least32_t': 'i', - 'uint_least32_t': 'i', - 'int_least64_t': 'i', - 'uint_least64_t': 'i', - 'int_fast8_t': 'i', - 'uint_fast8_t': 'i', - 'int_fast16_t': 'i', - 'uint_fast16_t': 'i', - 'int_fast32_t': 'i', - 'uint_fast32_t': 'i', - 'int_fast64_t': 'i', - 'uint_fast64_t': 'i', - 'intptr_t': 'i', - 'uintptr_t': 'i', - 'intmax_t': 'i', - 'uintmax_t': 'i', - 'ptrdiff_t': 'i', - 'size_t': 'i', - 'ssize_t': 'i', - } - - def __init__(self, name): - assert name in self.ALL_PRIMITIVE_TYPES - self.name = name - self.c_name_with_marker = name + '&' - - def is_char_type(self): - return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' - def is_integer_type(self): - return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' - def is_float_type(self): - return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' - def is_complex_type(self): - return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' - - def build_backend_type(self, ffi, finishlist): - return global_cache(self, ffi, 'new_primitive_type', self.name) - - -class UnknownIntegerType(BasePrimitiveType): - 
_attrs_ = ('name',) - - def __init__(self, name): - self.name = name - self.c_name_with_marker = name + '&' - - def is_integer_type(self): - return True - - def build_backend_type(self, ffi, finishlist): - raise NotImplementedError("integer type '%s' can only be used after " - "compilation" % self.name) - -class UnknownFloatType(BasePrimitiveType): - _attrs_ = ('name', ) - - def __init__(self, name): - self.name = name - self.c_name_with_marker = name + '&' - - def build_backend_type(self, ffi, finishlist): - raise NotImplementedError("float type '%s' can only be used after " - "compilation" % self.name) - - -class BaseFunctionType(BaseType): - _attrs_ = ('args', 'result', 'ellipsis', 'abi') - - def __init__(self, args, result, ellipsis, abi=None): - self.args = args - self.result = result - self.ellipsis = ellipsis - self.abi = abi - # - reprargs = [arg._get_c_name() for arg in self.args] - if self.ellipsis: - reprargs.append('...') - reprargs = reprargs or ['void'] - replace_with = self._base_pattern % (', '.join(reprargs),) - if abi is not None: - replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] - self.c_name_with_marker = ( - self.result.c_name_with_marker.replace('&', replace_with)) - - -class RawFunctionType(BaseFunctionType): - # Corresponds to a C type like 'int(int)', which is the C type of - # a function, but not a pointer-to-function. The backend has no - # notion of such a type; it's used temporarily by parsing. - _base_pattern = '(&)(%s)' - is_raw_function = True - - def build_backend_type(self, ffi, finishlist): - raise CDefError("cannot render the type %r: it is a function " - "type, not a pointer-to-function type" % (self,)) - - def as_function_pointer(self): - return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) - - -class FunctionPtrType(BaseFunctionType): - _base_pattern = '(*&)(%s)' - - def build_backend_type(self, ffi, finishlist): - result = self.result.get_cached_btype(ffi, finishlist) - args = [] - for tp in self.args: - args.append(tp.get_cached_btype(ffi, finishlist)) - abi_args = () - if self.abi == "__stdcall": - if not self.ellipsis: # __stdcall ignored for variadic funcs - try: - abi_args = (ffi._backend.FFI_STDCALL,) - except AttributeError: - pass - return global_cache(self, ffi, 'new_function_type', - tuple(args), result, self.ellipsis, *abi_args) - - def as_raw_function(self): - return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) - - -class PointerType(BaseType): - _attrs_ = ('totype', 'quals') - - def __init__(self, totype, quals=0): - self.totype = totype - self.quals = quals - extra = " *&" - if totype.is_array_type: - extra = "(%s)" % (extra.lstrip(),) - extra = qualify(quals, extra) - self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) - - def build_backend_type(self, ffi, finishlist): - BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) - return global_cache(self, ffi, 'new_pointer_type', BItem) - -voidp_type = PointerType(void_type) - -def ConstPointerType(totype): - return PointerType(totype, Q_CONST) - -const_voidp_type = ConstPointerType(void_type) - - -class NamedPointerType(PointerType): - _attrs_ = ('totype', 'name') - - def __init__(self, totype, name, quals=0): - PointerType.__init__(self, totype, quals) - self.name = name - self.c_name_with_marker = name + '&' - - -class ArrayType(BaseType): - _attrs_ = ('item', 'length') - is_array_type = True - - def __init__(self, item, length): - self.item = item - self.length = length - # - if length is None: - 
brackets = '&[]' - elif length == '...': - brackets = '&[/*...*/]' - else: - brackets = '&[%s]' % length - self.c_name_with_marker = ( - self.item.c_name_with_marker.replace('&', brackets)) - - def length_is_unknown(self): - return isinstance(self.length, str) - - def resolve_length(self, newlength): - return ArrayType(self.item, newlength) - - def build_backend_type(self, ffi, finishlist): - if self.length_is_unknown(): - raise CDefError("cannot render the type %r: unknown length" % - (self,)) - self.item.get_cached_btype(ffi, finishlist) # force the item BType - BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) - return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) - -char_array_type = ArrayType(PrimitiveType('char'), None) - - -class StructOrUnionOrEnum(BaseTypeByIdentity): - _attrs_ = ('name',) - forcename = None - - def build_c_name_with_marker(self): - name = self.forcename or '%s %s' % (self.kind, self.name) - self.c_name_with_marker = name + '&' - - def force_the_name(self, forcename): - self.forcename = forcename - self.build_c_name_with_marker() - - def get_official_name(self): - assert self.c_name_with_marker.endswith('&') - return self.c_name_with_marker[:-1] - - -class StructOrUnion(StructOrUnionOrEnum): - fixedlayout = None - completed = 0 - partial = False - packed = 0 - - def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): - self.name = name - self.fldnames = fldnames - self.fldtypes = fldtypes - self.fldbitsize = fldbitsize - self.fldquals = fldquals - self.build_c_name_with_marker() - - def anonymous_struct_fields(self): - if self.fldtypes is not None: - for name, type in zip(self.fldnames, self.fldtypes): - if name == '' and isinstance(type, StructOrUnion): - yield type - - def enumfields(self, expand_anonymous_struct_union=True): - fldquals = self.fldquals - if fldquals is None: - fldquals = (0,) * len(self.fldnames) - for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, - self.fldbitsize, fldquals): - if (name == '' and isinstance(type, StructOrUnion) - and expand_anonymous_struct_union): - # nested anonymous struct/union - for result in type.enumfields(): - yield result - else: - yield (name, type, bitsize, quals) - - def force_flatten(self): - # force the struct or union to have a declaration that lists - # directly all fields returned by enumfields(), flattening - # nested anonymous structs/unions. 
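A minimal sketch (not cffi itself) of the c_name_with_marker convention used above: each type keeps its C spelling with a single '&' placeholder marking where a variable name would be spliced in, so pointer, array and function types can compose by plain string replacement. The declare helper and the example strings below are illustrative only:

    def declare(c_name_with_marker, replace_with=''):
        # splice a variable name (or nothing) where the '&' marker sits
        return c_name_with_marker.replace('&', replace_with)

    assert declare('int&', ' x') == 'int x'
    # PointerType wraps the marker in parentheses when it points to an array,
    # which is how 'pointer to array of 10 ints' composes:
    assert declare('int(*&)[10]', ' x') == 'int(* x)[10]'

The real get_c_name additionally strips and re-qualifies the replacement text, which this sketch omits.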
- names = [] - types = [] - bitsizes = [] - fldquals = [] - for name, type, bitsize, quals in self.enumfields(): - names.append(name) - types.append(type) - bitsizes.append(bitsize) - fldquals.append(quals) - self.fldnames = tuple(names) - self.fldtypes = tuple(types) - self.fldbitsize = tuple(bitsizes) - self.fldquals = tuple(fldquals) - - def get_cached_btype(self, ffi, finishlist, can_delay=False): - BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, - can_delay) - if not can_delay: - self.finish_backend_type(ffi, finishlist) - return BType - - def finish_backend_type(self, ffi, finishlist): - if self.completed: - if self.completed != 2: - raise NotImplementedError("recursive structure declaration " - "for '%s'" % (self.name,)) - return - BType = ffi._cached_btypes[self] - # - self.completed = 1 - # - if self.fldtypes is None: - pass # not completing it: it's an opaque struct - # - elif self.fixedlayout is None: - fldtypes = [tp.get_cached_btype(ffi, finishlist) - for tp in self.fldtypes] - lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) - extra_flags = () - if self.packed: - if self.packed == 1: - extra_flags = (8,) # SF_PACKED - else: - extra_flags = (0, self.packed) - ffi._backend.complete_struct_or_union(BType, lst, self, - -1, -1, *extra_flags) - # - else: - fldtypes = [] - fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout - for i in range(len(self.fldnames)): - fsize = fieldsize[i] - ftype = self.fldtypes[i] - # - if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): - # fix the length to match the total size - BItemType = ftype.item.get_cached_btype(ffi, finishlist) - nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) - if nrest != 0: - self._verification_error( - "field '%s.%s' has a bogus size?" 
% ( - self.name, self.fldnames[i] or '{}')) - ftype = ftype.resolve_length(nlen) - self.fldtypes = (self.fldtypes[:i] + (ftype,) + - self.fldtypes[i+1:]) - # - BFieldType = ftype.get_cached_btype(ffi, finishlist) - if isinstance(ftype, ArrayType) and ftype.length is None: - assert fsize == 0 - else: - bitemsize = ffi.sizeof(BFieldType) - if bitemsize != fsize: - self._verification_error( - "field '%s.%s' is declared as %d bytes, but is " - "really %d bytes" % (self.name, - self.fldnames[i] or '{}', - bitemsize, fsize)) - fldtypes.append(BFieldType) - # - lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) - ffi._backend.complete_struct_or_union(BType, lst, self, - totalsize, totalalignment) - self.completed = 2 - - def _verification_error(self, msg): - raise VerificationError(msg) - - def check_not_partial(self): - if self.partial and self.fixedlayout is None: - raise VerificationMissing(self._get_c_name()) - - def build_backend_type(self, ffi, finishlist): - self.check_not_partial() - finishlist.append(self) - # - return global_cache(self, ffi, 'new_%s_type' % self.kind, - self.get_official_name(), key=self) - - -class StructType(StructOrUnion): - kind = 'struct' - - -class UnionType(StructOrUnion): - kind = 'union' - - -class EnumType(StructOrUnionOrEnum): - kind = 'enum' - partial = False - partial_resolved = False - - def __init__(self, name, enumerators, enumvalues, baseinttype=None): - self.name = name - self.enumerators = enumerators - self.enumvalues = enumvalues - self.baseinttype = baseinttype - self.build_c_name_with_marker() - - def force_the_name(self, forcename): - StructOrUnionOrEnum.force_the_name(self, forcename) - if self.forcename is None: - name = self.get_official_name() - self.forcename = '$' + name.replace(' ', '_') - - def check_not_partial(self): - if self.partial and not self.partial_resolved: - raise VerificationMissing(self._get_c_name()) - - def build_backend_type(self, ffi, finishlist): - self.check_not_partial() - base_btype = self.build_baseinttype(ffi, finishlist) - return global_cache(self, ffi, 'new_enum_type', - self.get_official_name(), - self.enumerators, self.enumvalues, - base_btype, key=self) - - def build_baseinttype(self, ffi, finishlist): - if self.baseinttype is not None: - return self.baseinttype.get_cached_btype(ffi, finishlist) - # - if self.enumvalues: - smallest_value = min(self.enumvalues) - largest_value = max(self.enumvalues) - else: - import warnings - try: - # XXX! The goal is to ensure that the warnings.warn() - # will not suppress the warning. We want to get it - # several times if we reach this point several times. 
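The divmod arithmetic just above recovers an unknown 'arr[...]' length from the field size that the C compiler reports in a fixed layout. A small sketch with made-up sizes:

    field_size = 12                      # assumed: bytes reported for the whole field
    item_size = 4                        # assumed: sizeof() of the array item
    nlen, nrest = divmod(field_size, item_size)
    if nrest != 0:
        raise ValueError("field has a bogus size?")
    assert nlen == 3                     # the '...' length resolves to arr[3]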
- __warningregistry__.clear() - except NameError: - pass - warnings.warn("%r has no values explicitly defined; " - "guessing that it is equivalent to 'unsigned int'" - % self._get_c_name()) - smallest_value = largest_value = 0 - if smallest_value < 0: # needs a signed type - sign = 1 - candidate1 = PrimitiveType("int") - candidate2 = PrimitiveType("long") - else: - sign = 0 - candidate1 = PrimitiveType("unsigned int") - candidate2 = PrimitiveType("unsigned long") - btype1 = candidate1.get_cached_btype(ffi, finishlist) - btype2 = candidate2.get_cached_btype(ffi, finishlist) - size1 = ffi.sizeof(btype1) - size2 = ffi.sizeof(btype2) - if (smallest_value >= ((-1) << (8*size1-1)) and - largest_value < (1 << (8*size1-sign))): - return btype1 - if (smallest_value >= ((-1) << (8*size2-1)) and - largest_value < (1 << (8*size2-sign))): - return btype2 - raise CDefError("%s values don't all fit into either 'long' " - "or 'unsigned long'" % self._get_c_name()) - -def unknown_type(name, structname=None): - if structname is None: - structname = '$%s' % name - tp = StructType(structname, None, None, None) - tp.force_the_name(name) - tp.origin = "unknown_type" - return tp - -def unknown_ptr_type(name, structname=None): - if structname is None: - structname = '$$%s' % name - tp = StructType(structname, None, None, None) - return NamedPointerType(tp, name) - - -global_lock = allocate_lock() -_typecache_cffi_backend = weakref.WeakValueDictionary() - -def get_typecache(backend): - # returns _typecache_cffi_backend if backend is the _cffi_backend - # module, or type(backend).__typecache if backend is an instance of - # CTypesBackend (or some FakeBackend class during tests) - if isinstance(backend, types.ModuleType): - return _typecache_cffi_backend - with global_lock: - if not hasattr(type(backend), '__typecache'): - type(backend).__typecache = weakref.WeakValueDictionary() - return type(backend).__typecache - -def global_cache(srctype, ffi, funcname, *args, **kwds): - key = kwds.pop('key', (funcname, args)) - assert not kwds - try: - return ffi._typecache[key] - except KeyError: - pass - try: - res = getattr(ffi._backend, funcname)(*args) - except NotImplementedError as e: - raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) - # note that setdefault() on WeakValueDictionary is not atomic - # and contains a rare bug (http://bugs.python.org/issue19542); - # we have to use a lock and do it ourselves - cache = ffi._typecache - with global_lock: - res1 = cache.get(key) - if res1 is None: - cache[key] = res - return res - else: - return res1 - -def pointer_cache(ffi, BType): - return global_cache('?', ffi, 'new_pointer_type', BType) - -def attach_exception_info(e, name): - if e.args and type(e.args[0]) is str: - e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/venv/lib/python3.10/site-packages/cffi/parse_c_type.h b/venv/lib/python3.10/site-packages/cffi/parse_c_type.h deleted file mode 100644 index 84e4ef8..0000000 --- a/venv/lib/python3.10/site-packages/cffi/parse_c_type.h +++ /dev/null @@ -1,181 +0,0 @@ - -/* This part is from file 'cffi/parse_c_type.h'. It is copied at the - beginning of C sources generated by CFFI's ffi.set_source(). 
*/ - -typedef void *_cffi_opcode_t; - -#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) -#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) -#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) - -#define _CFFI_OP_PRIMITIVE 1 -#define _CFFI_OP_POINTER 3 -#define _CFFI_OP_ARRAY 5 -#define _CFFI_OP_OPEN_ARRAY 7 -#define _CFFI_OP_STRUCT_UNION 9 -#define _CFFI_OP_ENUM 11 -#define _CFFI_OP_FUNCTION 13 -#define _CFFI_OP_FUNCTION_END 15 -#define _CFFI_OP_NOOP 17 -#define _CFFI_OP_BITFIELD 19 -#define _CFFI_OP_TYPENAME 21 -#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs -#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs -#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) -#define _CFFI_OP_CONSTANT 29 -#define _CFFI_OP_CONSTANT_INT 31 -#define _CFFI_OP_GLOBAL_VAR 33 -#define _CFFI_OP_DLOPEN_FUNC 35 -#define _CFFI_OP_DLOPEN_CONST 37 -#define _CFFI_OP_GLOBAL_VAR_F 39 -#define _CFFI_OP_EXTERN_PYTHON 41 - -#define _CFFI_PRIM_VOID 0 -#define _CFFI_PRIM_BOOL 1 -#define _CFFI_PRIM_CHAR 2 -#define _CFFI_PRIM_SCHAR 3 -#define _CFFI_PRIM_UCHAR 4 -#define _CFFI_PRIM_SHORT 5 -#define _CFFI_PRIM_USHORT 6 -#define _CFFI_PRIM_INT 7 -#define _CFFI_PRIM_UINT 8 -#define _CFFI_PRIM_LONG 9 -#define _CFFI_PRIM_ULONG 10 -#define _CFFI_PRIM_LONGLONG 11 -#define _CFFI_PRIM_ULONGLONG 12 -#define _CFFI_PRIM_FLOAT 13 -#define _CFFI_PRIM_DOUBLE 14 -#define _CFFI_PRIM_LONGDOUBLE 15 - -#define _CFFI_PRIM_WCHAR 16 -#define _CFFI_PRIM_INT8 17 -#define _CFFI_PRIM_UINT8 18 -#define _CFFI_PRIM_INT16 19 -#define _CFFI_PRIM_UINT16 20 -#define _CFFI_PRIM_INT32 21 -#define _CFFI_PRIM_UINT32 22 -#define _CFFI_PRIM_INT64 23 -#define _CFFI_PRIM_UINT64 24 -#define _CFFI_PRIM_INTPTR 25 -#define _CFFI_PRIM_UINTPTR 26 -#define _CFFI_PRIM_PTRDIFF 27 -#define _CFFI_PRIM_SIZE 28 -#define _CFFI_PRIM_SSIZE 29 -#define _CFFI_PRIM_INT_LEAST8 30 -#define _CFFI_PRIM_UINT_LEAST8 31 -#define _CFFI_PRIM_INT_LEAST16 32 -#define _CFFI_PRIM_UINT_LEAST16 33 -#define _CFFI_PRIM_INT_LEAST32 34 -#define _CFFI_PRIM_UINT_LEAST32 35 -#define _CFFI_PRIM_INT_LEAST64 36 -#define _CFFI_PRIM_UINT_LEAST64 37 -#define _CFFI_PRIM_INT_FAST8 38 -#define _CFFI_PRIM_UINT_FAST8 39 -#define _CFFI_PRIM_INT_FAST16 40 -#define _CFFI_PRIM_UINT_FAST16 41 -#define _CFFI_PRIM_INT_FAST32 42 -#define _CFFI_PRIM_UINT_FAST32 43 -#define _CFFI_PRIM_INT_FAST64 44 -#define _CFFI_PRIM_UINT_FAST64 45 -#define _CFFI_PRIM_INTMAX 46 -#define _CFFI_PRIM_UINTMAX 47 -#define _CFFI_PRIM_FLOATCOMPLEX 48 -#define _CFFI_PRIM_DOUBLECOMPLEX 49 -#define _CFFI_PRIM_CHAR16 50 -#define _CFFI_PRIM_CHAR32 51 - -#define _CFFI__NUM_PRIM 52 -#define _CFFI__UNKNOWN_PRIM (-1) -#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) -#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) - -#define _CFFI__IO_FILE_STRUCT (-1) - - -struct _cffi_global_s { - const char *name; - void *address; - _cffi_opcode_t type_op; - void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown - // OP_CPYTHON_BLTN_*: addr of direct function -}; - -struct _cffi_getconst_s { - unsigned long long value; - const struct _cffi_type_context_s *ctx; - int gindex; -}; - -struct _cffi_struct_union_s { - const char *name; - int type_index; // -> _cffi_types, on a OP_STRUCT_UNION - int flags; // _CFFI_F_* flags below - size_t size; - int alignment; - int first_field_index; // -> _cffi_fields array - int num_fields; -}; -#define _CFFI_F_UNION 0x01 // is a union, not a struct -#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the - // "standard layout" or if some are missing -#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct -#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() -#define _CFFI_F_OPAQUE 0x10 // opaque - -struct _cffi_field_s { - const char *name; - size_t field_offset; - size_t field_size; - _cffi_opcode_t field_type_op; -}; - -struct _cffi_enum_s { - const char *name; - int type_index; // -> _cffi_types, on a OP_ENUM - int type_prim; // _CFFI_PRIM_xxx - const char *enumerators; // comma-delimited string -}; - -struct _cffi_typename_s { - const char *name; - int type_index; /* if opaque, points to a possibly artificial - OP_STRUCT which is itself opaque */ -}; - -struct _cffi_type_context_s { - _cffi_opcode_t *types; - const struct _cffi_global_s *globals; - const struct _cffi_field_s *fields; - const struct _cffi_struct_union_s *struct_unions; - const struct _cffi_enum_s *enums; - const struct _cffi_typename_s *typenames; - int num_globals; - int num_struct_unions; - int num_enums; - int num_typenames; - const char *const *includes; - int num_types; - int flags; /* future extension */ -}; - -struct _cffi_parse_info_s { - const struct _cffi_type_context_s *ctx; - _cffi_opcode_t *output; - unsigned int output_size; - size_t error_location; - const char *error_message; -}; - -struct _cffi_externpy_s { - const char *name; - size_t size_of_result; - void *reserved1, *reserved2; -}; - -#ifdef _CFFI_INTERNAL -static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); -static int search_in_globals(const struct _cffi_type_context_s *ctx, - const char *search, size_t search_len); -static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, - const char *search, size_t search_len); -#endif diff --git a/venv/lib/python3.10/site-packages/cffi/pkgconfig.py b/venv/lib/python3.10/site-packages/cffi/pkgconfig.py deleted file mode 100644 index 5c93f15..0000000 --- a/venv/lib/python3.10/site-packages/cffi/pkgconfig.py +++ /dev/null @@ -1,121 +0,0 @@ -# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi -import sys, os, subprocess - -from .error import PkgConfigError - - -def merge_flags(cfg1, cfg2): - """Merge values from cffi config flags cfg2 to cf1 - - Example: - merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) - {"libraries": ["one", "two"]} - """ - for key, value in cfg2.items(): - if key not in cfg1: - cfg1[key] = value - else: - if not isinstance(cfg1[key], list): - raise TypeError("cfg1[%r] should be a list of strings" % (key,)) - if not isinstance(value, list): - raise TypeError("cfg2[%r] should be a list of strings" % (key,)) - cfg1[key].extend(value) - return cfg1 - - -def call(libname, flag, encoding=sys.getfilesystemencoding()): - """Calls pkg-config and returns the output if found - """ - a = ["pkg-config", "--print-errors"] - a.append(flag) - a.append(libname) - try: - pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - except EnvironmentError as e: - raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) - - bout, berr = pc.communicate() - if pc.returncode != 0: - try: - berr = berr.decode(encoding) - except Exception: - pass - raise PkgConfigError(berr.strip()) - - if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x - try: - bout = bout.decode(encoding) - except UnicodeDecodeError: - raise PkgConfigError("pkg-config %s %s returned bytes that cannot " - "be decoded with encoding %r:\n%r" % - (flag, libname, encoding, bout)) - - if os.altsep != '\\' and '\\' in bout: - raise 
PkgConfigError("pkg-config %s %s returned an unsupported " - "backslash-escaped output:\n%r" % - (flag, libname, bout)) - return bout - - -def flags_from_pkgconfig(libs): - r"""Return compiler line flags for FFI.set_source based on pkg-config output - - Usage - ... - ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) - - If pkg-config is installed on build machine, then arguments include_dirs, - library_dirs, libraries, define_macros, extra_compile_args and - extra_link_args are extended with an output of pkg-config for libfoo and - libbar. - - Raises PkgConfigError in case the pkg-config call fails. - """ - - def get_include_dirs(string): - return [x[2:] for x in string.split() if x.startswith("-I")] - - def get_library_dirs(string): - return [x[2:] for x in string.split() if x.startswith("-L")] - - def get_libraries(string): - return [x[2:] for x in string.split() if x.startswith("-l")] - - # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils - def get_macros(string): - def _macro(x): - x = x[2:] # drop "-D" - if '=' in x: - return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") - else: - return (x, None) # "-Dfoo" => ("foo", None) - return [_macro(x) for x in string.split() if x.startswith("-D")] - - def get_other_cflags(string): - return [x for x in string.split() if not x.startswith("-I") and - not x.startswith("-D")] - - def get_other_libs(string): - return [x for x in string.split() if not x.startswith("-L") and - not x.startswith("-l")] - - # return kwargs for given libname - def kwargs(libname): - fse = sys.getfilesystemencoding() - all_cflags = call(libname, "--cflags") - all_libs = call(libname, "--libs") - return { - "include_dirs": get_include_dirs(all_cflags), - "library_dirs": get_library_dirs(all_libs), - "libraries": get_libraries(all_libs), - "define_macros": get_macros(all_cflags), - "extra_compile_args": get_other_cflags(all_cflags), - "extra_link_args": get_other_libs(all_libs), - } - - # merge all arguments together - ret = {} - for libname in libs: - lib_flags = kwargs(libname) - merge_flags(ret, lib_flags) - return ret diff --git a/venv/lib/python3.10/site-packages/cffi/recompiler.py b/venv/lib/python3.10/site-packages/cffi/recompiler.py deleted file mode 100644 index 57781a3..0000000 --- a/venv/lib/python3.10/site-packages/cffi/recompiler.py +++ /dev/null @@ -1,1598 +0,0 @@ -import os, sys, io -from . 
import ffiplatform, model -from .error import VerificationError -from .cffi_opcode import * - -VERSION_BASE = 0x2601 -VERSION_EMBEDDED = 0x2701 -VERSION_CHAR16CHAR32 = 0x2801 - -USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or - sys.version_info >= (3, 5)) - - -class GlobalExpr: - def __init__(self, name, address, type_op, size=0, check_value=0): - self.name = name - self.address = address - self.type_op = type_op - self.size = size - self.check_value = check_value - - def as_c_expr(self): - return ' { "%s", (void *)%s, %s, (void *)%s },' % ( - self.name, self.address, self.type_op.as_c_expr(), self.size) - - def as_python_expr(self): - return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, - self.check_value) - -class FieldExpr: - def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): - self.name = name - self.field_offset = field_offset - self.field_size = field_size - self.fbitsize = fbitsize - self.field_type_op = field_type_op - - def as_c_expr(self): - spaces = " " * len(self.name) - return (' { "%s", %s,\n' % (self.name, self.field_offset) + - ' %s %s,\n' % (spaces, self.field_size) + - ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) - - def as_python_expr(self): - raise NotImplementedError - - def as_field_python_expr(self): - if self.field_type_op.op == OP_NOOP: - size_expr = '' - elif self.field_type_op.op == OP_BITFIELD: - size_expr = format_four_bytes(self.fbitsize) - else: - raise NotImplementedError - return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), - size_expr, - self.name) - -class StructUnionExpr: - def __init__(self, name, type_index, flags, size, alignment, comment, - first_field_index, c_fields): - self.name = name - self.type_index = type_index - self.flags = flags - self.size = size - self.alignment = alignment - self.comment = comment - self.first_field_index = first_field_index - self.c_fields = c_fields - - def as_c_expr(self): - return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) - + '\n %s, %s, ' % (self.size, self.alignment) - + '%d, %d ' % (self.first_field_index, len(self.c_fields)) - + ('/* %s */ ' % self.comment if self.comment else '') - + '},') - - def as_python_expr(self): - flags = eval(self.flags, G_FLAGS) - fields_expr = [c_field.as_field_python_expr() - for c_field in self.c_fields] - return "(b'%s%s%s',%s)" % ( - format_four_bytes(self.type_index), - format_four_bytes(flags), - self.name, - ','.join(fields_expr)) - -class EnumExpr: - def __init__(self, name, type_index, size, signed, allenums): - self.name = name - self.type_index = type_index - self.size = size - self.signed = signed - self.allenums = allenums - - def as_c_expr(self): - return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' - ' "%s" },' % (self.name, self.type_index, - self.size, self.signed, self.allenums)) - - def as_python_expr(self): - prim_index = { - (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, - (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, - (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, - (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, - }[self.size, self.signed] - return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), - format_four_bytes(prim_index), - self.name, self.allenums) - -class TypenameExpr: - def __init__(self, name, type_index): - self.name = name - self.type_index = type_index - - def as_c_expr(self): - return ' { "%s", %d },' % (self.name, self.type_index) - - def as_python_expr(self): - return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) - - -# 
____________________________________________________________ - - -class Recompiler: - _num_externpy = 0 - - def __init__(self, ffi, module_name, target_is_python=False): - self.ffi = ffi - self.module_name = module_name - self.target_is_python = target_is_python - self._version = VERSION_BASE - - def needs_version(self, ver): - self._version = max(self._version, ver) - - def collect_type_table(self): - self._typesdict = {} - self._generate("collecttype") - # - all_decls = sorted(self._typesdict, key=str) - # - # prepare all FUNCTION bytecode sequences first - self.cffi_types = [] - for tp in all_decls: - if tp.is_raw_function: - assert self._typesdict[tp] is None - self._typesdict[tp] = len(self.cffi_types) - self.cffi_types.append(tp) # placeholder - for tp1 in tp.args: - assert isinstance(tp1, (model.VoidType, - model.BasePrimitiveType, - model.PointerType, - model.StructOrUnionOrEnum, - model.FunctionPtrType)) - if self._typesdict[tp1] is None: - self._typesdict[tp1] = len(self.cffi_types) - self.cffi_types.append(tp1) # placeholder - self.cffi_types.append('END') # placeholder - # - # prepare all OTHER bytecode sequences - for tp in all_decls: - if not tp.is_raw_function and self._typesdict[tp] is None: - self._typesdict[tp] = len(self.cffi_types) - self.cffi_types.append(tp) # placeholder - if tp.is_array_type and tp.length is not None: - self.cffi_types.append('LEN') # placeholder - assert None not in self._typesdict.values() - # - # collect all structs and unions and enums - self._struct_unions = {} - self._enums = {} - for tp in all_decls: - if isinstance(tp, model.StructOrUnion): - self._struct_unions[tp] = None - elif isinstance(tp, model.EnumType): - self._enums[tp] = None - for i, tp in enumerate(sorted(self._struct_unions, - key=lambda tp: tp.name)): - self._struct_unions[tp] = i - for i, tp in enumerate(sorted(self._enums, - key=lambda tp: tp.name)): - self._enums[tp] = i - # - # emit all bytecode sequences now - for tp in all_decls: - method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) - method(tp, self._typesdict[tp]) - # - # consistency check - for op in self.cffi_types: - assert isinstance(op, CffiOp) - self.cffi_types = tuple(self.cffi_types) # don't change any more - - def _enum_fields(self, tp): - # When producing C, expand all anonymous struct/union fields. - # That's necessary to have C code checking the offsets of the - # individual fields contained in them. When producing Python, - # don't do it and instead write it like it is, with the - # corresponding fields having an empty name. Empty names are - # recognized at runtime when we import the generated Python - # file. 
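The _CFFI_OP / _CFFI_GETOP / _CFFI_GETARG macros in parse_c_type.h above pack an opcode and its argument into a single pointer-sized word: the low byte holds the opcode, the remaining bits the argument. A Python sketch of the same packing (the helper name cffi_op and the index 3 are illustrative):

    def cffi_op(opcode, arg):
        return opcode | (arg << 8)

    word = cffi_op(9, 3)        # e.g. _CFFI_OP_STRUCT_UNION referring to struct index 3
    assert word & 0xFF == 9     # what _CFFI_GETOP extracts
    assert word >> 8 == 3       # what _CFFI_GETARG extracts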
- expand_anonymous_struct_union = not self.target_is_python - return tp.enumfields(expand_anonymous_struct_union) - - def _do_collect_type(self, tp): - if not isinstance(tp, model.BaseTypeByIdentity): - if isinstance(tp, tuple): - for x in tp: - self._do_collect_type(x) - return - if tp not in self._typesdict: - self._typesdict[tp] = None - if isinstance(tp, model.FunctionPtrType): - self._do_collect_type(tp.as_raw_function()) - elif isinstance(tp, model.StructOrUnion): - if tp.fldtypes is not None and ( - tp not in self.ffi._parser._included_declarations): - for name1, tp1, _, _ in self._enum_fields(tp): - self._do_collect_type(self._field_type(tp, name1, tp1)) - else: - for _, x in tp._get_items(): - self._do_collect_type(x) - - def _generate(self, step_name): - lst = self.ffi._parser._declarations.items() - for name, (tp, quals) in sorted(lst): - kind, realname = name.split(' ', 1) - try: - method = getattr(self, '_generate_cpy_%s_%s' % (kind, - step_name)) - except AttributeError: - raise VerificationError( - "not implemented in recompile(): %r" % name) - try: - self._current_quals = quals - method(tp, realname) - except Exception as e: - model.attach_exception_info(e, name) - raise - - # ---------- - - ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] - - def collect_step_tables(self): - # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. - self._lsts = {} - for step_name in self.ALL_STEPS: - self._lsts[step_name] = [] - self._seen_struct_unions = set() - self._generate("ctx") - self._add_missing_struct_unions() - # - for step_name in self.ALL_STEPS: - lst = self._lsts[step_name] - if step_name != "field": - lst.sort(key=lambda entry: entry.name) - self._lsts[step_name] = tuple(lst) # don't change any more - # - # check for a possible internal inconsistency: _cffi_struct_unions - # should have been generated with exactly self._struct_unions - lst = self._lsts["struct_union"] - for tp, i in self._struct_unions.items(): - assert i < len(lst) - assert lst[i].name == tp.name - assert len(lst) == len(self._struct_unions) - # same with enums - lst = self._lsts["enum"] - for tp, i in self._enums.items(): - assert i < len(lst) - assert lst[i].name == tp.name - assert len(lst) == len(self._enums) - - # ---------- - - def _prnt(self, what=''): - self._f.write(what + '\n') - - def write_source_to_f(self, f, preamble): - if self.target_is_python: - assert preamble is None - self.write_py_source_to_f(f) - else: - assert preamble is not None - self.write_c_source_to_f(f, preamble) - - def _rel_readlines(self, filename): - g = open(os.path.join(os.path.dirname(__file__), filename), 'r') - lines = g.readlines() - g.close() - return lines - - def write_c_source_to_f(self, f, preamble): - self._f = f - prnt = self._prnt - if self.ffi._embedding is not None: - prnt('#define _CFFI_USE_EMBEDDING') - if not USE_LIMITED_API: - prnt('#define _CFFI_NO_LIMITED_API') - # - # first the '#include' (actually done by inlining the file's content) - lines = self._rel_readlines('_cffi_include.h') - i = lines.index('#include "parse_c_type.h"\n') - lines[i:i+1] = self._rel_readlines('parse_c_type.h') - prnt(''.join(lines)) - # - # if we have ffi._embedding != None, we give it here as a macro - # and include an extra file - base_module_name = self.module_name.split('.')[-1] - if self.ffi._embedding is not None: - prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) - prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') - 
self._print_string_literal_in_array(self.ffi._embedding) - prnt('0 };') - prnt('#ifdef PYPY_VERSION') - prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( - base_module_name,)) - prnt('#elif PY_MAJOR_VERSION >= 3') - prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( - base_module_name,)) - prnt('#else') - prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( - base_module_name,)) - prnt('#endif') - lines = self._rel_readlines('_embedding.h') - i = lines.index('#include "_cffi_errors.h"\n') - lines[i:i+1] = self._rel_readlines('_cffi_errors.h') - prnt(''.join(lines)) - self.needs_version(VERSION_EMBEDDED) - # - # then paste the C source given by the user, verbatim. - prnt('/************************************************************/') - prnt() - prnt(preamble) - prnt() - prnt('/************************************************************/') - prnt() - # - # the declaration of '_cffi_types' - prnt('static void *_cffi_types[] = {') - typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) - for i, op in enumerate(self.cffi_types): - comment = '' - if i in typeindex2type: - comment = ' // ' + typeindex2type[i]._get_c_name() - prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) - if not self.cffi_types: - prnt(' 0') - prnt('};') - prnt() - # - # call generate_cpy_xxx_decl(), for every xxx found from - # ffi._parser._declarations. This generates all the functions. - self._seen_constants = set() - self._generate("decl") - # - # the declaration of '_cffi_globals' and '_cffi_typenames' - nums = {} - for step_name in self.ALL_STEPS: - lst = self._lsts[step_name] - nums[step_name] = len(lst) - if nums[step_name] > 0: - prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( - step_name, step_name)) - for entry in lst: - prnt(entry.as_c_expr()) - prnt('};') - prnt() - # - # the declaration of '_cffi_includes' - if self.ffi._included_ffis: - prnt('static const char * const _cffi_includes[] = {') - for ffi_to_include in self.ffi._included_ffis: - try: - included_module_name, included_source = ( - ffi_to_include._assigned_source[:2]) - except AttributeError: - raise VerificationError( - "ffi object %r includes %r, but the latter has not " - "been prepared with set_source()" % ( - self.ffi, ffi_to_include,)) - if included_source is None: - raise VerificationError( - "not implemented yet: ffi.include() of a Python-based " - "ffi inside a C-based ffi") - prnt(' "%s",' % (included_module_name,)) - prnt(' NULL') - prnt('};') - prnt() - # - # the declaration of '_cffi_type_context' - prnt('static const struct _cffi_type_context_s _cffi_type_context = {') - prnt(' _cffi_types,') - for step_name in self.ALL_STEPS: - if nums[step_name] > 0: - prnt(' _cffi_%ss,' % step_name) - else: - prnt(' NULL, /* no %ss */' % step_name) - for step_name in self.ALL_STEPS: - if step_name != "field": - prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) - if self.ffi._included_ffis: - prnt(' _cffi_includes,') - else: - prnt(' NULL, /* no includes */') - prnt(' %d, /* num_types */' % (len(self.cffi_types),)) - flags = 0 - if self._num_externpy > 0 or self.ffi._embedding is not None: - flags |= 1 # set to mean that we use extern "Python" - prnt(' %d, /* flags */' % flags) - prnt('};') - prnt() - # - # the init function - prnt('#ifdef __GNUC__') - prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') - prnt('#endif') - prnt() - prnt('#ifdef PYPY_VERSION') - prnt('PyMODINIT_FUNC') - prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) - prnt('{') - if flags & 
1: - prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') - prnt(' _cffi_call_python_org = ' - '(void(*)(struct _cffi_externpy_s *, char *))p[1];') - prnt(' }') - prnt(' p[0] = (const void *)0x%x;' % self._version) - prnt(' p[1] = &_cffi_type_context;') - prnt('#if PY_MAJOR_VERSION >= 3') - prnt(' return NULL;') - prnt('#endif') - prnt('}') - # on Windows, distutils insists on putting init_cffi_xyz in - # 'export_symbols', so instead of fighting it, just give up and - # give it one - prnt('# ifdef _MSC_VER') - prnt(' PyMODINIT_FUNC') - prnt('# if PY_MAJOR_VERSION >= 3') - prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) - prnt('# else') - prnt(' init%s(void) { }' % (base_module_name,)) - prnt('# endif') - prnt('# endif') - prnt('#elif PY_MAJOR_VERSION >= 3') - prnt('PyMODINIT_FUNC') - prnt('PyInit_%s(void)' % (base_module_name,)) - prnt('{') - prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( - self.module_name, self._version)) - prnt('}') - prnt('#else') - prnt('PyMODINIT_FUNC') - prnt('init%s(void)' % (base_module_name,)) - prnt('{') - prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( - self.module_name, self._version)) - prnt('}') - prnt('#endif') - prnt() - prnt('#ifdef __GNUC__') - prnt('# pragma GCC visibility pop') - prnt('#endif') - self._version = None - - def _to_py(self, x): - if isinstance(x, str): - return "b'%s'" % (x,) - if isinstance(x, (list, tuple)): - rep = [self._to_py(item) for item in x] - if len(rep) == 1: - rep.append('') - return "(%s)" % (','.join(rep),) - return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. - - def write_py_source_to_f(self, f): - self._f = f - prnt = self._prnt - # - # header - prnt("# auto-generated file") - prnt("import _cffi_backend") - # - # the 'import' of the included ffis - num_includes = len(self.ffi._included_ffis or ()) - for i in range(num_includes): - ffi_to_include = self.ffi._included_ffis[i] - try: - included_module_name, included_source = ( - ffi_to_include._assigned_source[:2]) - except AttributeError: - raise VerificationError( - "ffi object %r includes %r, but the latter has not " - "been prepared with set_source()" % ( - self.ffi, ffi_to_include,)) - if included_source is not None: - raise VerificationError( - "not implemented yet: ffi.include() of a C-based " - "ffi inside a Python-based ffi") - prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) - prnt() - prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) - prnt(" _version = 0x%x," % (self._version,)) - self._version = None - # - # the '_types' keyword argument - self.cffi_types = tuple(self.cffi_types) # don't change any more - types_lst = [op.as_python_bytes() for op in self.cffi_types] - prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) - typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) - # - # the keyword arguments from ALL_STEPS - for step_name in self.ALL_STEPS: - lst = self._lsts[step_name] - if len(lst) > 0 and step_name != "field": - prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) - # - # the '_includes' keyword argument - if num_includes > 0: - prnt(' _includes = (%s,),' % ( - ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) - # - # the footer - prnt(')') - - # ---------- - - def _gettypenum(self, type): - # a KeyError here is a bug. please report it! 
:-) - return self._typesdict[type] - - def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): - extraarg = '' - if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): - if tp.is_integer_type() and tp.name != '_Bool': - converter = '_cffi_to_c_int' - extraarg = ', %s' % tp.name - elif isinstance(tp, model.UnknownFloatType): - # don't check with is_float_type(): it may be a 'long - # double' here, and _cffi_to_c_double would loose precision - converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) - else: - cname = tp.get_c_name('') - converter = '(%s)_cffi_to_c_%s' % (cname, - tp.name.replace(' ', '_')) - if cname in ('char16_t', 'char32_t'): - self.needs_version(VERSION_CHAR16CHAR32) - errvalue = '-1' - # - elif isinstance(tp, model.PointerType): - self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, - tovar, errcode) - return - # - elif (isinstance(tp, model.StructOrUnionOrEnum) or - isinstance(tp, model.BasePrimitiveType)): - # a struct (not a struct pointer) as a function argument; - # or, a complex (the same code works) - self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' - % (tovar, self._gettypenum(tp), fromvar)) - self._prnt(' %s;' % errcode) - return - # - elif isinstance(tp, model.FunctionPtrType): - converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') - extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) - errvalue = 'NULL' - # - else: - raise NotImplementedError(tp) - # - self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) - self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( - tovar, tp.get_c_name(''), errvalue)) - self._prnt(' %s;' % errcode) - - def _extra_local_variables(self, tp, localvars, freelines): - if isinstance(tp, model.PointerType): - localvars.add('Py_ssize_t datasize') - localvars.add('struct _cffi_freeme_s *large_args_free = NULL') - freelines.add('if (large_args_free != NULL)' - ' _cffi_free_array_arguments(large_args_free);') - - def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): - self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') - self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( - self._gettypenum(tp), fromvar, tovar)) - self._prnt(' if (datasize != 0) {') - self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' - '(%s)alloca((size_t)datasize) : NULL;' % ( - tovar, tp.get_c_name(''))) - self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' - '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) - self._prnt(' datasize, &large_args_free) < 0)') - self._prnt(' %s;' % errcode) - self._prnt(' }') - - def _convert_expr_from_c(self, tp, var, context): - if isinstance(tp, model.BasePrimitiveType): - if tp.is_integer_type() and tp.name != '_Bool': - return '_cffi_from_c_int(%s, %s)' % (var, tp.name) - elif isinstance(tp, model.UnknownFloatType): - return '_cffi_from_c_double(%s)' % (var,) - elif tp.name != 'long double' and not tp.is_complex_type(): - cname = tp.name.replace(' ', '_') - if cname in ('char16_t', 'char32_t'): - self.needs_version(VERSION_CHAR16CHAR32) - return '_cffi_from_c_%s(%s)' % (cname, var) - else: - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.ArrayType): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(model.PointerType(tp.item))) - elif isinstance(tp, model.StructOrUnion): - if tp.fldnames is None: - raise TypeError("'%s' is used as %s, but is opaque" % ( - tp._get_c_name(), context)) - return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.EnumType): - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - else: - raise NotImplementedError(tp) - - # ---------- - # typedefs - - def _typedef_type(self, tp, name): - return self._global_type(tp, "(*(%s *)0)" % (name,)) - - def _generate_cpy_typedef_collecttype(self, tp, name): - self._do_collect_type(self._typedef_type(tp, name)) - - def _generate_cpy_typedef_decl(self, tp, name): - pass - - def _typedef_ctx(self, tp, name): - type_index = self._typesdict[tp] - self._lsts["typename"].append(TypenameExpr(name, type_index)) - - def _generate_cpy_typedef_ctx(self, tp, name): - tp = self._typedef_type(tp, name) - self._typedef_ctx(tp, name) - if getattr(tp, "origin", None) == "unknown_type": - self._struct_ctx(tp, tp.name, approxname=None) - elif isinstance(tp, model.NamedPointerType): - self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, - named_ptr=tp) - - # ---------- - # function declarations - - def _generate_cpy_function_collecttype(self, tp, name): - self._do_collect_type(tp.as_raw_function()) - if tp.ellipsis and not self.target_is_python: - self._do_collect_type(tp) - - def _generate_cpy_function_decl(self, tp, name): - assert not self.target_is_python - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - # cannot support vararg functions better than this: check for its - # exact type (including the fixed arguments), and build it as a - # constant function pointer (no CPython wrapper) - self._generate_cpy_constant_decl(tp, name) - return - prnt = self._prnt - numargs = len(tp.args) - if numargs == 0: - argname = 'noarg' - elif numargs == 1: - argname = 'arg0' - else: - argname = 'args' - # - # ------------------------------ - # the 'd' version of the function, only for addressof(lib, 'func') - arguments = [] - call_arguments = [] - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - arguments.append(type.get_c_name(' x%d' % i, context)) - call_arguments.append('x%d' % i) - repr_arguments = ', 
'.join(arguments) - repr_arguments = repr_arguments or 'void' - if tp.abi: - abi = tp.abi + ' ' - else: - abi = '' - name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) - prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) - prnt('{') - call_arguments = ', '.join(call_arguments) - result_code = 'return ' - if isinstance(tp.result, model.VoidType): - result_code = '' - prnt(' %s%s(%s);' % (result_code, name, call_arguments)) - prnt('}') - # - prnt('#ifndef PYPY_VERSION') # ------------------------------ - # - prnt('static PyObject *') - prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) - prnt('{') - # - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - arg = type.get_c_name(' x%d' % i, context) - prnt(' %s;' % arg) - # - localvars = set() - freelines = set() - for type in tp.args: - self._extra_local_variables(type, localvars, freelines) - for decl in sorted(localvars): - prnt(' %s;' % (decl,)) - # - if not isinstance(tp.result, model.VoidType): - result_code = 'result = ' - context = 'result of %s' % name - result_decl = ' %s;' % tp.result.get_c_name(' result', context) - prnt(result_decl) - prnt(' PyObject *pyresult;') - else: - result_decl = None - result_code = '' - # - if len(tp.args) > 1: - rng = range(len(tp.args)) - for i in rng: - prnt(' PyObject *arg%d;' % i) - prnt() - prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( - name, len(rng), len(rng), - ', '.join(['&arg%d' % i for i in rng]))) - prnt(' return NULL;') - prnt() - # - for i, type in enumerate(tp.args): - self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, - 'return NULL') - prnt() - # - prnt(' Py_BEGIN_ALLOW_THREADS') - prnt(' _cffi_restore_errno();') - call_arguments = ['x%d' % i for i in range(len(tp.args))] - call_arguments = ', '.join(call_arguments) - prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) - prnt(' _cffi_save_errno();') - prnt(' Py_END_ALLOW_THREADS') - prnt() - # - prnt(' (void)self; /* unused */') - if numargs == 0: - prnt(' (void)noarg; /* unused */') - if result_code: - prnt(' pyresult = %s;' % - self._convert_expr_from_c(tp.result, 'result', 'result type')) - for freeline in freelines: - prnt(' ' + freeline) - prnt(' return pyresult;') - else: - for freeline in freelines: - prnt(' ' + freeline) - prnt(' Py_INCREF(Py_None);') - prnt(' return Py_None;') - prnt('}') - # - prnt('#else') # ------------------------------ - # - # the PyPy version: need to replace struct/union arguments with - # pointers, and if the result is a struct/union, insert a first - # arg that is a pointer to the result. We also do that for - # complex args and return type. 
- def need_indirection(type): - return (isinstance(type, model.StructOrUnion) or - (isinstance(type, model.PrimitiveType) and - type.is_complex_type())) - difference = False - arguments = [] - call_arguments = [] - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - indirection = '' - if need_indirection(type): - indirection = '*' - difference = True - arg = type.get_c_name(' %sx%d' % (indirection, i), context) - arguments.append(arg) - call_arguments.append('%sx%d' % (indirection, i)) - tp_result = tp.result - if need_indirection(tp_result): - context = 'result of %s' % name - arg = tp_result.get_c_name(' *result', context) - arguments.insert(0, arg) - tp_result = model.void_type - result_decl = None - result_code = '*result = ' - difference = True - if difference: - repr_arguments = ', '.join(arguments) - repr_arguments = repr_arguments or 'void' - name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, - repr_arguments) - prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) - prnt('{') - if result_decl: - prnt(result_decl) - call_arguments = ', '.join(call_arguments) - prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) - if result_decl: - prnt(' return result;') - prnt('}') - else: - prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) - # - prnt('#endif') # ------------------------------ - prnt() - - def _generate_cpy_function_ctx(self, tp, name): - if tp.ellipsis and not self.target_is_python: - self._generate_cpy_constant_ctx(tp, name) - return - type_index = self._typesdict[tp.as_raw_function()] - numargs = len(tp.args) - if self.target_is_python: - meth_kind = OP_DLOPEN_FUNC - elif numargs == 0: - meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' - elif numargs == 1: - meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' - else: - meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' - self._lsts["global"].append( - GlobalExpr(name, '_cffi_f_%s' % name, - CffiOp(meth_kind, type_index), - size='_cffi_d_%s' % name)) - - # ---------- - # named structs or unions - - def _field_type(self, tp_struct, field_name, tp_field): - if isinstance(tp_field, model.ArrayType): - actual_length = tp_field.length - if actual_length == '...': - ptr_struct_name = tp_struct.get_c_name('*') - actual_length = '_cffi_array_len(((%s)0)->%s)' % ( - ptr_struct_name, field_name) - tp_item = self._field_type(tp_struct, '%s[0]' % field_name, - tp_field.item) - tp_field = model.ArrayType(tp_item, actual_length) - return tp_field - - def _struct_collecttype(self, tp): - self._do_collect_type(tp) - if self.target_is_python: - # also requires nested anon struct/unions in ABI mode, recursively - for fldtype in tp.anonymous_struct_fields(): - self._struct_collecttype(fldtype) - - def _struct_decl(self, tp, cname, approxname): - if tp.fldtypes is None: - return - prnt = self._prnt - checkfuncname = '_cffi_checkfld_%s' % (approxname,) - prnt('_CFFI_UNUSED_FN') - prnt('static void %s(%s *p)' % (checkfuncname, cname)) - prnt('{') - prnt(' /* only to generate compile-time warnings or errors */') - prnt(' (void)p;') - for fname, ftype, fbitsize, fqual in self._enum_fields(tp): - try: - if ftype.is_integer_type() or fbitsize >= 0: - # accept all integers, but complain on float or double - if fname != '': - prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " - "an integer */" % (fname, cname, fname)) - continue - # only accept exactly the type declared, except that '[]' - # is interpreted as a '*' and so will match any array length. - # (It would also match '*', but that's harder to detect...) 
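A compact sketch of the wrapper-kind choice made in _generate_cpy_function_ctx above, which maps the number of fixed arguments onto CPython's calling conventions (METH_NOARGS / METH_O / METH_VARARGS); the helper name meth_kind is illustrative:

    def meth_kind(numargs, target_is_python=False):
        if target_is_python:
            return 'OP_DLOPEN_FUNC'      # ABI mode: no C wrapper is generated
        if numargs == 0:
            return 'OP_CPYTHON_BLTN_N'   # METH_NOARGS
        if numargs == 1:
            return 'OP_CPYTHON_BLTN_O'   # METH_O
        return 'OP_CPYTHON_BLTN_V'       # METH_VARARGS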
- while (isinstance(ftype, model.ArrayType) - and (ftype.length is None or ftype.length == '...')): - ftype = ftype.item - fname = fname + '[0]' - prnt(' { %s = &p->%s; (void)tmp; }' % ( - ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), - fname)) - except VerificationError as e: - prnt(' /* %s */' % str(e)) # cannot verify it, ignore - prnt('}') - prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) - prnt() - - def _struct_ctx(self, tp, cname, approxname, named_ptr=None): - type_index = self._typesdict[tp] - reason_for_not_expanding = None - flags = [] - if isinstance(tp, model.UnionType): - flags.append("_CFFI_F_UNION") - if tp.fldtypes is None: - flags.append("_CFFI_F_OPAQUE") - reason_for_not_expanding = "opaque" - if (tp not in self.ffi._parser._included_declarations and - (named_ptr is None or - named_ptr not in self.ffi._parser._included_declarations)): - if tp.fldtypes is None: - pass # opaque - elif tp.partial or any(tp.anonymous_struct_fields()): - pass # field layout obtained silently from the C compiler - else: - flags.append("_CFFI_F_CHECK_FIELDS") - if tp.packed: - if tp.packed > 1: - raise NotImplementedError( - "%r is declared with 'pack=%r'; only 0 or 1 are " - "supported in API mode (try to use \"...;\", which " - "does not require a 'pack' declaration)" % - (tp, tp.packed)) - flags.append("_CFFI_F_PACKED") - else: - flags.append("_CFFI_F_EXTERNAL") - reason_for_not_expanding = "external" - flags = '|'.join(flags) or '0' - c_fields = [] - if reason_for_not_expanding is None: - enumfields = list(self._enum_fields(tp)) - for fldname, fldtype, fbitsize, fqual in enumfields: - fldtype = self._field_type(tp, fldname, fldtype) - self._check_not_opaque(fldtype, - "field '%s.%s'" % (tp.name, fldname)) - # cname is None for _add_missing_struct_unions() only - op = OP_NOOP - if fbitsize >= 0: - op = OP_BITFIELD - size = '%d /* bits */' % fbitsize - elif cname is None or ( - isinstance(fldtype, model.ArrayType) and - fldtype.length is None): - size = '(size_t)-1' - else: - size = 'sizeof(((%s)0)->%s)' % ( - tp.get_c_name('*') if named_ptr is None - else named_ptr.name, - fldname) - if cname is None or fbitsize >= 0: - offset = '(size_t)-1' - elif named_ptr is not None: - offset = '((char *)&((%s)4096)->%s) - (char *)4096' % ( - named_ptr.name, fldname) - else: - offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) - c_fields.append( - FieldExpr(fldname, offset, size, fbitsize, - CffiOp(op, self._typesdict[fldtype]))) - first_field_index = len(self._lsts["field"]) - self._lsts["field"].extend(c_fields) - # - if cname is None: # unknown name, for _add_missing_struct_unions - size = '(size_t)-2' - align = -2 - comment = "unnamed" - else: - if named_ptr is not None: - size = 'sizeof(*(%s)0)' % (named_ptr.name,) - align = '-1 /* unknown alignment */' - else: - size = 'sizeof(%s)' % (cname,) - align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) - comment = None - else: - size = '(size_t)-1' - align = -1 - first_field_index = -1 - comment = reason_for_not_expanding - self._lsts["struct_union"].append( - StructUnionExpr(tp.name, type_index, flags, size, align, comment, - first_field_index, c_fields)) - self._seen_struct_unions.add(tp) - - def _check_not_opaque(self, tp, location): - while isinstance(tp, model.ArrayType): - tp = tp.item - if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: - raise TypeError( - "%s is of an opaque type (not declared in cdef())" % location) - - def _add_missing_struct_unions(self): - # not very nice, but 
some struct declarations might be missing - # because they don't have any known C name. Check that they are - # not partial (we can't complete or verify them!) and emit them - # anonymously. - lst = list(self._struct_unions.items()) - lst.sort(key=lambda tp_order: tp_order[1]) - for tp, order in lst: - if tp not in self._seen_struct_unions: - if tp.partial: - raise NotImplementedError("internal inconsistency: %r is " - "partial but was not seen at " - "this point" % (tp,)) - if tp.name.startswith('$') and tp.name[1:].isdigit(): - approxname = tp.name[1:] - elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': - approxname = 'FILE' - self._typedef_ctx(tp, 'FILE') - else: - raise NotImplementedError("internal inconsistency: %r" % - (tp,)) - self._struct_ctx(tp, None, approxname) - - def _generate_cpy_struct_collecttype(self, tp, name): - self._struct_collecttype(tp) - _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype - - def _struct_names(self, tp): - cname = tp.get_c_name('') - if ' ' in cname: - return cname, cname.replace(' ', '_') - else: - return cname, '_' + cname - - def _generate_cpy_struct_decl(self, tp, name): - self._struct_decl(tp, *self._struct_names(tp)) - _generate_cpy_union_decl = _generate_cpy_struct_decl - - def _generate_cpy_struct_ctx(self, tp, name): - self._struct_ctx(tp, *self._struct_names(tp)) - _generate_cpy_union_ctx = _generate_cpy_struct_ctx - - # ---------- - # 'anonymous' declarations. These are produced for anonymous structs - # or unions; the 'name' is obtained by a typedef. - - def _generate_cpy_anonymous_collecttype(self, tp, name): - if isinstance(tp, model.EnumType): - self._generate_cpy_enum_collecttype(tp, name) - else: - self._struct_collecttype(tp) - - def _generate_cpy_anonymous_decl(self, tp, name): - if isinstance(tp, model.EnumType): - self._generate_cpy_enum_decl(tp) - else: - self._struct_decl(tp, name, 'typedef_' + name) - - def _generate_cpy_anonymous_ctx(self, tp, name): - if isinstance(tp, model.EnumType): - self._enum_ctx(tp, name) - else: - self._struct_ctx(tp, name, 'typedef_' + name) - - # ---------- - # constants, declared with "static const ..." 
- - def _generate_cpy_const(self, is_int, name, tp=None, category='const', - check_value=None): - if (category, name) in self._seen_constants: - raise VerificationError( - "duplicate declaration of %s '%s'" % (category, name)) - self._seen_constants.add((category, name)) - # - prnt = self._prnt - funcname = '_cffi_%s_%s' % (category, name) - if is_int: - prnt('static int %s(unsigned long long *o)' % funcname) - prnt('{') - prnt(' int n = (%s) <= 0;' % (name,)) - prnt(' *o = (unsigned long long)((%s) | 0);' - ' /* check that %s is an integer */' % (name, name)) - if check_value is not None: - if check_value > 0: - check_value = '%dU' % (check_value,) - prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) - prnt(' n |= 2;') - prnt(' return n;') - prnt('}') - else: - assert check_value is None - prnt('static void %s(char *o)' % funcname) - prnt('{') - prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) - prnt('}') - prnt() - - def _generate_cpy_constant_collecttype(self, tp, name): - is_int = tp.is_integer_type() - if not is_int or self.target_is_python: - self._do_collect_type(tp) - - def _generate_cpy_constant_decl(self, tp, name): - is_int = tp.is_integer_type() - self._generate_cpy_const(is_int, name, tp) - - def _generate_cpy_constant_ctx(self, tp, name): - if not self.target_is_python and tp.is_integer_type(): - type_op = CffiOp(OP_CONSTANT_INT, -1) - else: - if self.target_is_python: - const_kind = OP_DLOPEN_CONST - else: - const_kind = OP_CONSTANT - type_index = self._typesdict[tp] - type_op = CffiOp(const_kind, type_index) - self._lsts["global"].append( - GlobalExpr(name, '_cffi_const_%s' % name, type_op)) - - # ---------- - # enums - - def _generate_cpy_enum_collecttype(self, tp, name): - self._do_collect_type(tp) - - def _generate_cpy_enum_decl(self, tp, name=None): - for enumerator in tp.enumerators: - self._generate_cpy_const(True, enumerator) - - def _enum_ctx(self, tp, cname): - type_index = self._typesdict[tp] - type_op = CffiOp(OP_ENUM, -1) - if self.target_is_python: - tp.check_not_partial() - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - self._lsts["global"].append( - GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, - check_value=enumvalue)) - # - if cname is not None and '$' not in cname and not self.target_is_python: - size = "sizeof(%s)" % cname - signed = "((%s)-1) <= 0" % cname - else: - basetp = tp.build_baseinttype(self.ffi, []) - size = self.ffi.sizeof(basetp) - signed = int(int(self.ffi.cast(basetp, -1)) < 0) - allenums = ",".join(tp.enumerators) - self._lsts["enum"].append( - EnumExpr(tp.name, type_index, size, signed, allenums)) - - def _generate_cpy_enum_ctx(self, tp, name): - self._enum_ctx(tp, tp._get_c_name()) - - # ---------- - # macros: for now only for integers - - def _generate_cpy_macro_collecttype(self, tp, name): - pass - - def _generate_cpy_macro_decl(self, tp, name): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - self._generate_cpy_const(True, name, check_value=check_value) - - def _generate_cpy_macro_ctx(self, tp, name): - if tp == '...': - if self.target_is_python: - raise VerificationError( - "cannot use the syntax '...' in '#define %s ...' 
when " - "using the ABI mode" % (name,)) - check_value = None - else: - check_value = tp # an integer - type_op = CffiOp(OP_CONSTANT_INT, -1) - self._lsts["global"].append( - GlobalExpr(name, '_cffi_const_%s' % name, type_op, - check_value=check_value)) - - # ---------- - # global variables - - def _global_type(self, tp, global_name): - if isinstance(tp, model.ArrayType): - actual_length = tp.length - if actual_length == '...': - actual_length = '_cffi_array_len(%s)' % (global_name,) - tp_item = self._global_type(tp.item, '%s[0]' % global_name) - tp = model.ArrayType(tp_item, actual_length) - return tp - - def _generate_cpy_variable_collecttype(self, tp, name): - self._do_collect_type(self._global_type(tp, name)) - - def _generate_cpy_variable_decl(self, tp, name): - prnt = self._prnt - tp = self._global_type(tp, name) - if isinstance(tp, model.ArrayType) and tp.length is None: - tp = tp.item - ampersand = '' - else: - ampersand = '&' - # This code assumes that casts from "tp *" to "void *" is a - # no-op, i.e. a function that returns a "tp *" can be called - # as if it returned a "void *". This should be generally true - # on any modern machine. The only exception to that rule (on - # uncommon architectures, and as far as I can tell) might be - # if 'tp' were a function type, but that is not possible here. - # (If 'tp' is a function _pointer_ type, then casts from "fn_t - # **" to "void *" are again no-ops, as far as I can tell.) - decl = '*_cffi_var_%s(void)' % (name,) - prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) - prnt('{') - prnt(' return %s(%s);' % (ampersand, name)) - prnt('}') - prnt() - - def _generate_cpy_variable_ctx(self, tp, name): - tp = self._global_type(tp, name) - type_index = self._typesdict[tp] - if self.target_is_python: - op = OP_GLOBAL_VAR - else: - op = OP_GLOBAL_VAR_F - self._lsts["global"].append( - GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) - - # ---------- - # extern "Python" - - def _generate_cpy_extern_python_collecttype(self, tp, name): - assert isinstance(tp, model.FunctionPtrType) - self._do_collect_type(tp) - _generate_cpy_dllexport_python_collecttype = \ - _generate_cpy_extern_python_plus_c_collecttype = \ - _generate_cpy_extern_python_collecttype - - def _extern_python_decl(self, tp, name, tag_and_space): - prnt = self._prnt - if isinstance(tp.result, model.VoidType): - size_of_result = '0' - else: - context = 'result of %s' % name - size_of_result = '(int)sizeof(%s)' % ( - tp.result.get_c_name('', context),) - prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) - prnt(' { "%s.%s", %s, 0, 0 };' % ( - self.module_name, name, size_of_result)) - prnt() - # - arguments = [] - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - arg = type.get_c_name(' a%d' % i, context) - arguments.append(arg) - # - repr_arguments = ', '.join(arguments) - repr_arguments = repr_arguments or 'void' - name_and_arguments = '%s(%s)' % (name, repr_arguments) - if tp.abi == "__stdcall": - name_and_arguments = '_cffi_stdcall ' + name_and_arguments - # - def may_need_128_bits(tp): - return (isinstance(tp, model.PrimitiveType) and - tp.name == 'long double') - # - size_of_a = max(len(tp.args)*8, 8) - if may_need_128_bits(tp.result): - size_of_a = max(size_of_a, 16) - if isinstance(tp.result, model.StructOrUnion): - size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( - tp.result.get_c_name(''), size_of_a, - tp.result.get_c_name(''), size_of_a) - prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) - prnt('{') - prnt(' char a[%s];' % size_of_a) - prnt(' char *p = a;') - for i, type in enumerate(tp.args): - arg = 'a%d' % i - if (isinstance(type, model.StructOrUnion) or - may_need_128_bits(type)): - arg = '&' + arg - type = model.PointerType(type) - prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) - prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) - if not isinstance(tp.result, model.VoidType): - prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) - prnt('}') - prnt() - self._num_externpy += 1 - - def _generate_cpy_extern_python_decl(self, tp, name): - self._extern_python_decl(tp, name, 'static ') - - def _generate_cpy_dllexport_python_decl(self, tp, name): - self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') - - def _generate_cpy_extern_python_plus_c_decl(self, tp, name): - self._extern_python_decl(tp, name, '') - - def _generate_cpy_extern_python_ctx(self, tp, name): - if self.target_is_python: - raise VerificationError( - "cannot use 'extern \"Python\"' in the ABI mode") - if tp.ellipsis: - raise NotImplementedError("a vararg function is extern \"Python\"") - type_index = self._typesdict[tp] - type_op = CffiOp(OP_EXTERN_PYTHON, type_index) - self._lsts["global"].append( - GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) - - _generate_cpy_dllexport_python_ctx = \ - _generate_cpy_extern_python_plus_c_ctx = \ - _generate_cpy_extern_python_ctx - - def _print_string_literal_in_array(self, s): - prnt = self._prnt - prnt('// # NB. this is not a string because of a size limit in MSVC') - if not isinstance(s, bytes): # unicode - s = s.encode('utf-8') # -> bytes - else: - s.decode('utf-8') # got bytes, check for valid utf-8 - try: - s.decode('ascii') - except UnicodeDecodeError: - s = b'# -*- encoding: utf8 -*-\n' + s - for line in s.splitlines(True): - comment = line - if type('//') is bytes: # python2 - line = map(ord, line) # make a list of integers - else: # python3 - # type(line) is bytes, which enumerates like a list of integers - comment = ascii(comment)[1:-1] - prnt(('// ' + comment).rstrip()) - printed_line = '' - for c in line: - if len(printed_line) >= 76: - prnt(printed_line) - printed_line = '' - printed_line += '%d,' % (c,) - prnt(printed_line) - - # ---------- - # emitting the opcodes for individual types - - def _emit_bytecode_VoidType(self, tp, index): - self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) - - def _emit_bytecode_PrimitiveType(self, tp, index): - prim_index = PRIMITIVE_TO_INDEX[tp.name] - self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) - - def _emit_bytecode_UnknownIntegerType(self, tp, index): - s = ('_cffi_prim_int(sizeof(%s), (\n' - ' ((%s)-1) | 0 /* check that %s is an integer type */\n' - ' ) <= 0)' % (tp.name, tp.name, tp.name)) - self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) - - def _emit_bytecode_UnknownFloatType(self, tp, index): - s = ('_cffi_prim_float(sizeof(%s) *\n' - ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' - ' )' % (tp.name, tp.name)) - self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) - - def _emit_bytecode_RawFunctionType(self, tp, index): - self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) - index += 1 - for tp1 in tp.args: - realindex = self._typesdict[tp1] - if index != realindex: - if isinstance(tp1, model.PrimitiveType): - self._emit_bytecode_PrimitiveType(tp1, index) - 
else: - self.cffi_types[index] = CffiOp(OP_NOOP, realindex) - index += 1 - flags = int(tp.ellipsis) - if tp.abi is not None: - if tp.abi == '__stdcall': - flags |= 2 - else: - raise NotImplementedError("abi=%r" % (tp.abi,)) - self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) - - def _emit_bytecode_PointerType(self, tp, index): - self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) - - _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType - _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType - - def _emit_bytecode_FunctionPtrType(self, tp, index): - raw = tp.as_raw_function() - self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) - - def _emit_bytecode_ArrayType(self, tp, index): - item_index = self._typesdict[tp.item] - if tp.length is None: - self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) - elif tp.length == '...': - raise VerificationError( - "type %s badly placed: the '...' array length can only be " - "used on global arrays or on fields of structures" % ( - str(tp).replace('/*...*/', '...'),)) - else: - assert self.cffi_types[index + 1] == 'LEN' - self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) - self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) - - def _emit_bytecode_StructType(self, tp, index): - struct_index = self._struct_unions[tp] - self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) - _emit_bytecode_UnionType = _emit_bytecode_StructType - - def _emit_bytecode_EnumType(self, tp, index): - enum_index = self._enums[tp] - self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) - - -if sys.version_info >= (3,): - NativeIO = io.StringIO -else: - class NativeIO(io.BytesIO): - def write(self, s): - if isinstance(s, unicode): - s = s.encode('ascii') - super(NativeIO, self).write(s) - -def _is_file_like(maybefile): - # compare to xml.etree.ElementTree._get_writer - return hasattr(maybefile, 'write') - -def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): - if verbose: - print("generating %s" % (target_file,)) - recompiler = Recompiler(ffi, module_name, - target_is_python=(preamble is None)) - recompiler.collect_type_table() - recompiler.collect_step_tables() - if _is_file_like(target_file): - recompiler.write_source_to_f(target_file, preamble) - return True - f = NativeIO() - recompiler.write_source_to_f(f, preamble) - output = f.getvalue() - try: - with open(target_file, 'r') as f1: - if f1.read(len(output) + 1) != output: - raise IOError - if verbose: - print("(already up-to-date)") - return False # already up-to-date - except IOError: - tmp_file = '%s.~%d' % (target_file, os.getpid()) - with open(tmp_file, 'w') as f1: - f1.write(output) - try: - os.rename(tmp_file, target_file) - except OSError: - os.unlink(target_file) - os.rename(tmp_file, target_file) - return True - -def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): - assert preamble is not None - return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, - verbose) - -def make_py_source(ffi, module_name, target_py_file, verbose=False): - return _make_c_or_py_source(ffi, module_name, None, target_py_file, - verbose) - -def _modname_to_file(outputdir, modname, extension): - parts = modname.split('.') - try: - os.makedirs(os.path.join(outputdir, *parts[:-1])) - except OSError: - pass - parts[-1] += extension - return os.path.join(outputdir, *parts), parts - - -# Aaargh. Distutils is not tested at all for the purpose of compiling -# DLLs that are not extension modules. 
Here are some hacks to work -# around that, in the _patch_for_*() functions... - -def _patch_meth(patchlist, cls, name, new_meth): - old = getattr(cls, name) - patchlist.append((cls, name, old)) - setattr(cls, name, new_meth) - return old - -def _unpatch_meths(patchlist): - for cls, name, old_meth in reversed(patchlist): - setattr(cls, name, old_meth) - -def _patch_for_embedding(patchlist): - if sys.platform == 'win32': - # we must not remove the manifest when building for embedding! - # FUTURE: this module was removed in setuptools 74; this is likely dead code and should be removed, - # since the toolchain it supports (VS2005-2008) is also long dead. - from cffi._shimmed_dist_utils import MSVCCompiler - if MSVCCompiler is not None: - _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', - lambda self, manifest_file: manifest_file) - - if sys.platform == 'darwin': - # we must not make a '-bundle', but a '-dynamiclib' instead - from cffi._shimmed_dist_utils import CCompiler - def my_link_shared_object(self, *args, **kwds): - if '-bundle' in self.linker_so: - self.linker_so = list(self.linker_so) - i = self.linker_so.index('-bundle') - self.linker_so[i] = '-dynamiclib' - return old_link_shared_object(self, *args, **kwds) - old_link_shared_object = _patch_meth(patchlist, CCompiler, - 'link_shared_object', - my_link_shared_object) - -def _patch_for_target(patchlist, target): - from cffi._shimmed_dist_utils import build_ext - # if 'target' is different from '*', we need to patch some internal - # method to just return this 'target' value, instead of having it - # built from module_name - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - elif sys.platform == 'darwin': - target += '.dylib' - else: - target += '.so' - _patch_meth(patchlist, build_ext, 'get_ext_filename', - lambda self, ext_name: target) - - -def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, - c_file=None, source_extension='.c', extradir=None, - compiler_verbose=1, target=None, debug=None, - uses_ffiplatform=True, **kwds): - if not isinstance(module_name, str): - module_name = module_name.encode('ascii') - if ffi._windows_unicode: - ffi._apply_windows_unicode(kwds) - if preamble is not None: - if call_c_compiler and _is_file_like(c_file): - raise TypeError("Writing to file-like objects is not supported " - "with call_c_compiler=True") - embedding = (ffi._embedding is not None) - if embedding: - ffi._apply_embedding_fix(kwds) - if c_file is None: - c_file, parts = _modname_to_file(tmpdir, module_name, - source_extension) - if extradir: - parts = [extradir] + parts - ext_c_file = os.path.join(*parts) - else: - ext_c_file = c_file - # - if target is None: - if embedding: - target = '%s.*' % module_name - else: - target = '*' - # - if uses_ffiplatform: - ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) - else: - ext = None - updated = make_c_source(ffi, module_name, preamble, c_file, - verbose=compiler_verbose) - if call_c_compiler: - patchlist = [] - cwd = os.getcwd() - try: - if embedding: - _patch_for_embedding(patchlist) - if target != '*': - _patch_for_target(patchlist, target) - if compiler_verbose: - if tmpdir == '.': - msg = 'the current directory is' - else: - msg = 'setting the current directory to' - print('%s %r' % (msg, os.path.abspath(tmpdir))) - os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, - compiler_verbose, debug) - finally: - os.chdir(cwd) - _unpatch_meths(patchlist) - return outputfilename - else: - 
return ext, updated - else: - if c_file is None: - c_file, _ = _modname_to_file(tmpdir, module_name, '.py') - updated = make_py_source(ffi, module_name, c_file, - verbose=compiler_verbose) - if call_c_compiler: - return c_file - else: - return None, updated - diff --git a/venv/lib/python3.10/site-packages/cffi/setuptools_ext.py b/venv/lib/python3.10/site-packages/cffi/setuptools_ext.py deleted file mode 100644 index 681b49d..0000000 --- a/venv/lib/python3.10/site-packages/cffi/setuptools_ext.py +++ /dev/null @@ -1,216 +0,0 @@ -import os -import sys - -try: - basestring -except NameError: - # Python 3.x - basestring = str - -def error(msg): - from cffi._shimmed_dist_utils import DistutilsSetupError - raise DistutilsSetupError(msg) - - -def execfile(filename, glob): - # We use execfile() (here rewritten for Python 3) instead of - # __import__() to load the build script. The problem with - # a normal import is that in some packages, the intermediate - # __init__.py files may already try to import the file that - # we are generating. - with open(filename) as f: - src = f.read() - src += '\n' # Python 2.6 compatibility - code = compile(src, filename, 'exec') - exec(code, glob, glob) - - -def add_cffi_module(dist, mod_spec): - from cffi.api import FFI - - if not isinstance(mod_spec, basestring): - error("argument to 'cffi_modules=...' must be a str or a list of str," - " not %r" % (type(mod_spec).__name__,)) - mod_spec = str(mod_spec) - try: - build_file_name, ffi_var_name = mod_spec.split(':') - except ValueError: - error("%r must be of the form 'path/build.py:ffi_variable'" % - (mod_spec,)) - if not os.path.exists(build_file_name): - ext = '' - rewritten = build_file_name.replace('.', '/') + '.py' - if os.path.exists(rewritten): - ext = ' (rewrite cffi_modules to [%r])' % ( - rewritten + ':' + ffi_var_name,) - error("%r does not name an existing file%s" % (build_file_name, ext)) - - mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} - execfile(build_file_name, mod_vars) - - try: - ffi = mod_vars[ffi_var_name] - except KeyError: - error("%r: object %r not found in module" % (mod_spec, - ffi_var_name)) - if not isinstance(ffi, FFI): - ffi = ffi() # maybe it's a function instead of directly an ffi - if not isinstance(ffi, FFI): - error("%r is not an FFI instance (got %r)" % (mod_spec, - type(ffi).__name__)) - if not hasattr(ffi, '_assigned_source'): - error("%r: the set_source() method was not called" % (mod_spec,)) - module_name, source, source_extension, kwds = ffi._assigned_source - if ffi._windows_unicode: - kwds = kwds.copy() - ffi._apply_windows_unicode(kwds) - - if source is None: - _add_py_module(dist, ffi, module_name) - else: - _add_c_module(dist, ffi, module_name, source, source_extension, kwds) - -def _set_py_limited_api(Extension, kwds): - """ - Add py_limited_api to kwds if setuptools >= 26 is in use. - Do not alter the setting if it already exists. - Setuptools takes care of ignoring the flag on Python 2 and PyPy. - - CPython itself should ignore the flag in a debugging version - (by not listing .abi3.so in the extensions it supports), but - it doesn't so far, creating troubles. That's why we check - for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent - of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) - - On Windows, with CPython <= 3.4, it's better not to use py_limited_api - because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. - Recently (2020) we started shipping only >= 3.5 wheels, though. 
So - we'll give it another try and set py_limited_api on Windows >= 3.5. - """ - from cffi import recompiler - - if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') - and recompiler.USE_LIMITED_API): - import setuptools - try: - setuptools_major_version = int(setuptools.__version__.partition('.')[0]) - if setuptools_major_version >= 26: - kwds['py_limited_api'] = True - except ValueError: # certain development versions of setuptools - # If we don't know the version number of setuptools, we - # try to set 'py_limited_api' anyway. At worst, we get a - # warning. - kwds['py_limited_api'] = True - return kwds - -def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): - # We are a setuptools extension. Need this build_ext for py_limited_api. - from setuptools.command.build_ext import build_ext - from cffi._shimmed_dist_utils import Extension, log, mkpath - from cffi import recompiler - - allsources = ['$PLACEHOLDER'] - allsources.extend(kwds.pop('sources', [])) - kwds = _set_py_limited_api(Extension, kwds) - ext = Extension(name=module_name, sources=allsources, **kwds) - - def make_mod(tmpdir, pre_run=None): - c_file = os.path.join(tmpdir, module_name + source_extension) - log.info("generating cffi module %r" % c_file) - mkpath(tmpdir) - # a setuptools-only, API-only hook: called with the "ext" and "ffi" - # arguments just before we turn the ffi into C code. To use it, - # subclass the 'distutils.command.build_ext.build_ext' class and - # add a method 'def pre_run(self, ext, ffi)'. - if pre_run is not None: - pre_run(ext, ffi) - updated = recompiler.make_c_source(ffi, module_name, source, c_file) - if not updated: - log.info("already up-to-date") - return c_file - - if dist.ext_modules is None: - dist.ext_modules = [] - dist.ext_modules.append(ext) - - base_class = dist.cmdclass.get('build_ext', build_ext) - class build_ext_make_mod(base_class): - def run(self): - if ext.sources[0] == '$PLACEHOLDER': - pre_run = getattr(self, 'pre_run', None) - ext.sources[0] = make_mod(self.build_temp, pre_run) - base_class.run(self) - dist.cmdclass['build_ext'] = build_ext_make_mod - # NB. multiple runs here will create multiple 'build_ext_make_mod' - # classes. Even in this case the 'build_ext' command should be - # run once; but just in case, the logic above does nothing if - # called again. - - -def _add_py_module(dist, ffi, module_name): - from setuptools.command.build_py import build_py - from setuptools.command.build_ext import build_ext - from cffi._shimmed_dist_utils import log, mkpath - from cffi import recompiler - - def generate_mod(py_file): - log.info("generating cffi module %r" % py_file) - mkpath(os.path.dirname(py_file)) - updated = recompiler.make_py_source(ffi, module_name, py_file) - if not updated: - log.info("already up-to-date") - - base_class = dist.cmdclass.get('build_py', build_py) - class build_py_make_mod(base_class): - def run(self): - base_class.run(self) - module_path = module_name.split('.') - module_path[-1] += '.py' - generate_mod(os.path.join(self.build_lib, *module_path)) - def get_source_files(self): - # This is called from 'setup.py sdist' only. Exclude - # the generate .py module in this case. 
- saved_py_modules = self.py_modules - try: - if saved_py_modules: - self.py_modules = [m for m in saved_py_modules - if m != module_name] - return base_class.get_source_files(self) - finally: - self.py_modules = saved_py_modules - dist.cmdclass['build_py'] = build_py_make_mod - - # distutils and setuptools have no notion I could find of a - # generated python module. If we don't add module_name to - # dist.py_modules, then things mostly work but there are some - # combination of options (--root and --record) that will miss - # the module. So we add it here, which gives a few apparently - # harmless warnings about not finding the file outside the - # build directory. - # Then we need to hack more in get_source_files(); see above. - if dist.py_modules is None: - dist.py_modules = [] - dist.py_modules.append(module_name) - - # the following is only for "build_ext -i" - base_class_2 = dist.cmdclass.get('build_ext', build_ext) - class build_ext_make_mod(base_class_2): - def run(self): - base_class_2.run(self) - if self.inplace: - # from get_ext_fullpath() in distutils/command/build_ext.py - module_path = module_name.split('.') - package = '.'.join(module_path[:-1]) - build_py = self.get_finalized_command('build_py') - package_dir = build_py.get_package_dir(package) - file_name = module_path[-1] + '.py' - generate_mod(os.path.join(package_dir, file_name)) - dist.cmdclass['build_ext'] = build_ext_make_mod - -def cffi_modules(dist, attr, value): - assert attr == 'cffi_modules' - if isinstance(value, basestring): - value = [value] - - for cffi_module in value: - add_cffi_module(dist, cffi_module) diff --git a/venv/lib/python3.10/site-packages/cffi/vengine_cpy.py b/venv/lib/python3.10/site-packages/cffi/vengine_cpy.py deleted file mode 100644 index eb0b6f7..0000000 --- a/venv/lib/python3.10/site-packages/cffi/vengine_cpy.py +++ /dev/null @@ -1,1084 +0,0 @@ -# -# DEPRECATED: implementation for ffi.verify() -# -import sys -from . import model -from .error import VerificationError -from . import _imp_emulation as imp - - -class VCPythonEngine(object): - _class_key = 'x' - _gen_python_module = True - - def __init__(self, verifier): - self.verifier = verifier - self.ffi = verifier.ffi - self._struct_pending_verification = {} - self._types_of_builtin_functions = {} - - def patch_extension_kwds(self, kwds): - pass - - def find_module(self, module_name, path, so_suffixes): - try: - f, filename, descr = imp.find_module(module_name, path) - except ImportError: - return None - if f is not None: - f.close() - # Note that after a setuptools installation, there are both .py - # and .so files with the same basename. The code here relies on - # imp.find_module() locating the .so in priority. - if descr[0] not in so_suffixes: - return None - return filename - - def collect_types(self): - self._typesdict = {} - self._generate("collecttype") - - def _prnt(self, what=''): - self._f.write(what + '\n') - - def _gettypenum(self, type): - # a KeyError here is a bug. please report it! :-) - return self._typesdict[type] - - def _do_collect_type(self, tp): - if ((not isinstance(tp, model.PrimitiveType) - or tp.name == 'long double') - and tp not in self._typesdict): - num = len(self._typesdict) - self._typesdict[tp] = num - - def write_source_to_f(self): - self.collect_types() - # - # The new module will have a _cffi_setup() function that receives - # objects from the ffi world, and that calls some setup code in - # the module. This setup code is split in several independent - # functions, e.g. one per constant. 
The functions are "chained" - # by ending in a tail call to each other. - # - # This is further split in two chained lists, depending on if we - # can do it at import-time or if we must wait for _cffi_setup() to - # provide us with the objects. This is needed because we - # need the values of the enum constants in order to build the - # that we may have to pass to _cffi_setup(). - # - # The following two 'chained_list_constants' items contains - # the head of these two chained lists, as a string that gives the - # call to do, if any. - self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] - # - prnt = self._prnt - # first paste some standard set of lines that are mostly '#define' - prnt(cffimod_header) - prnt() - # then paste the C source given by the user, verbatim. - prnt(self.verifier.preamble) - prnt() - # - # call generate_cpy_xxx_decl(), for every xxx found from - # ffi._parser._declarations. This generates all the functions. - self._generate("decl") - # - # implement the function _cffi_setup_custom() as calling the - # head of the chained list. - self._generate_setup_custom() - prnt() - # - # produce the method table, including the entries for the - # generated Python->C function wrappers, which are done - # by generate_cpy_function_method(). - prnt('static PyMethodDef _cffi_methods[] = {') - self._generate("method") - prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') - prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') - prnt('};') - prnt() - # - # standard init. - modname = self.verifier.get_module_name() - constants = self._chained_list_constants[False] - prnt('#if PY_MAJOR_VERSION >= 3') - prnt() - prnt('static struct PyModuleDef _cffi_module_def = {') - prnt(' PyModuleDef_HEAD_INIT,') - prnt(' "%s",' % modname) - prnt(' NULL,') - prnt(' -1,') - prnt(' _cffi_methods,') - prnt(' NULL, NULL, NULL, NULL') - prnt('};') - prnt() - prnt('PyMODINIT_FUNC') - prnt('PyInit_%s(void)' % modname) - prnt('{') - prnt(' PyObject *lib;') - prnt(' lib = PyModule_Create(&_cffi_module_def);') - prnt(' if (lib == NULL)') - prnt(' return NULL;') - prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) - prnt(' Py_DECREF(lib);') - prnt(' return NULL;') - prnt(' }') - prnt(' return lib;') - prnt('}') - prnt() - prnt('#else') - prnt() - prnt('PyMODINIT_FUNC') - prnt('init%s(void)' % modname) - prnt('{') - prnt(' PyObject *lib;') - prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) - prnt(' if (lib == NULL)') - prnt(' return;') - prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) - prnt(' return;') - prnt(' return;') - prnt('}') - prnt() - prnt('#endif') - - def load_library(self, flags=None): - # XXX review all usages of 'self' here! - # import it as a new extension module - imp.acquire_lock() - try: - if hasattr(sys, "getdlopenflags"): - previous_flags = sys.getdlopenflags() - try: - if hasattr(sys, "setdlopenflags") and flags is not None: - sys.setdlopenflags(flags) - module = imp.load_dynamic(self.verifier.get_module_name(), - self.verifier.modulefilename) - except ImportError as e: - error = "importing %r: %s" % (self.verifier.modulefilename, e) - raise VerificationError(error) - finally: - if hasattr(sys, "setdlopenflags"): - sys.setdlopenflags(previous_flags) - finally: - imp.release_lock() - # - # call loading_cpy_struct() to get the struct layout inferred by - # the C compiler - self._load(module, 'loading') - # - # the C code will need the objects. Collect them in - # order in a list. 
- revmapping = dict([(value, key) - for (key, value) in self._typesdict.items()]) - lst = [revmapping[i] for i in range(len(revmapping))] - lst = list(map(self.ffi._get_cached_btype, lst)) - # - # build the FFILibrary class and instance and call _cffi_setup(). - # this will set up some fields like '_cffi_types', and only then - # it will invoke the chained list of functions that will really - # build (notably) the constant objects, as if they are - # pointers, and store them as attributes on the 'library' object. - class FFILibrary(object): - _cffi_python_module = module - _cffi_ffi = self.ffi - _cffi_dir = [] - def __dir__(self): - return FFILibrary._cffi_dir + list(self.__dict__) - library = FFILibrary() - if module._cffi_setup(lst, VerificationError, library): - import warnings - warnings.warn("reimporting %r might overwrite older definitions" - % (self.verifier.get_module_name())) - # - # finally, call the loaded_cpy_xxx() functions. This will perform - # the final adjustments, like copying the Python->C wrapper - # functions from the module to the 'library' object, and setting - # up the FFILibrary class with properties for the global C variables. - self._load(module, 'loaded', library=library) - module._cffi_original_ffi = self.ffi - module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions - return library - - def _get_declarations(self): - lst = [(key, tp) for (key, (tp, qual)) in - self.ffi._parser._declarations.items()] - lst.sort() - return lst - - def _generate(self, step_name): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - try: - method = getattr(self, '_generate_cpy_%s_%s' % (kind, - step_name)) - except AttributeError: - raise VerificationError( - "not implemented in verify(): %r" % name) - try: - method(tp, realname) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _load(self, module, step_name, **kwds): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) - try: - method(tp, realname, module, **kwds) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _generate_nothing(self, tp, name): - pass - - def _loaded_noop(self, tp, name, module, **kwds): - pass - - # ---------- - - def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): - extraarg = '' - if isinstance(tp, model.PrimitiveType): - if tp.is_integer_type() and tp.name != '_Bool': - converter = '_cffi_to_c_int' - extraarg = ', %s' % tp.name - elif tp.is_complex_type(): - raise VerificationError( - "not implemented in verify(): complex types") - else: - converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), - tp.name.replace(' ', '_')) - errvalue = '-1' - # - elif isinstance(tp, model.PointerType): - self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, - tovar, errcode) - return - # - elif isinstance(tp, (model.StructOrUnion, model.EnumType)): - # a struct (not a struct pointer) as a function argument - self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' - % (tovar, self._gettypenum(tp), fromvar)) - self._prnt(' %s;' % errcode) - return - # - elif isinstance(tp, model.FunctionPtrType): - converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') - extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) - errvalue = 'NULL' - # - else: - raise NotImplementedError(tp) - # - self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) - self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( - tovar, 
tp.get_c_name(''), errvalue)) - self._prnt(' %s;' % errcode) - - def _extra_local_variables(self, tp, localvars, freelines): - if isinstance(tp, model.PointerType): - localvars.add('Py_ssize_t datasize') - localvars.add('struct _cffi_freeme_s *large_args_free = NULL') - freelines.add('if (large_args_free != NULL)' - ' _cffi_free_array_arguments(large_args_free);') - - def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): - self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') - self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( - self._gettypenum(tp), fromvar, tovar)) - self._prnt(' if (datasize != 0) {') - self._prnt(' %s = ((size_t)datasize) <= 640 ? ' - 'alloca((size_t)datasize) : NULL;' % (tovar,)) - self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' - '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) - self._prnt(' datasize, &large_args_free) < 0)') - self._prnt(' %s;' % errcode) - self._prnt(' }') - - def _convert_expr_from_c(self, tp, var, context): - if isinstance(tp, model.PrimitiveType): - if tp.is_integer_type() and tp.name != '_Bool': - return '_cffi_from_c_int(%s, %s)' % (var, tp.name) - elif tp.name != 'long double': - return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) - else: - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.ArrayType): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(model.PointerType(tp.item))) - elif isinstance(tp, model.StructOrUnion): - if tp.fldnames is None: - raise TypeError("'%s' is used as %s, but is opaque" % ( - tp._get_c_name(), context)) - return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.EnumType): - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - else: - raise NotImplementedError(tp) - - # ---------- - # typedefs: generates no code so far - - _generate_cpy_typedef_collecttype = _generate_nothing - _generate_cpy_typedef_decl = _generate_nothing - _generate_cpy_typedef_method = _generate_nothing - _loading_cpy_typedef = _loaded_noop - _loaded_cpy_typedef = _loaded_noop - - # ---------- - # function declarations - - def _generate_cpy_function_collecttype(self, tp, name): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - self._do_collect_type(tp) - else: - # don't call _do_collect_type(tp) in this common case, - # otherwise test_autofilled_struct_as_argument fails - for type in tp.args: - self._do_collect_type(type) - self._do_collect_type(tp.result) - - def _generate_cpy_function_decl(self, tp, name): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - # cannot support vararg functions better than this: check for its - # exact type (including the fixed arguments), and build it as a - # constant function pointer (no CPython wrapper) - self._generate_cpy_const(False, name, tp) - return - prnt = self._prnt - numargs = len(tp.args) - if numargs == 0: - argname = 'noarg' - elif numargs == 1: - argname = 'arg0' - else: - argname = 'args' - prnt('static PyObject *') - prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) - prnt('{') - # - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - prnt(' %s;' % type.get_c_name(' x%d' % i, context)) - # 
- localvars = set() - freelines = set() - for type in tp.args: - self._extra_local_variables(type, localvars, freelines) - for decl in sorted(localvars): - prnt(' %s;' % (decl,)) - # - if not isinstance(tp.result, model.VoidType): - result_code = 'result = ' - context = 'result of %s' % name - prnt(' %s;' % tp.result.get_c_name(' result', context)) - prnt(' PyObject *pyresult;') - else: - result_code = '' - # - if len(tp.args) > 1: - rng = range(len(tp.args)) - for i in rng: - prnt(' PyObject *arg%d;' % i) - prnt() - prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( - 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) - prnt(' return NULL;') - prnt() - # - for i, type in enumerate(tp.args): - self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, - 'return NULL') - prnt() - # - prnt(' Py_BEGIN_ALLOW_THREADS') - prnt(' _cffi_restore_errno();') - prnt(' { %s%s(%s); }' % ( - result_code, name, - ', '.join(['x%d' % i for i in range(len(tp.args))]))) - prnt(' _cffi_save_errno();') - prnt(' Py_END_ALLOW_THREADS') - prnt() - # - prnt(' (void)self; /* unused */') - if numargs == 0: - prnt(' (void)noarg; /* unused */') - if result_code: - prnt(' pyresult = %s;' % - self._convert_expr_from_c(tp.result, 'result', 'result type')) - for freeline in freelines: - prnt(' ' + freeline) - prnt(' return pyresult;') - else: - for freeline in freelines: - prnt(' ' + freeline) - prnt(' Py_INCREF(Py_None);') - prnt(' return Py_None;') - prnt('}') - prnt() - - def _generate_cpy_function_method(self, tp, name): - if tp.ellipsis: - return - numargs = len(tp.args) - if numargs == 0: - meth = 'METH_NOARGS' - elif numargs == 1: - meth = 'METH_O' - else: - meth = 'METH_VARARGS' - self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) - - _loading_cpy_function = _loaded_noop - - def _loaded_cpy_function(self, tp, name, module, library): - if tp.ellipsis: - return - func = getattr(module, name) - setattr(library, name, func) - self._types_of_builtin_functions[func] = tp - - # ---------- - # named structs - - _generate_cpy_struct_collecttype = _generate_nothing - def _generate_cpy_struct_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'struct', name) - def _generate_cpy_struct_method(self, tp, name): - self._generate_struct_or_union_method(tp, 'struct', name) - def _loading_cpy_struct(self, tp, name, module): - self._loading_struct_or_union(tp, 'struct', name, module) - def _loaded_cpy_struct(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - _generate_cpy_union_collecttype = _generate_nothing - def _generate_cpy_union_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'union', name) - def _generate_cpy_union_method(self, tp, name): - self._generate_struct_or_union_method(tp, 'union', name) - def _loading_cpy_union(self, tp, name, module): - self._loading_struct_or_union(tp, 'union', name, module) - def _loaded_cpy_union(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - def _generate_struct_or_union_decl(self, tp, prefix, name): - if tp.fldnames is None: - return # nothing to do with opaque structs - checkfuncname = '_cffi_check_%s_%s' % (prefix, name) - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - cname = ('%s %s' % (prefix, name)).strip() - # - prnt = self._prnt - prnt('static void %s(%s *p)' % (checkfuncname, cname)) - prnt('{') - prnt(' /* only to generate compile-time warnings or errors */') - prnt(' (void)p;') - for fname, ftype, fbitsize, fqual in 
tp.enumfields(): - if (isinstance(ftype, model.PrimitiveType) - and ftype.is_integer_type()) or fbitsize >= 0: - # accept all integers, but complain on float or double - prnt(' (void)((p->%s) << 1);' % fname) - else: - # only accept exactly the type declared. - try: - prnt(' { %s = &p->%s; (void)tmp; }' % ( - ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), - fname)) - except VerificationError as e: - prnt(' /* %s */' % str(e)) # cannot verify it, ignore - prnt('}') - prnt('static PyObject *') - prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) - prnt('{') - prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) - prnt(' static Py_ssize_t nums[] = {') - prnt(' sizeof(%s),' % cname) - prnt(' offsetof(struct _cffi_aligncheck, y),') - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - prnt(' offsetof(%s, %s),' % (cname, fname)) - if isinstance(ftype, model.ArrayType) and ftype.length is None: - prnt(' 0, /* %s */' % ftype._get_c_name()) - else: - prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) - prnt(' -1') - prnt(' };') - prnt(' (void)self; /* unused */') - prnt(' (void)noarg; /* unused */') - prnt(' return _cffi_get_struct_layout(nums);') - prnt(' /* the next line is not executed, but compiled */') - prnt(' %s(0);' % (checkfuncname,)) - prnt('}') - prnt() - - def _generate_struct_or_union_method(self, tp, prefix, name): - if tp.fldnames is None: - return # nothing to do with opaque structs - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, - layoutfuncname)) - - def _loading_struct_or_union(self, tp, prefix, name, module): - if tp.fldnames is None: - return # nothing to do with opaque structs - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - # - function = getattr(module, layoutfuncname) - layout = function() - if isinstance(tp, model.StructOrUnion) and tp.partial: - # use the function()'s sizes and offsets to guide the - # layout of the struct - totalsize = layout[0] - totalalignment = layout[1] - fieldofs = layout[2::2] - fieldsize = layout[3::2] - tp.force_flatten() - assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) - tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment - else: - cname = ('%s %s' % (prefix, name)).strip() - self._struct_pending_verification[tp] = layout, cname - - def _loaded_struct_or_union(self, tp): - if tp.fldnames is None: - return # nothing to do with opaque structs - self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered - - if tp in self._struct_pending_verification: - # check that the layout sizes and offsets match the real ones - def check(realvalue, expectedvalue, msg): - if realvalue != expectedvalue: - raise VerificationError( - "%s (we have %d, but C compiler says %d)" - % (msg, expectedvalue, realvalue)) - ffi = self.ffi - BStruct = ffi._get_cached_btype(tp) - layout, cname = self._struct_pending_verification.pop(tp) - check(layout[0], ffi.sizeof(BStruct), "wrong total size") - check(layout[1], ffi.alignof(BStruct), "wrong total alignment") - i = 2 - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - check(layout[i], ffi.offsetof(BStruct, fname), - "wrong offset for field %r" % (fname,)) - if layout[i+1] != 0: - BField = ffi._get_cached_btype(ftype) - check(layout[i+1], ffi.sizeof(BField), - "wrong size for field %r" % (fname,)) - i += 2 - assert i == len(layout) - - # ---------- - # 
'anonymous' declarations. These are produced for anonymous structs - # or unions; the 'name' is obtained by a typedef. - - _generate_cpy_anonymous_collecttype = _generate_nothing - - def _generate_cpy_anonymous_decl(self, tp, name): - if isinstance(tp, model.EnumType): - self._generate_cpy_enum_decl(tp, name, '') - else: - self._generate_struct_or_union_decl(tp, '', name) - - def _generate_cpy_anonymous_method(self, tp, name): - if not isinstance(tp, model.EnumType): - self._generate_struct_or_union_method(tp, '', name) - - def _loading_cpy_anonymous(self, tp, name, module): - if isinstance(tp, model.EnumType): - self._loading_cpy_enum(tp, name, module) - else: - self._loading_struct_or_union(tp, '', name, module) - - def _loaded_cpy_anonymous(self, tp, name, module, **kwds): - if isinstance(tp, model.EnumType): - self._loaded_cpy_enum(tp, name, module, **kwds) - else: - self._loaded_struct_or_union(tp) - - # ---------- - # constants, likely declared with '#define' - - def _generate_cpy_const(self, is_int, name, tp=None, category='const', - vartp=None, delayed=True, size_too=False, - check_value=None): - prnt = self._prnt - funcname = '_cffi_%s_%s' % (category, name) - prnt('static int %s(PyObject *lib)' % funcname) - prnt('{') - prnt(' PyObject *o;') - prnt(' int res;') - if not is_int: - prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) - else: - assert category == 'const' - # - if check_value is not None: - self._check_int_constant_value(name, check_value) - # - if not is_int: - if category == 'var': - realexpr = '&' + name - else: - realexpr = name - prnt(' i = (%s);' % (realexpr,)) - prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', - 'variable type'),)) - assert delayed - else: - prnt(' o = _cffi_from_c_int_const(%s);' % name) - prnt(' if (o == NULL)') - prnt(' return -1;') - if size_too: - prnt(' {') - prnt(' PyObject *o1 = o;') - prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' - % (name,)) - prnt(' Py_DECREF(o1);') - prnt(' if (o == NULL)') - prnt(' return -1;') - prnt(' }') - prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) - prnt(' Py_DECREF(o);') - prnt(' if (res < 0)') - prnt(' return -1;') - prnt(' return %s;' % self._chained_list_constants[delayed]) - self._chained_list_constants[delayed] = funcname + '(lib)' - prnt('}') - prnt() - - def _generate_cpy_constant_collecttype(self, tp, name): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - if not is_int: - self._do_collect_type(tp) - - def _generate_cpy_constant_decl(self, tp, name): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - self._generate_cpy_const(is_int, name, tp) - - _generate_cpy_constant_method = _generate_nothing - _loading_cpy_constant = _loaded_noop - _loaded_cpy_constant = _loaded_noop - - # ---------- - # enums - - def _check_int_constant_value(self, name, value, err_prefix=''): - prnt = self._prnt - if value <= 0: - prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( - name, name, value)) - else: - prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( - name, name, value)) - prnt(' char buf[64];') - prnt(' if ((%s) <= 0)' % name) - prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) - prnt(' else') - prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % - name) - prnt(' PyErr_Format(_cffi_VerificationError,') - prnt(' "%s%s has the real value %s, not %s",') - prnt(' "%s", "%s", buf, "%d");' % ( - err_prefix, name, value)) - prnt(' return -1;') - prnt(' }') - - def _enum_funcname(self, prefix, name): - # "$enum_$1" => 
"___D_enum____D_1" - name = name.replace('$', '___D_') - return '_cffi_e_%s_%s' % (prefix, name) - - def _generate_cpy_enum_decl(self, tp, name, prefix='enum'): - if tp.partial: - for enumerator in tp.enumerators: - self._generate_cpy_const(True, enumerator, delayed=False) - return - # - funcname = self._enum_funcname(prefix, name) - prnt = self._prnt - prnt('static int %s(PyObject *lib)' % funcname) - prnt('{') - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - self._check_int_constant_value(enumerator, enumvalue, - "enum %s: " % name) - prnt(' return %s;' % self._chained_list_constants[True]) - self._chained_list_constants[True] = funcname + '(lib)' - prnt('}') - prnt() - - _generate_cpy_enum_collecttype = _generate_nothing - _generate_cpy_enum_method = _generate_nothing - - def _loading_cpy_enum(self, tp, name, module): - if tp.partial: - enumvalues = [getattr(module, enumerator) - for enumerator in tp.enumerators] - tp.enumvalues = tuple(enumvalues) - tp.partial_resolved = True - - def _loaded_cpy_enum(self, tp, name, module, library): - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - setattr(library, enumerator, enumvalue) - - # ---------- - # macros: for now only for integers - - def _generate_cpy_macro_decl(self, tp, name): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - self._generate_cpy_const(True, name, check_value=check_value) - - _generate_cpy_macro_collecttype = _generate_nothing - _generate_cpy_macro_method = _generate_nothing - _loading_cpy_macro = _loaded_noop - _loaded_cpy_macro = _loaded_noop - - # ---------- - # global variables - - def _generate_cpy_variable_collecttype(self, tp, name): - if isinstance(tp, model.ArrayType): - tp_ptr = model.PointerType(tp.item) - else: - tp_ptr = model.PointerType(tp) - self._do_collect_type(tp_ptr) - - def _generate_cpy_variable_decl(self, tp, name): - if isinstance(tp, model.ArrayType): - tp_ptr = model.PointerType(tp.item) - self._generate_cpy_const(False, name, tp, vartp=tp_ptr, - size_too = tp.length_is_unknown()) - else: - tp_ptr = model.PointerType(tp) - self._generate_cpy_const(False, name, tp_ptr, category='var') - - _generate_cpy_variable_method = _generate_nothing - _loading_cpy_variable = _loaded_noop - - def _loaded_cpy_variable(self, tp, name, module, library): - value = getattr(library, name) - if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the - # sense that "a=..." is forbidden - if tp.length_is_unknown(): - assert isinstance(value, tuple) - (value, size) = value - BItemType = self.ffi._get_cached_btype(tp.item) - length, rest = divmod(size, self.ffi.sizeof(BItemType)) - if rest != 0: - raise VerificationError( - "bad size: %r does not seem to be an array of %s" % - (name, tp.item)) - tp = tp.resolve_length(length) - # 'value' is a which we have to replace with - # a if the N is actually known - if tp.length is not None: - BArray = self.ffi._get_cached_btype(tp) - value = self.ffi.cast(BArray, value) - setattr(library, name, value) - return - # remove ptr= from the library instance, and replace - # it by a property on the class, which reads/writes into ptr[0]. 
- ptr = value - delattr(library, name) - def getter(library): - return ptr[0] - def setter(library, value): - ptr[0] = value - setattr(type(library), name, property(getter, setter)) - type(library)._cffi_dir.append(name) - - # ---------- - - def _generate_setup_custom(self): - prnt = self._prnt - prnt('static int _cffi_setup_custom(PyObject *lib)') - prnt('{') - prnt(' return %s;' % self._chained_list_constants[True]) - prnt('}') - -cffimod_header = r''' -#include -#include - -/* this block of #ifs should be kept exactly identical between - c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py - and cffi/_cffi_include.h */ -#if defined(_MSC_VER) -# include /* for alloca() */ -# if _MSC_VER < 1600 /* MSVC < 2010 */ - typedef __int8 int8_t; - typedef __int16 int16_t; - typedef __int32 int32_t; - typedef __int64 int64_t; - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - typedef unsigned __int64 uint64_t; - typedef __int8 int_least8_t; - typedef __int16 int_least16_t; - typedef __int32 int_least32_t; - typedef __int64 int_least64_t; - typedef unsigned __int8 uint_least8_t; - typedef unsigned __int16 uint_least16_t; - typedef unsigned __int32 uint_least32_t; - typedef unsigned __int64 uint_least64_t; - typedef __int8 int_fast8_t; - typedef __int16 int_fast16_t; - typedef __int32 int_fast32_t; - typedef __int64 int_fast64_t; - typedef unsigned __int8 uint_fast8_t; - typedef unsigned __int16 uint_fast16_t; - typedef unsigned __int32 uint_fast32_t; - typedef unsigned __int64 uint_fast64_t; - typedef __int64 intmax_t; - typedef unsigned __int64 uintmax_t; -# else -# include -# endif -# if _MSC_VER < 1800 /* MSVC < 2013 */ -# ifndef __cplusplus - typedef unsigned char _Bool; -# endif -# endif -# define _cffi_float_complex_t _Fcomplex /* include for it */ -# define _cffi_double_complex_t _Dcomplex /* include for it */ -#else -# include -# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) -# include -# endif -# define _cffi_float_complex_t float _Complex -# define _cffi_double_complex_t double _Complex -#endif - -#if PY_MAJOR_VERSION < 3 -# undef PyCapsule_CheckExact -# undef PyCapsule_GetPointer -# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) -# define PyCapsule_GetPointer(capsule, name) \ - (PyCObject_AsVoidPtr(capsule)) -#endif - -#if PY_MAJOR_VERSION >= 3 -# define PyInt_FromLong PyLong_FromLong -#endif - -#define _cffi_from_c_double PyFloat_FromDouble -#define _cffi_from_c_float PyFloat_FromDouble -#define _cffi_from_c_long PyInt_FromLong -#define _cffi_from_c_ulong PyLong_FromUnsignedLong -#define _cffi_from_c_longlong PyLong_FromLongLong -#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong -#define _cffi_from_c__Bool PyBool_FromLong - -#define _cffi_to_c_double PyFloat_AsDouble -#define _cffi_to_c_float PyFloat_AsDouble - -#define _cffi_from_c_int_const(x) \ - (((x) > 0) ? \ - ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ - PyInt_FromLong((long)(x)) : \ - PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ - ((long long)(x) >= (long long)LONG_MIN) ? \ - PyInt_FromLong((long)(x)) : \ - PyLong_FromLongLong((long long)(x))) - -#define _cffi_from_c_int(x, type) \ - (((type)-1) > 0 ? /* unsigned */ \ - (sizeof(type) < sizeof(long) ? \ - PyInt_FromLong((long)x) : \ - sizeof(type) == sizeof(long) ? \ - PyLong_FromUnsignedLong((unsigned long)x) : \ - PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ - (sizeof(type) <= sizeof(long) ? 
\ - PyInt_FromLong((long)x) : \ - PyLong_FromLongLong((long long)x))) - -#define _cffi_to_c_int(o, type) \ - ((type)( \ - sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ - : (type)_cffi_to_c_i8(o)) : \ - sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ - : (type)_cffi_to_c_i16(o)) : \ - sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ - : (type)_cffi_to_c_i32(o)) : \ - sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ - : (type)_cffi_to_c_i64(o)) : \ - (Py_FatalError("unsupported size for type " #type), (type)0))) - -#define _cffi_to_c_i8 \ - ((int(*)(PyObject *))_cffi_exports[1]) -#define _cffi_to_c_u8 \ - ((int(*)(PyObject *))_cffi_exports[2]) -#define _cffi_to_c_i16 \ - ((int(*)(PyObject *))_cffi_exports[3]) -#define _cffi_to_c_u16 \ - ((int(*)(PyObject *))_cffi_exports[4]) -#define _cffi_to_c_i32 \ - ((int(*)(PyObject *))_cffi_exports[5]) -#define _cffi_to_c_u32 \ - ((unsigned int(*)(PyObject *))_cffi_exports[6]) -#define _cffi_to_c_i64 \ - ((long long(*)(PyObject *))_cffi_exports[7]) -#define _cffi_to_c_u64 \ - ((unsigned long long(*)(PyObject *))_cffi_exports[8]) -#define _cffi_to_c_char \ - ((int(*)(PyObject *))_cffi_exports[9]) -#define _cffi_from_c_pointer \ - ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) -#define _cffi_to_c_pointer \ - ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) -#define _cffi_get_struct_layout \ - ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) -#define _cffi_restore_errno \ - ((void(*)(void))_cffi_exports[13]) -#define _cffi_save_errno \ - ((void(*)(void))_cffi_exports[14]) -#define _cffi_from_c_char \ - ((PyObject *(*)(char))_cffi_exports[15]) -#define _cffi_from_c_deref \ - ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) -#define _cffi_to_c \ - ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) -#define _cffi_from_c_struct \ - ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) -#define _cffi_to_c_wchar_t \ - ((wchar_t(*)(PyObject *))_cffi_exports[19]) -#define _cffi_from_c_wchar_t \ - ((PyObject *(*)(wchar_t))_cffi_exports[20]) -#define _cffi_to_c_long_double \ - ((long double(*)(PyObject *))_cffi_exports[21]) -#define _cffi_to_c__Bool \ - ((_Bool(*)(PyObject *))_cffi_exports[22]) -#define _cffi_prepare_pointer_call_argument \ - ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) -#define _cffi_convert_array_from_object \ - ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) -#define _CFFI_NUM_EXPORTS 25 - -typedef struct _ctypedescr CTypeDescrObject; - -static void *_cffi_exports[_CFFI_NUM_EXPORTS]; -static PyObject *_cffi_types, *_cffi_VerificationError; - -static int _cffi_setup_custom(PyObject *lib); /* forward */ - -static PyObject *_cffi_setup(PyObject *self, PyObject *args) -{ - PyObject *library; - int was_alive = (_cffi_types != NULL); - (void)self; /* unused */ - if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, - &library)) - return NULL; - Py_INCREF(_cffi_types); - Py_INCREF(_cffi_VerificationError); - if (_cffi_setup_custom(library) < 0) - return NULL; - return PyBool_FromLong(was_alive); -} - -union _cffi_union_alignment_u { - unsigned char m_char; - unsigned short m_short; - unsigned int m_int; - unsigned long m_long; - unsigned long long m_longlong; - float m_float; - double m_double; - long double m_longdouble; -}; - -struct _cffi_freeme_s { - struct _cffi_freeme_s *next; - union _cffi_union_alignment_u alignment; -}; - -#ifdef __GNUC__ - 
__attribute__((unused)) -#endif -static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, - char **output_data, Py_ssize_t datasize, - struct _cffi_freeme_s **freeme) -{ - char *p; - if (datasize < 0) - return -1; - - p = *output_data; - if (p == NULL) { - struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( - offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); - if (fp == NULL) - return -1; - fp->next = *freeme; - *freeme = fp; - p = *output_data = (char *)&fp->alignment; - } - memset((void *)p, 0, (size_t)datasize); - return _cffi_convert_array_from_object(p, ctptr, arg); -} - -#ifdef __GNUC__ - __attribute__((unused)) -#endif -static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) -{ - do { - void *p = (void *)freeme; - freeme = freeme->next; - PyObject_Free(p); - } while (freeme != NULL); -} - -static int _cffi_init(void) -{ - PyObject *module, *c_api_object = NULL; - - module = PyImport_ImportModule("_cffi_backend"); - if (module == NULL) - goto failure; - - c_api_object = PyObject_GetAttrString(module, "_C_API"); - if (c_api_object == NULL) - goto failure; - if (!PyCapsule_CheckExact(c_api_object)) { - PyErr_SetNone(PyExc_ImportError); - goto failure; - } - memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), - _CFFI_NUM_EXPORTS * sizeof(void *)); - - Py_DECREF(module); - Py_DECREF(c_api_object); - return 0; - - failure: - Py_XDECREF(module); - Py_XDECREF(c_api_object); - return -1; -} - -#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) - -/**********/ -''' diff --git a/venv/lib/python3.10/site-packages/cffi/vengine_gen.py b/venv/lib/python3.10/site-packages/cffi/vengine_gen.py deleted file mode 100644 index bffc821..0000000 --- a/venv/lib/python3.10/site-packages/cffi/vengine_gen.py +++ /dev/null @@ -1,679 +0,0 @@ -# -# DEPRECATED: implementation for ffi.verify() -# -import sys, os -import types - -from . import model -from .error import VerificationError - - -class VGenericEngine(object): - _class_key = 'g' - _gen_python_module = False - - def __init__(self, verifier): - self.verifier = verifier - self.ffi = verifier.ffi - self.export_symbols = [] - self._struct_pending_verification = {} - - def patch_extension_kwds(self, kwds): - # add 'export_symbols' to the dictionary. Note that we add the - # list before filling it. When we fill it, it will thus also show - # up in kwds['export_symbols']. - kwds.setdefault('export_symbols', self.export_symbols) - - def find_module(self, module_name, path, so_suffixes): - for so_suffix in so_suffixes: - basename = module_name + so_suffix - if path is None: - path = sys.path - for dirname in path: - filename = os.path.join(dirname, basename) - if os.path.isfile(filename): - return filename - - def collect_types(self): - pass # not needed in the generic engine - - def _prnt(self, what=''): - self._f.write(what + '\n') - - def write_source_to_f(self): - prnt = self._prnt - # first paste some standard set of lines that are mostly '#include' - prnt(cffimod_header) - # then paste the C source given by the user, verbatim. - prnt(self.verifier.preamble) - # - # call generate_gen_xxx_decl(), for every xxx found from - # ffi._parser._declarations. This generates all the functions. 
- self._generate('decl') - # - # on Windows, distutils insists on putting init_cffi_xyz in - # 'export_symbols', so instead of fighting it, just give up and - # give it one - if sys.platform == 'win32': - if sys.version_info >= (3,): - prefix = 'PyInit_' - else: - prefix = 'init' - modname = self.verifier.get_module_name() - prnt("void %s%s(void) { }\n" % (prefix, modname)) - - def load_library(self, flags=0): - # import it with the CFFI backend - backend = self.ffi._backend - # needs to make a path that contains '/', on Posix - filename = os.path.join(os.curdir, self.verifier.modulefilename) - module = backend.load_library(filename, flags) - # - # call loading_gen_struct() to get the struct layout inferred by - # the C compiler - self._load(module, 'loading') - - # build the FFILibrary class and instance, this is a module subclass - # because modules are expected to have usually-constant-attributes and - # in PyPy this means the JIT is able to treat attributes as constant, - # which we want. - class FFILibrary(types.ModuleType): - _cffi_generic_module = module - _cffi_ffi = self.ffi - _cffi_dir = [] - def __dir__(self): - return FFILibrary._cffi_dir - library = FFILibrary("") - # - # finally, call the loaded_gen_xxx() functions. This will set - # up the 'library' object. - self._load(module, 'loaded', library=library) - return library - - def _get_declarations(self): - lst = [(key, tp) for (key, (tp, qual)) in - self.ffi._parser._declarations.items()] - lst.sort() - return lst - - def _generate(self, step_name): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - try: - method = getattr(self, '_generate_gen_%s_%s' % (kind, - step_name)) - except AttributeError: - raise VerificationError( - "not implemented in verify(): %r" % name) - try: - method(tp, realname) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _load(self, module, step_name, **kwds): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - method = getattr(self, '_%s_gen_%s' % (step_name, kind)) - try: - method(tp, realname, module, **kwds) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _generate_nothing(self, tp, name): - pass - - def _loaded_noop(self, tp, name, module, **kwds): - pass - - # ---------- - # typedefs: generates no code so far - - _generate_gen_typedef_decl = _generate_nothing - _loading_gen_typedef = _loaded_noop - _loaded_gen_typedef = _loaded_noop - - # ---------- - # function declarations - - def _generate_gen_function_decl(self, tp, name): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - # cannot support vararg functions better than this: check for its - # exact type (including the fixed arguments), and build it as a - # constant function pointer (no _cffi_f_%s wrapper) - self._generate_gen_const(False, name, tp) - return - prnt = self._prnt - numargs = len(tp.args) - argnames = [] - for i, type in enumerate(tp.args): - indirection = '' - if isinstance(type, model.StructOrUnion): - indirection = '*' - argnames.append('%sx%d' % (indirection, i)) - context = 'argument of %s' % name - arglist = [type.get_c_name(' %s' % arg, context) - for type, arg in zip(tp.args, argnames)] - tpresult = tp.result - if isinstance(tpresult, model.StructOrUnion): - arglist.insert(0, tpresult.get_c_name(' *r', context)) - tpresult = model.void_type - arglist = ', '.join(arglist) or 'void' - wrappername = '_cffi_f_%s' % name - self.export_symbols.append(wrappername) - if tp.abi: - abi = 
tp.abi + ' ' - else: - abi = '' - funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) - context = 'result of %s' % name - prnt(tpresult.get_c_name(funcdecl, context)) - prnt('{') - # - if isinstance(tp.result, model.StructOrUnion): - result_code = '*r = ' - elif not isinstance(tp.result, model.VoidType): - result_code = 'return ' - else: - result_code = '' - prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) - prnt('}') - prnt() - - _loading_gen_function = _loaded_noop - - def _loaded_gen_function(self, tp, name, module, library): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - newfunction = self._load_constant(False, tp, name, module) - else: - indirections = [] - base_tp = tp - if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) - or isinstance(tp.result, model.StructOrUnion)): - indirect_args = [] - for i, typ in enumerate(tp.args): - if isinstance(typ, model.StructOrUnion): - typ = model.PointerType(typ) - indirections.append((i, typ)) - indirect_args.append(typ) - indirect_result = tp.result - if isinstance(indirect_result, model.StructOrUnion): - if indirect_result.fldtypes is None: - raise TypeError("'%s' is used as result type, " - "but is opaque" % ( - indirect_result._get_c_name(),)) - indirect_result = model.PointerType(indirect_result) - indirect_args.insert(0, indirect_result) - indirections.insert(0, ("result", indirect_result)) - indirect_result = model.void_type - tp = model.FunctionPtrType(tuple(indirect_args), - indirect_result, tp.ellipsis) - BFunc = self.ffi._get_cached_btype(tp) - wrappername = '_cffi_f_%s' % name - newfunction = module.load_function(BFunc, wrappername) - for i, typ in indirections: - newfunction = self._make_struct_wrapper(newfunction, i, typ, - base_tp) - setattr(library, name, newfunction) - type(library)._cffi_dir.append(name) - - def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): - backend = self.ffi._backend - BType = self.ffi._get_cached_btype(tp) - if i == "result": - ffi = self.ffi - def newfunc(*args): - res = ffi.new(BType) - oldfunc(res, *args) - return res[0] - else: - def newfunc(*args): - args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] - return oldfunc(*args) - newfunc._cffi_base_type = base_tp - return newfunc - - # ---------- - # named structs - - def _generate_gen_struct_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'struct', name) - - def _loading_gen_struct(self, tp, name, module): - self._loading_struct_or_union(tp, 'struct', name, module) - - def _loaded_gen_struct(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - def _generate_gen_union_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'union', name) - - def _loading_gen_union(self, tp, name, module): - self._loading_struct_or_union(tp, 'union', name, module) - - def _loaded_gen_union(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - def _generate_struct_or_union_decl(self, tp, prefix, name): - if tp.fldnames is None: - return # nothing to do with opaque structs - checkfuncname = '_cffi_check_%s_%s' % (prefix, name) - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - cname = ('%s %s' % (prefix, name)).strip() - # - prnt = self._prnt - prnt('static void %s(%s *p)' % (checkfuncname, cname)) - prnt('{') - prnt(' /* only to generate compile-time warnings or errors */') - prnt(' (void)p;') - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if (isinstance(ftype, model.PrimitiveType) - 
and ftype.is_integer_type()) or fbitsize >= 0: - # accept all integers, but complain on float or double - prnt(' (void)((p->%s) << 1);' % fname) - else: - # only accept exactly the type declared. - try: - prnt(' { %s = &p->%s; (void)tmp; }' % ( - ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), - fname)) - except VerificationError as e: - prnt(' /* %s */' % str(e)) # cannot verify it, ignore - prnt('}') - self.export_symbols.append(layoutfuncname) - prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) - prnt('{') - prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) - prnt(' static intptr_t nums[] = {') - prnt(' sizeof(%s),' % cname) - prnt(' offsetof(struct _cffi_aligncheck, y),') - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - prnt(' offsetof(%s, %s),' % (cname, fname)) - if isinstance(ftype, model.ArrayType) and ftype.length is None: - prnt(' 0, /* %s */' % ftype._get_c_name()) - else: - prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) - prnt(' -1') - prnt(' };') - prnt(' return nums[i];') - prnt(' /* the next line is not executed, but compiled */') - prnt(' %s(0);' % (checkfuncname,)) - prnt('}') - prnt() - - def _loading_struct_or_union(self, tp, prefix, name, module): - if tp.fldnames is None: - return # nothing to do with opaque structs - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - # - BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] - function = module.load_function(BFunc, layoutfuncname) - layout = [] - num = 0 - while True: - x = function(num) - if x < 0: break - layout.append(x) - num += 1 - if isinstance(tp, model.StructOrUnion) and tp.partial: - # use the function()'s sizes and offsets to guide the - # layout of the struct - totalsize = layout[0] - totalalignment = layout[1] - fieldofs = layout[2::2] - fieldsize = layout[3::2] - tp.force_flatten() - assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) - tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment - else: - cname = ('%s %s' % (prefix, name)).strip() - self._struct_pending_verification[tp] = layout, cname - - def _loaded_struct_or_union(self, tp): - if tp.fldnames is None: - return # nothing to do with opaque structs - self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered - - if tp in self._struct_pending_verification: - # check that the layout sizes and offsets match the real ones - def check(realvalue, expectedvalue, msg): - if realvalue != expectedvalue: - raise VerificationError( - "%s (we have %d, but C compiler says %d)" - % (msg, expectedvalue, realvalue)) - ffi = self.ffi - BStruct = ffi._get_cached_btype(tp) - layout, cname = self._struct_pending_verification.pop(tp) - check(layout[0], ffi.sizeof(BStruct), "wrong total size") - check(layout[1], ffi.alignof(BStruct), "wrong total alignment") - i = 2 - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - check(layout[i], ffi.offsetof(BStruct, fname), - "wrong offset for field %r" % (fname,)) - if layout[i+1] != 0: - BField = ffi._get_cached_btype(ftype) - check(layout[i+1], ffi.sizeof(BField), - "wrong size for field %r" % (fname,)) - i += 2 - assert i == len(layout) - - # ---------- - # 'anonymous' declarations. These are produced for anonymous structs - # or unions; the 'name' is obtained by a typedef. 
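# Illustrative sketch (not from the original file, added for clarity): an
# "anonymous" declaration of this kind comes from a cdef in which the struct
# or union carries no tag and is only reachable through a typedef name, e.g.
#
#     ffi.cdef("typedef struct { int x, y; } point_t;")
#
# so the 'name' handled by the methods below is the typedef name ('point_t').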
- - def _generate_gen_anonymous_decl(self, tp, name): - if isinstance(tp, model.EnumType): - self._generate_gen_enum_decl(tp, name, '') - else: - self._generate_struct_or_union_decl(tp, '', name) - - def _loading_gen_anonymous(self, tp, name, module): - if isinstance(tp, model.EnumType): - self._loading_gen_enum(tp, name, module, '') - else: - self._loading_struct_or_union(tp, '', name, module) - - def _loaded_gen_anonymous(self, tp, name, module, **kwds): - if isinstance(tp, model.EnumType): - self._loaded_gen_enum(tp, name, module, **kwds) - else: - self._loaded_struct_or_union(tp) - - # ---------- - # constants, likely declared with '#define' - - def _generate_gen_const(self, is_int, name, tp=None, category='const', - check_value=None): - prnt = self._prnt - funcname = '_cffi_%s_%s' % (category, name) - self.export_symbols.append(funcname) - if check_value is not None: - assert is_int - assert category == 'const' - prnt('int %s(char *out_error)' % funcname) - prnt('{') - self._check_int_constant_value(name, check_value) - prnt(' return 0;') - prnt('}') - elif is_int: - assert category == 'const' - prnt('int %s(long long *out_value)' % funcname) - prnt('{') - prnt(' *out_value = (long long)(%s);' % (name,)) - prnt(' return (%s) <= 0;' % (name,)) - prnt('}') - else: - assert tp is not None - assert check_value is None - if category == 'var': - ampersand = '&' - else: - ampersand = '' - extra = '' - if category == 'const' and isinstance(tp, model.StructOrUnion): - extra = 'const *' - ampersand = '&' - prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) - prnt('{') - prnt(' return (%s%s);' % (ampersand, name)) - prnt('}') - prnt() - - def _generate_gen_constant_decl(self, tp, name): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - self._generate_gen_const(is_int, name, tp) - - _loading_gen_constant = _loaded_noop - - def _load_constant(self, is_int, tp, name, module, check_value=None): - funcname = '_cffi_const_%s' % name - if check_value is not None: - assert is_int - self._load_known_int_constant(module, funcname) - value = check_value - elif is_int: - BType = self.ffi._typeof_locked("long long*")[0] - BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] - function = module.load_function(BFunc, funcname) - p = self.ffi.new(BType) - negative = function(p) - value = int(p[0]) - if value < 0 and not negative: - BLongLong = self.ffi._typeof_locked("long long")[0] - value += (1 << (8*self.ffi.sizeof(BLongLong))) - else: - assert check_value is None - fntypeextra = '(*)(void)' - if isinstance(tp, model.StructOrUnion): - fntypeextra = '*' + fntypeextra - BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] - function = module.load_function(BFunc, funcname) - value = function() - if isinstance(tp, model.StructOrUnion): - value = value[0] - return value - - def _loaded_gen_constant(self, tp, name, module, library): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - value = self._load_constant(is_int, tp, name, module) - setattr(library, name, value) - type(library)._cffi_dir.append(name) - - # ---------- - # enums - - def _check_int_constant_value(self, name, value): - prnt = self._prnt - if value <= 0: - prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( - name, name, value)) - else: - prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( - name, name, value)) - prnt(' char buf[64];') - prnt(' if ((%s) <= 0)' % name) - prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) - prnt(' else') - prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % - name) - prnt(' sprintf(out_error, "%s has the real value %s, not %s",') - prnt(' "%s", buf, "%d");' % (name[:100], value)) - prnt(' return -1;') - prnt(' }') - - def _load_known_int_constant(self, module, funcname): - BType = self.ffi._typeof_locked("char[]")[0] - BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] - function = module.load_function(BFunc, funcname) - p = self.ffi.new(BType, 256) - if function(p) < 0: - error = self.ffi.string(p) - if sys.version_info >= (3,): - error = str(error, 'utf-8') - raise VerificationError(error) - - def _enum_funcname(self, prefix, name): - # "$enum_$1" => "___D_enum____D_1" - name = name.replace('$', '___D_') - return '_cffi_e_%s_%s' % (prefix, name) - - def _generate_gen_enum_decl(self, tp, name, prefix='enum'): - if tp.partial: - for enumerator in tp.enumerators: - self._generate_gen_const(True, enumerator) - return - # - funcname = self._enum_funcname(prefix, name) - self.export_symbols.append(funcname) - prnt = self._prnt - prnt('int %s(char *out_error)' % funcname) - prnt('{') - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - self._check_int_constant_value(enumerator, enumvalue) - prnt(' return 0;') - prnt('}') - prnt() - - def _loading_gen_enum(self, tp, name, module, prefix='enum'): - if tp.partial: - enumvalues = [self._load_constant(True, tp, enumerator, module) - for enumerator in tp.enumerators] - tp.enumvalues = tuple(enumvalues) - tp.partial_resolved = True - else: - funcname = self._enum_funcname(prefix, name) - self._load_known_int_constant(module, funcname) - - def _loaded_gen_enum(self, tp, name, module, library): - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - setattr(library, enumerator, enumvalue) - type(library)._cffi_dir.append(enumerator) - - # ---------- - # macros: for now only for integers - - def _generate_gen_macro_decl(self, tp, name): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - self._generate_gen_const(True, name, check_value=check_value) - - _loading_gen_macro = _loaded_noop - - def _loaded_gen_macro(self, tp, name, module, library): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - value = self._load_constant(True, tp, name, module, - check_value=check_value) - setattr(library, name, value) - type(library)._cffi_dir.append(name) - - # ---------- - # global variables - - def _generate_gen_variable_decl(self, tp, name): - if isinstance(tp, model.ArrayType): - if tp.length_is_unknown(): - prnt = self._prnt - funcname = '_cffi_sizeof_%s' % (name,) - self.export_symbols.append(funcname) - prnt("size_t %s(void)" % funcname) - prnt("{") - prnt(" return sizeof(%s);" % (name,)) - prnt("}") - tp_ptr = model.PointerType(tp.item) - self._generate_gen_const(False, name, tp_ptr) - else: - tp_ptr = model.PointerType(tp) - self._generate_gen_const(False, name, tp_ptr, category='var') - - _loading_gen_variable = _loaded_noop - - def _loaded_gen_variable(self, tp, name, module, library): - if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the - # sense that "a=..." 
is forbidden - if tp.length_is_unknown(): - funcname = '_cffi_sizeof_%s' % (name,) - BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] - function = module.load_function(BFunc, funcname) - size = function() - BItemType = self.ffi._get_cached_btype(tp.item) - length, rest = divmod(size, self.ffi.sizeof(BItemType)) - if rest != 0: - raise VerificationError( - "bad size: %r does not seem to be an array of %s" % - (name, tp.item)) - tp = tp.resolve_length(length) - tp_ptr = model.PointerType(tp.item) - value = self._load_constant(False, tp_ptr, name, module) - # 'value' is a which we have to replace with - # a if the N is actually known - if tp.length is not None: - BArray = self.ffi._get_cached_btype(tp) - value = self.ffi.cast(BArray, value) - setattr(library, name, value) - type(library)._cffi_dir.append(name) - return - # remove ptr= from the library instance, and replace - # it by a property on the class, which reads/writes into ptr[0]. - funcname = '_cffi_var_%s' % name - BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] - function = module.load_function(BFunc, funcname) - ptr = function() - def getter(library): - return ptr[0] - def setter(library, value): - ptr[0] = value - setattr(type(library), name, property(getter, setter)) - type(library)._cffi_dir.append(name) - -cffimod_header = r''' -#include -#include -#include -#include -#include /* XXX for ssize_t on some platforms */ - -/* this block of #ifs should be kept exactly identical between - c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py - and cffi/_cffi_include.h */ -#if defined(_MSC_VER) -# include /* for alloca() */ -# if _MSC_VER < 1600 /* MSVC < 2010 */ - typedef __int8 int8_t; - typedef __int16 int16_t; - typedef __int32 int32_t; - typedef __int64 int64_t; - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - typedef unsigned __int64 uint64_t; - typedef __int8 int_least8_t; - typedef __int16 int_least16_t; - typedef __int32 int_least32_t; - typedef __int64 int_least64_t; - typedef unsigned __int8 uint_least8_t; - typedef unsigned __int16 uint_least16_t; - typedef unsigned __int32 uint_least32_t; - typedef unsigned __int64 uint_least64_t; - typedef __int8 int_fast8_t; - typedef __int16 int_fast16_t; - typedef __int32 int_fast32_t; - typedef __int64 int_fast64_t; - typedef unsigned __int8 uint_fast8_t; - typedef unsigned __int16 uint_fast16_t; - typedef unsigned __int32 uint_fast32_t; - typedef unsigned __int64 uint_fast64_t; - typedef __int64 intmax_t; - typedef unsigned __int64 uintmax_t; -# else -# include -# endif -# if _MSC_VER < 1800 /* MSVC < 2013 */ -# ifndef __cplusplus - typedef unsigned char _Bool; -# endif -# endif -# define _cffi_float_complex_t _Fcomplex /* include for it */ -# define _cffi_double_complex_t _Dcomplex /* include for it */ -#else -# include -# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) -# include -# endif -# define _cffi_float_complex_t float _Complex -# define _cffi_double_complex_t double _Complex -#endif -''' diff --git a/venv/lib/python3.10/site-packages/cffi/verifier.py b/venv/lib/python3.10/site-packages/cffi/verifier.py deleted file mode 100644 index e392a2b..0000000 --- a/venv/lib/python3.10/site-packages/cffi/verifier.py +++ /dev/null @@ -1,306 +0,0 @@ -# -# DEPRECATED: implementation for ffi.verify() -# -import sys, os, binascii, shutil, io -from . import __version_verifier_modules__ -from . 
import ffiplatform -from .error import VerificationError - -if sys.version_info >= (3, 3): - import importlib.machinery - def _extension_suffixes(): - return importlib.machinery.EXTENSION_SUFFIXES[:] -else: - import imp - def _extension_suffixes(): - return [suffix for suffix, _, type in imp.get_suffixes() - if type == imp.C_EXTENSION] - - -if sys.version_info >= (3,): - NativeIO = io.StringIO -else: - class NativeIO(io.BytesIO): - def write(self, s): - if isinstance(s, unicode): - s = s.encode('ascii') - super(NativeIO, self).write(s) - - -class Verifier(object): - - def __init__(self, ffi, preamble, tmpdir=None, modulename=None, - ext_package=None, tag='', force_generic_engine=False, - source_extension='.c', flags=None, relative_to=None, **kwds): - if ffi._parser._uses_new_feature: - raise VerificationError( - "feature not supported with ffi.verify(), but only " - "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) - self.ffi = ffi - self.preamble = preamble - if not modulename: - flattened_kwds = ffiplatform.flatten(kwds) - vengine_class = _locate_engine_class(ffi, force_generic_engine) - self._vengine = vengine_class(self) - self._vengine.patch_extension_kwds(kwds) - self.flags = flags - self.kwds = self.make_relative_to(kwds, relative_to) - # - if modulename: - if tag: - raise TypeError("can't specify both 'modulename' and 'tag'") - else: - key = '\x00'.join(['%d.%d' % sys.version_info[:2], - __version_verifier_modules__, - preamble, flattened_kwds] + - ffi._cdefsources) - if sys.version_info >= (3,): - key = key.encode('utf-8') - k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) - k1 = k1.lstrip('0x').rstrip('L') - k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) - k2 = k2.lstrip('0').rstrip('L') - modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, - k1, k2) - suffix = _get_so_suffixes()[0] - self.tmpdir = tmpdir or _caller_dir_pycache() - self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) - self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) - self.ext_package = ext_package - self._has_source = False - self._has_module = False - - def write_source(self, file=None): - """Write the C source code. It is produced in 'self.sourcefilename', - which can be tweaked beforehand.""" - with self.ffi._lock: - if self._has_source and file is None: - raise VerificationError( - "source code already written") - self._write_source(file) - - def compile_module(self): - """Write the C source code (if not done already) and compile it. - This produces a dynamic link library in 'self.modulefilename'.""" - with self.ffi._lock: - if self._has_module: - raise VerificationError("module already compiled") - if not self._has_source: - self._write_source() - self._compile_module() - - def load_library(self): - """Get a C module from this Verifier instance. - Returns an instance of a FFILibrary class that behaves like the - objects returned by ffi.dlopen(), but that delegates all - operations to the C module. If necessary, the C code is written - and compiled first. 
- """ - with self.ffi._lock: - if not self._has_module: - self._locate_module() - if not self._has_module: - if not self._has_source: - self._write_source() - self._compile_module() - return self._load_library() - - def get_module_name(self): - basename = os.path.basename(self.modulefilename) - # kill both the .so extension and the other .'s, as introduced - # by Python 3: 'basename.cpython-33m.so' - basename = basename.split('.', 1)[0] - # and the _d added in Python 2 debug builds --- but try to be - # conservative and not kill a legitimate _d - if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): - basename = basename[:-2] - return basename - - def get_extension(self): - if not self._has_source: - with self.ffi._lock: - if not self._has_source: - self._write_source() - sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) - modname = self.get_module_name() - return ffiplatform.get_extension(sourcename, modname, **self.kwds) - - def generates_python_module(self): - return self._vengine._gen_python_module - - def make_relative_to(self, kwds, relative_to): - if relative_to and os.path.dirname(relative_to): - dirname = os.path.dirname(relative_to) - kwds = kwds.copy() - for key in ffiplatform.LIST_OF_FILE_NAMES: - if key in kwds: - lst = kwds[key] - if not isinstance(lst, (list, tuple)): - raise TypeError("keyword '%s' should be a list or tuple" - % (key,)) - lst = [os.path.join(dirname, fn) for fn in lst] - kwds[key] = lst - return kwds - - # ---------- - - def _locate_module(self): - if not os.path.isfile(self.modulefilename): - if self.ext_package: - try: - pkg = __import__(self.ext_package, None, None, ['__doc__']) - except ImportError: - return # cannot import the package itself, give up - # (e.g. it might be called differently before installation) - path = pkg.__path__ - else: - path = None - filename = self._vengine.find_module(self.get_module_name(), path, - _get_so_suffixes()) - if filename is None: - return - self.modulefilename = filename - self._vengine.collect_types() - self._has_module = True - - def _write_source_to(self, file): - self._vengine._f = file - try: - self._vengine.write_source_to_f() - finally: - del self._vengine._f - - def _write_source(self, file=None): - if file is not None: - self._write_source_to(file) - else: - # Write our source file to an in memory file. 
- f = NativeIO() - self._write_source_to(f) - source_data = f.getvalue() - - # Determine if this matches the current file - if os.path.exists(self.sourcefilename): - with open(self.sourcefilename, "r") as fp: - needs_written = not (fp.read() == source_data) - else: - needs_written = True - - # Actually write the file out if it doesn't match - if needs_written: - _ensure_dir(self.sourcefilename) - with open(self.sourcefilename, "w") as fp: - fp.write(source_data) - - # Set this flag - self._has_source = True - - def _compile_module(self): - # compile this C source - tmpdir = os.path.dirname(self.sourcefilename) - outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) - try: - same = ffiplatform.samefile(outputfilename, self.modulefilename) - except OSError: - same = False - if not same: - _ensure_dir(self.modulefilename) - shutil.move(outputfilename, self.modulefilename) - self._has_module = True - - def _load_library(self): - assert self._has_module - if self.flags is not None: - return self._vengine.load_library(self.flags) - else: - return self._vengine.load_library() - -# ____________________________________________________________ - -_FORCE_GENERIC_ENGINE = False # for tests - -def _locate_engine_class(ffi, force_generic_engine): - if _FORCE_GENERIC_ENGINE: - force_generic_engine = True - if not force_generic_engine: - if '__pypy__' in sys.builtin_module_names: - force_generic_engine = True - else: - try: - import _cffi_backend - except ImportError: - _cffi_backend = '?' - if ffi._backend is not _cffi_backend: - force_generic_engine = True - if force_generic_engine: - from . import vengine_gen - return vengine_gen.VGenericEngine - else: - from . import vengine_cpy - return vengine_cpy.VCPythonEngine - -# ____________________________________________________________ - -_TMPDIR = None - -def _caller_dir_pycache(): - if _TMPDIR: - return _TMPDIR - result = os.environ.get('CFFI_TMPDIR') - if result: - return result - filename = sys._getframe(2).f_code.co_filename - return os.path.abspath(os.path.join(os.path.dirname(filename), - '__pycache__')) - -def set_tmpdir(dirname): - """Set the temporary directory to use instead of __pycache__.""" - global _TMPDIR - _TMPDIR = dirname - -def cleanup_tmpdir(tmpdir=None, keep_so=False): - """Clean up the temporary directory by removing all files in it - called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" - tmpdir = tmpdir or _caller_dir_pycache() - try: - filelist = os.listdir(tmpdir) - except OSError: - return - if keep_so: - suffix = '.c' # only remove .c files - else: - suffix = _get_so_suffixes()[0].lower() - for fn in filelist: - if fn.lower().startswith('_cffi_') and ( - fn.lower().endswith(suffix) or fn.lower().endswith('.c')): - try: - os.unlink(os.path.join(tmpdir, fn)) - except OSError: - pass - clean_dir = [os.path.join(tmpdir, 'build')] - for dir in clean_dir: - try: - for fn in os.listdir(dir): - fn = os.path.join(dir, fn) - if os.path.isdir(fn): - clean_dir.append(fn) - else: - os.unlink(fn) - except OSError: - pass - -def _get_so_suffixes(): - suffixes = _extension_suffixes() - if not suffixes: - # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext - if sys.platform == 'win32': - suffixes = [".pyd"] - else: - suffixes = [".so"] - - return suffixes - -def _ensure_dir(filename): - dirname = os.path.dirname(filename) - if dirname and not os.path.isdir(dirname): - os.makedirs(dirname) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/METADATA b/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/METADATA deleted file mode 100644 index 573d88b..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/METADATA +++ /dev/null @@ -1,731 +0,0 @@ -Metadata-Version: 2.4 -Name: charset-normalizer -Version: 3.4.2 -Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. -Author-email: "Ahmed R. TAHRI" -Maintainer-email: "Ahmed R. TAHRI" -License: MIT -Project-URL: Changelog, https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md -Project-URL: Documentation, https://charset-normalizer.readthedocs.io/ -Project-URL: Code, https://github.com/jawah/charset_normalizer -Project-URL: Issue tracker, https://github.com/jawah/charset_normalizer/issues -Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Text Processing :: Linguistic -Classifier: Topic :: Utilities -Classifier: Typing :: Typed -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -License-File: LICENSE -Provides-Extra: unicode-backport -Dynamic: license-file - -

-Charset Detection, for Everyone 👋
-
-The Real First Universal Charset Detector
-[project logo and download-count badge removed]
-
-Featured Packages
-[featured-package badges removed]
-
-In other language (unofficial port - by the community)
-[community-port badge removed]

-
-> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
-> I'm trying to resolve the issue by taking a new approach.
-> All IANA character set names for which the Python core library provides codecs are supported.
-
-
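As a minimal sketch of what that promise looks like in practice (the byte payload below is made up, and the reported encoding and language are only illustrative):

```python
from charset_normalizer import from_bytes

# A payload whose encoding the caller does not know (illustrative sample).
payload = "Bonjour, où êtes-vous ?".encode("cp1252")

results = from_bytes(payload)   # does not raise UnicodeDecodeError
best = results.best()           # CharsetMatch, or None if nothing fits

if best is not None:
    print(best.encoding)        # e.g. 'cp1252' (or a compatible superset)
    print(best.language)        # e.g. 'French'
    print(str(best))            # the decoded, readable text
```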

- >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<< -

-
-This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
-
-| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
-|---------------------------------------------------|:---:|:---:|:---:|
-| `Fast` | ❌ | ✅ | ✅ |
-| `Universal**` | ❌ | ✅ | ❌ |
-| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
-| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
-| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |
-| `Native Python` | ✅ | ✅ | ❌ |
-| `Detect spoken language` | ❌ | ✅ | N/A |
-| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
-| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
-| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
-

-[demo images removed: "Reading Normalized Text", "Cat Reading Text"]
-

-
-*\*\* : They are clearly using code specific to each encoding, even if that covers most of the encodings in common use*
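The comparison with Chardet also matters for migration: charset-normalizer exposes a chardet-compatible `detect()` helper (shown again in the Basic Usage section below), so switching usually means changing only the import. A small sketch, with illustrative output values:

```python
from charset_normalizer import detect

# Same dict-shaped result as chardet.detect() (values shown are examples).
raw = "Привет, мир".encode("utf_8")
result = detect(raw)

print(result["encoding"])    # e.g. 'utf-8'
print(result["language"])    # e.g. 'Russian' (may be empty on tiny inputs)
print(result["confidence"])  # a float between 0.0 and 1.0, or None
```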
- -## ⚡ Performance - -This package offer better performance than its counterpart Chardet. Here are some numbers. - -| Package | Accuracy | Mean per file (ms) | File per sec (est) | -|-----------------------------------------------|:--------:|:------------------:|:------------------:| -| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec | -| charset-normalizer | **98 %** | **10 ms** | 100 file/sec | - -| Package | 99th percentile | 95th percentile | 50th percentile | -|-----------------------------------------------|:---------------:|:---------------:|:---------------:| -| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms | -| charset-normalizer | 100 ms | 50 ms | 5 ms | - -_updated as of december 2024 using CPython 3.12_ - -Chardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload. - -> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows. -> And yes, these results might change at any time. The dataset can be updated to include more files. -> The actual delays heavily depends on your CPU capabilities. The factors should remain the same. -> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability -> (e.g. Supported Encoding) Challenge-them if you want. - -## ✨ Installation - -Using pip: - -```sh -pip install charset-normalizer -U -``` - -## 🚀 Basic Usage - -### CLI -This package comes with a CLI. - -``` -usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD] - file [file ...] - -The Real First Universal Charset Detector. Discover originating encoding used -on text file. Normalize text to unicode. - -positional arguments: - files File(s) to be analysed - -optional arguments: - -h, --help show this help message and exit - -v, --verbose Display complementary information about file if any. - Stdout will contain logs about the detection process. - -a, --with-alternative - Output complementary possibilities if any. Top-level - JSON WILL be a list. - -n, --normalize Permit to normalize input file. If not set, program - does not write anything. - -m, --minimal Only output the charset detected to STDOUT. Disabling - JSON output. - -r, --replace Replace file when trying to normalize it instead of - creating a new one. - -f, --force Replace file without asking if you are sure, use this - flag with caution. - -t THRESHOLD, --threshold THRESHOLD - Define a custom maximum amount of chaos allowed in - decoded content. 0. <= chaos <= 1. - --version Show version information and exit. -``` - -```bash -normalizer ./data/sample.1.fr.srt -``` - -or - -```bash -python -m charset_normalizer ./data/sample.1.fr.srt -``` - -🎉 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format. 
- -```json -{ - "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt", - "encoding": "cp1252", - "encoding_aliases": [ - "1252", - "windows_1252" - ], - "alternative_encodings": [ - "cp1254", - "cp1256", - "cp1258", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - "mbcs" - ], - "language": "French", - "alphabets": [ - "Basic Latin", - "Latin-1 Supplement" - ], - "has_sig_or_bom": false, - "chaos": 0.149, - "coherence": 97.152, - "unicode_path": null, - "is_preferred": true -} -``` - -### Python -*Just print out normalized text* -```python -from charset_normalizer import from_path - -results = from_path('./my_subtitle.srt') - -print(str(results.best())) -``` - -*Upgrade your code without effort* -```python -from charset_normalizer import detect -``` - -The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible. - -See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/) - -## 😇 Why - -When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a -reliable alternative using a completely different method. Also! I never back down on a good challenge! - -I **don't care** about the **originating charset** encoding, because **two different tables** can -produce **two identical rendered string.** -What I want is to get readable text, the best I can. - -In a way, **I'm brute forcing text decoding.** How cool is that ? 😎 - -Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode. - -## 🍰 How - - - Discard all charset encoding table that could not fit the binary content. - - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding. - - Extract matches with the lowest mess detected. - - Additionally, we measure coherence / probe for a language. - -**Wait a minute**, what is noise/mess and coherence according to **YOU ?** - -*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then -**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text). - I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to - improve or rewrite it. - -*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought -that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design. - -## ⚡ Known limitations - - - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters)) - - Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content. - -## ⚠️ About Python EOLs - -**If you are running:** - -- Python >=2.7,<3.5: Unsupported -- Python 3.5: charset-normalizer < 2.1 -- Python 3.6: charset-normalizer < 3.1 -- Python 3.7: charset-normalizer < 4.0 - -Upgrade your Python interpreter as soon as possible. - -## 👤 Contributing - -Contributions, issues and feature requests are very much welcome.
-Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. - -## 📝 License - -Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
-This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. - -Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) - -## 💼 For Enterprise - -Professional support for charset-normalizer is available as part of the [Tidelift -Subscription][1]. Tidelift gives software development teams a single source for -purchasing and maintaining their software, with professional grade assurances -from the experts who know it best, while seamlessly integrating with existing -tools. - -[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme - -[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297) - -# Changelog -All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - -## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02) - -### Fixed -- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591) -- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587) - -### Changed -- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8 - -## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24) - -### Changed -- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend. -- Enforce annotation delayed loading for a simpler and consistent types in the project. -- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8 - -### Added -- pre-commit configuration. -- noxfile. - -### Removed -- `build-requirements.txt` as per using `pyproject.toml` native build configuration. -- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile). -- `setup.cfg` in favor of `pyproject.toml` metadata configuration. -- Unused `utils.range_scan` function. - -### Fixed -- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572) -- Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+ - -## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08) - -### Added -- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints. -- Support for Python 3.13 (#512) - -### Fixed -- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch. -- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537) -- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. 
(#381) - -## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31) - -### Fixed -- Unintentional memory usage regression when using large payload that match several encoding (#376) -- Regression on some detection case showcased in the documentation (#371) - -### Added -- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife) - -## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22) - -### Changed -- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8 -- Improved the general detection reliability based on reports from the community - -## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30) - -### Added -- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` -- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323) - -### Removed -- (internal) Redundant utils.is_ascii function and unused function is_private_use_only -- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant - -### Changed -- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection -- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8 - -### Fixed -- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350) - -## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07) - -### Changed -- Typehint for function `from_path` no longer enforce `PathLike` as its first argument -- Minor improvement over the global detection reliability - -### Added -- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries -- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True) -- Explicit support for Python 3.12 - -### Fixed -- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289) - -## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) - -### Added -- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) - -### Removed -- Support for Python 3.6 (PR #260) - -### Changed -- Optional speedup provided by mypy/c 1.0.1 - -## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) - -### Fixed -- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) - -### Changed -- Speedup provided by mypy/c 0.990 on Python >= 3.7 - -## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Changed -- Build with static metadata using 'build' frontend -- Make the language 
detection stricter -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it -- Sphinx warnings when generating the documentation - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio - -### Changed -- Build with static metadata using 'build' frontend -- Make the language detection stricter - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' - -## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) - -### Added -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Removed -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) - -### Fixed -- Sphinx warnings when generating the documentation - -## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) - -### Changed -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Removed -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) - -### Deprecated -- Function `normalize` scheduled for removal in 3.0 - -### Changed -- Removed useless call to decode in fn is_unprintable (#206) - -### Fixed -- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) - -## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) - -### Added 
-- Output the Unicode table version when running the CLI with `--version` (PR #194) - -### Changed -- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175) -- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) - -### Fixed -- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175) -- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) - -### Removed -- Support for Python 3.5 (PR #192) - -### Deprecated -- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194) - -## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) - -### Fixed -- ASCII miss-detection on rare cases (PR #170) - -## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) - -### Added -- Explicit support for Python 3.11 (PR #164) - -### Changed -- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) - -## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) - -### Fixed -- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154) - -### Changed -- Skipping the language-detection (CD) on ASCII (PR #155) - -## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) - -### Changed -- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) - -### Fixed -- Wrong logging level applied when setting kwarg `explain` to True (PR #146) - -## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) -### Changed -- Improvement over Vietnamese detection (PR #126) -- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) -- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) -- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129) -- Code style as refactored by Sourcery-AI (PR #131) -- Minor adjustment on the MD around european words (PR #133) -- Remove and replace SRTs from assets / tests (PR #139) -- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135) - -### Fixed -- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137) -- Avoid using too insignificant chunk (PR #137) - -### Added -- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141) - -## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11) -### Added -- Add support for Kazakh (Cyrillic) language detection (PR #109) - -### Changed -- Further, improve inferring the language from a given single-byte code page (PR #112) -- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116) -- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113) -- 
Various detection improvement (MD+CD) (PR #117) - -### Removed -- Remove redundant logging entry about detected language(s) (PR #115) - -### Fixed -- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102) - -## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18) -### Fixed -- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100) -- Fix CLI crash when using --minimal output in certain cases (PR #103) - -### Changed -- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101) - -## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14) -### Changed -- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81) -- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82) -- The Unicode detection is slightly improved (PR #93) -- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91) - -### Removed -- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92) - -### Fixed -- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95) -- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96) -- The MANIFEST.in was not exhaustive (PR #78) - -## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30) -### Fixed -- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70) -- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68) -- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72) -- Submatch factoring could be wrong in rare edge cases (PR #72) -- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72) -- Fix line endings from CRLF to LF for certain project files (PR #67) - -### Changed -- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76) -- Allow fallback on specified encoding if any (PR #71) - -## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16) -### Changed -- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63) -- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64) - -## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15) -### Fixed -- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59) - -### Changed -- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57) - -## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13) -### Fixed -- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). 
(PR #55) -- Using explain=False permanently disable the verbose output in the current runtime (PR #47) -- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47) -- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52) - -### Changed -- Public function normalize default args values were not aligned with from_bytes (PR #53) - -### Added -- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47) - -## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02) -### Changed -- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet. -- Accent has been made on UTF-8 detection, should perform rather instantaneous. -- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible. -- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time) -- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+ -- utf_7 detection has been reinstated. - -### Removed -- This package no longer require anything when used with Python 3.5 (Dropped cached_property) -- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian. -- The exception hook on UnicodeDecodeError has been removed. - -### Deprecated -- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0 - -### Fixed -- The CLI output used the relative path of the file(s). Should be absolute. - -## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28) -### Fixed -- Logger configuration/usage no longer conflict with others (PR #44) - -## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21) -### Removed -- Using standard logging instead of using the package loguru. -- Dropping nose test framework in favor of the maintained pytest. -- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text. -- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version. -- Stop support for UTF-7 that does not contain a SIG. -- Dropping PrettyTable, replaced with pure JSON output in CLI. - -### Fixed -- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process. -- Not searching properly for the BOM when trying utf32/16 parent codec. - -### Changed -- Improving the package final size by compressing frequencies.json. -- Huge improvement over the larges payload. - -### Added -- CLI now produces JSON consumable output. -- Return ASCII if given sequences fit. Given reasonable confidence. - -## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13) - -### Fixed -- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40) - -## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12) - -### Fixed -- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. 
(PR #39) - -## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12) - -### Fixed -- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38) - -## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09) - -### Changed -- Amend the previous release to allow prettytable 2.0 (PR #35) - -## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08) - -### Fixed -- Fix error while using the package with a python pre-release interpreter (PR #33) - -### Changed -- Dependencies refactoring, constraints revised. - -### Added -- Add python 3.9 and 3.10 to the supported interpreters - -MIT License - -Copyright (c) 2025 TAHRI Ahmed R. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
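
Reviewer note (not part of the deleted file): the METADATA above — README excerpt, changelog, and license — documents the public API of the vendored charset_normalizer copy being removed from `venv/` in this diff. As a minimal sketch for anyone checking that the application still works once the package is installed normally (e.g. `pip install charset-normalizer`, an assumption, rather than committed under `venv/`), the entry points named in that changelog can be exercised like this; the sample payload is the one from the package's own module docstring, and the printed values are illustrative, not guaranteed.

```python
# Minimal usage sketch of the charset_normalizer API referenced in the
# changelog above. Assumes the package is installed from PyPI instead of
# the vendored venv/ copy deleted by this diff.
from charset_normalizer import from_bytes, is_binary

payload = "Bсеки човек има право на образование.".encode("utf_8")

# from_bytes() returns a CharsetMatches container; best() picks the most
# plausible CharsetMatch, or None if nothing fits (e.g. binary input).
best_guess = from_bytes(payload).best()
if best_guess is not None:
    print(best_guess.encoding)  # detected IANA-style name, e.g. "utf_8"
    print(str(best_guess))      # payload decoded with the detected encoding

# from_path("some_file.txt") does the same after opening the file in
# binary mode; is_binary() (added in 3.2.0) reuses the same heuristics,
# with fallback matches disabled, to answer "is this payload text at all?".
print(is_binary(b"\x00\x5f\x2b\x1f"))  # typically True for non-text bytes
```
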
diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/RECORD b/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/RECORD deleted file mode 100644 index 480cc09..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/RECORD +++ /dev/null @@ -1,35 +0,0 @@ -../../../bin/normalizer,sha256=yplS0QDx0gMDkc0jMkEnXe0gVBaHwR3PrepWVbiPdtc,253 -charset_normalizer-3.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -charset_normalizer-3.4.2.dist-info/METADATA,sha256=zNVWE4rQW-YZIMHKSayMypu37bLj_xayLorHl0-HAHQ,35743 -charset_normalizer-3.4.2.dist-info/RECORD,, -charset_normalizer-3.4.2.dist-info/WHEEL,sha256=FOk_y2751-fgVoYMwSN0hhQ6FRjSKtKjY35GgZZuFS0,151 -charset_normalizer-3.4.2.dist-info/entry_points.txt,sha256=8C-Y3iXIfyXQ83Tpir2B8t-XLJYpxF5xbb38d_js-h4,65 -charset_normalizer-3.4.2.dist-info/licenses/LICENSE,sha256=bQ1Bv-FwrGx9wkjJpj4lTQ-0WmDVCoJX0K-SxuJJuIc,1071 -charset_normalizer-3.4.2.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 -charset_normalizer/__init__.py,sha256=OKRxRv2Zhnqk00tqkN0c1BtJjm165fWXLydE52IKuHc,1590 -charset_normalizer/__main__.py,sha256=yzYxMR-IhKRHYwcSlavEv8oGdwxsR89mr2X09qXGdps,109 -charset_normalizer/__pycache__/__init__.cpython-310.pyc,, -charset_normalizer/__pycache__/__main__.cpython-310.pyc,, -charset_normalizer/__pycache__/api.cpython-310.pyc,, -charset_normalizer/__pycache__/cd.cpython-310.pyc,, -charset_normalizer/__pycache__/constant.cpython-310.pyc,, -charset_normalizer/__pycache__/legacy.cpython-310.pyc,, -charset_normalizer/__pycache__/md.cpython-310.pyc,, -charset_normalizer/__pycache__/models.cpython-310.pyc,, -charset_normalizer/__pycache__/utils.cpython-310.pyc,, -charset_normalizer/__pycache__/version.cpython-310.pyc,, -charset_normalizer/api.py,sha256=qBRz8mJ_R5E713R6TOyqHEdnmyxbEDnCSHvx32ubDGg,22617 -charset_normalizer/cd.py,sha256=WKTo1HDb-H9HfCDc3Bfwq5jzS25Ziy9SE2a74SgTq88,12522 -charset_normalizer/cli/__init__.py,sha256=D8I86lFk2-py45JvqxniTirSj_sFyE6sjaY_0-G1shc,136 -charset_normalizer/cli/__main__.py,sha256=dMaXG6IJXRvqq8z2tig7Qb83-BpWTln55ooiku5_uvg,12646 -charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc,, -charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc,, -charset_normalizer/constant.py,sha256=7UVY4ldYhmQMHUdgQ_sgZmzcQ0xxYxpBunqSZ-XJZ8U,42713 -charset_normalizer/legacy.py,sha256=SZE_AJujOYB1y9Y3-FkFOW9Ye4H1EHTzPbZR8DEsgl8,2287 -charset_normalizer/md.cpython-310-x86_64-linux-gnu.so,sha256=OvQ2-71Og4aDOkqaxSTifEeL3VPO4vstaP0dURjqm4o,16120 -charset_normalizer/md.py,sha256=-_oN3h3_X99nkFfqamD3yu45DC_wfk5odH0Tr_CQiXs,20145 -charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so,sha256=9r0ci-qeuMjImCAqm8ed7GRuU8eT29nV8K96MBiildA,276736 -charset_normalizer/models.py,sha256=lKXhOnIPtiakbK3i__J9wpOfzx3JDTKj7Dn3Rg0VaRI,12394 -charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -charset_normalizer/utils.py,sha256=sTejPgrdlNsKNucZfJCxJ95lMTLA0ShHLLE3n5wpT9Q,12170 -charset_normalizer/version.py,sha256=koLXlDwKAU5IlYP4qkOpFnsncn74hlphHAlBhlDKzyw,115 diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/WHEEL b/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/WHEEL deleted file mode 100644 index 0d2ee0c..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.1.0) -Root-Is-Purelib: false -Tag: 
cp310-cp310-manylinux_2_17_x86_64 -Tag: cp310-cp310-manylinux2014_x86_64 - diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/entry_points.txt deleted file mode 100644 index ec92012..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -normalizer = charset_normalizer:cli.cli_detect diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/licenses/LICENSE deleted file mode 100644 index 9725772..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2025 TAHRI Ahmed R. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/top_level.txt deleted file mode 100644 index 66958f0..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer-3.4.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -charset_normalizer diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__init__.py b/venv/lib/python3.10/site-packages/charset_normalizer/__init__.py deleted file mode 100644 index 0d3a379..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/__init__.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -Charset-Normalizer -~~~~~~~~~~~~~~ -The Real First Universal Charset Detector. -A library that helps you read text from an unknown charset encoding. -Motivated by chardet, This package is trying to resolve the issue by taking a new approach. -All IANA character set names for which the Python core library provides codecs are supported. - -Basic usage: - >>> from charset_normalizer import from_bytes - >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) - >>> best_guess = results.best() - >>> str(best_guess) - 'Bсеки човек има право на образование. Oбразованието!' - -Others methods and usages are available - see the full documentation -at . -:copyright: (c) 2021 by Ahmed TAHRI -:license: MIT, see LICENSE for more details. 
-""" - -from __future__ import annotations - -import logging - -from .api import from_bytes, from_fp, from_path, is_binary -from .legacy import detect -from .models import CharsetMatch, CharsetMatches -from .utils import set_logging_handler -from .version import VERSION, __version__ - -__all__ = ( - "from_fp", - "from_path", - "from_bytes", - "is_binary", - "detect", - "CharsetMatch", - "CharsetMatches", - "__version__", - "VERSION", - "set_logging_handler", -) - -# Attach a NullHandler to the top level logger by default -# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library - -logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__main__.py b/venv/lib/python3.10/site-packages/charset_normalizer/__main__.py deleted file mode 100644 index e0e76f7..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/__main__.py +++ /dev/null @@ -1,6 +0,0 @@ -from __future__ import annotations - -from .cli import cli_detect - -if __name__ == "__main__": - cli_detect() diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 8059901..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc deleted file mode 100644 index 62e7c20..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc deleted file mode 100644 index fe2119b..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc deleted file mode 100644 index 6aedc55..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc deleted file mode 100644 index 9f7fa44..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc deleted file mode 100644 index aabc670..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc deleted file mode 100644 index 7d2f83a..0000000 Binary files 
a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc deleted file mode 100644 index 1e411b8..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc deleted file mode 100644 index 3bda656..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc deleted file mode 100644 index 9c55dd1..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/api.py b/venv/lib/python3.10/site-packages/charset_normalizer/api.py deleted file mode 100644 index 2c8c061..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/api.py +++ /dev/null @@ -1,668 +0,0 @@ -from __future__ import annotations - -import logging -from os import PathLike -from typing import BinaryIO - -from .cd import ( - coherence_ratio, - encoding_languages, - mb_encoding_languages, - merge_coherence_ratios, -) -from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE -from .md import mess_ratio -from .models import CharsetMatch, CharsetMatches -from .utils import ( - any_specified_encoding, - cut_sequence_chunks, - iana_name, - identify_sig_or_bom, - is_cp_similar, - is_multi_byte_encoding, - should_strip_sig_or_bom, -) - -logger = logging.getLogger("charset_normalizer") -explain_handler = logging.StreamHandler() -explain_handler.setFormatter( - logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") -) - - -def from_bytes( - sequences: bytes | bytearray, - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.2, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Given a raw bytes sequence, return the best possibles charset usable to render str objects. - If there is no results, it is a strong indicator that the source is binary/not text. - By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence. - And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. - - The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page - but never take it for granted. Can improve the performance. - - You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that - purpose. - - This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. 
- By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' - toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. - Custom logging format and handler can be set manually. - """ - - if not isinstance(sequences, (bytearray, bytes)): - raise TypeError( - "Expected object of type bytes or bytearray, got: {}".format( - type(sequences) - ) - ) - - if explain: - previous_logger_level: int = logger.level - logger.addHandler(explain_handler) - logger.setLevel(TRACE) - - length: int = len(sequences) - - if length == 0: - logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level or logging.WARNING) - return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) - - if cp_isolation is not None: - logger.log( - TRACE, - "cp_isolation is set. use this flag for debugging purpose. " - "limited list of encoding allowed : %s.", - ", ".join(cp_isolation), - ) - cp_isolation = [iana_name(cp, False) for cp in cp_isolation] - else: - cp_isolation = [] - - if cp_exclusion is not None: - logger.log( - TRACE, - "cp_exclusion is set. use this flag for debugging purpose. " - "limited list of encoding excluded : %s.", - ", ".join(cp_exclusion), - ) - cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] - else: - cp_exclusion = [] - - if length <= (chunk_size * steps): - logger.log( - TRACE, - "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", - steps, - chunk_size, - length, - ) - steps = 1 - chunk_size = length - - if steps > 1 and length / steps < chunk_size: - chunk_size = int(length / steps) - - is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE - is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE - - if is_too_small_sequence: - logger.log( - TRACE, - "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( - length - ), - ) - elif is_too_large_sequence: - logger.log( - TRACE, - "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( - length - ), - ) - - prioritized_encodings: list[str] = [] - - specified_encoding: str | None = ( - any_specified_encoding(sequences) if preemptive_behaviour else None - ) - - if specified_encoding is not None: - prioritized_encodings.append(specified_encoding) - logger.log( - TRACE, - "Detected declarative mark in sequence. Priority +1 given for %s.", - specified_encoding, - ) - - tested: set[str] = set() - tested_but_hard_failure: list[str] = [] - tested_but_soft_failure: list[str] = [] - - fallback_ascii: CharsetMatch | None = None - fallback_u8: CharsetMatch | None = None - fallback_specified: CharsetMatch | None = None - - results: CharsetMatches = CharsetMatches() - - early_stop_results: CharsetMatches = CharsetMatches() - - sig_encoding, sig_payload = identify_sig_or_bom(sequences) - - if sig_encoding is not None: - prioritized_encodings.append(sig_encoding) - logger.log( - TRACE, - "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", - len(sig_payload), - sig_encoding, - ) - - prioritized_encodings.append("ascii") - - if "utf_8" not in prioritized_encodings: - prioritized_encodings.append("utf_8") - - for encoding_iana in prioritized_encodings + IANA_SUPPORTED: - if cp_isolation and encoding_iana not in cp_isolation: - continue - - if cp_exclusion and encoding_iana in cp_exclusion: - continue - - if encoding_iana in tested: - continue - - tested.add(encoding_iana) - - decoded_payload: str | None = None - bom_or_sig_available: bool = sig_encoding == encoding_iana - strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( - encoding_iana - ) - - if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", - encoding_iana, - ) - continue - if encoding_iana in {"utf_7"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", - encoding_iana, - ) - continue - - try: - is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) - except (ModuleNotFoundError, ImportError): - logger.log( - TRACE, - "Encoding %s does not provide an IncrementalDecoder", - encoding_iana, - ) - continue - - try: - if is_too_large_sequence and is_multi_byte_decoder is False: - str( - ( - sequences[: int(50e4)] - if strip_sig_or_bom is False - else sequences[len(sig_payload) : int(50e4)] - ), - encoding=encoding_iana, - ) - else: - decoded_payload = str( - ( - sequences - if strip_sig_or_bom is False - else sequences[len(sig_payload) :] - ), - encoding=encoding_iana, - ) - except (UnicodeDecodeError, LookupError) as e: - if not isinstance(e, LookupError): - logger.log( - TRACE, - "Code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - similar_soft_failure_test: bool = False - - for encoding_soft_failed in tested_but_soft_failure: - if is_cp_similar(encoding_iana, encoding_soft_failed): - similar_soft_failure_test = True - break - - if similar_soft_failure_test: - logger.log( - TRACE, - "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", - encoding_iana, - encoding_soft_failed, - ) - continue - - r_ = range( - 0 if not bom_or_sig_available else len(sig_payload), - length, - int(length / steps), - ) - - multi_byte_bonus: bool = ( - is_multi_byte_decoder - and decoded_payload is not None - and len(decoded_payload) < length - ) - - if multi_byte_bonus: - logger.log( - TRACE, - "Code page %s is a multi byte encoding table and it appear that at least one character " - "was encoded using n-bytes.", - encoding_iana, - ) - - max_chunk_gave_up: int = int(len(r_) / 4) - - max_chunk_gave_up = max(max_chunk_gave_up, 2) - early_stop_count: int = 0 - lazy_str_hard_failure = False - - md_chunks: list[str] = [] - md_ratios = [] - - try: - for chunk in cut_sequence_chunks( - sequences, - encoding_iana, - r_, - chunk_size, - bom_or_sig_available, - strip_sig_or_bom, - sig_payload, - is_multi_byte_decoder, - decoded_payload, - ): - md_chunks.append(chunk) - - md_ratios.append( - mess_ratio( - chunk, - threshold, - explain is True and 1 <= len(cp_isolation) <= 2, - ) - ) - - if md_ratios[-1] >= threshold: - early_stop_count += 1 - - if (early_stop_count >= max_chunk_gave_up) or ( - bom_or_sig_available and strip_sig_or_bom is False - ): - break - except ( - UnicodeDecodeError - ) as e: # Lazy str loading may have missed something there - logger.log( - TRACE, - "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - early_stop_count = max_chunk_gave_up - lazy_str_hard_failure = True - - # We might want to check the sequence again with the whole content - # Only if initial MD tests passes - if ( - not lazy_str_hard_failure - and is_too_large_sequence - and not is_multi_byte_decoder - ): - try: - sequences[int(50e3) :].decode(encoding_iana, errors="strict") - except UnicodeDecodeError as e: - logger.log( - TRACE, - "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 - if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: - tested_but_soft_failure.append(encoding_iana) - logger.log( - TRACE, - "%s was excluded because of initial chaos probing. Gave up %i time(s). " - "Computed mean chaos is %f %%.", - encoding_iana, - early_stop_count, - round(mean_mess_ratio * 100, ndigits=3), - ) - # Preparing those fallbacks in case we got nothing. - if ( - enable_fallback - and encoding_iana in ["ascii", "utf_8", specified_encoding] - and not lazy_str_hard_failure - ): - fallback_entry = CharsetMatch( - sequences, - encoding_iana, - threshold, - False, - [], - decoded_payload, - preemptive_declaration=specified_encoding, - ) - if encoding_iana == specified_encoding: - fallback_specified = fallback_entry - elif encoding_iana == "ascii": - fallback_ascii = fallback_entry - else: - fallback_u8 = fallback_entry - continue - - logger.log( - TRACE, - "%s passed initial chaos probing. 
Mean measured chaos is %f %%", - encoding_iana, - round(mean_mess_ratio * 100, ndigits=3), - ) - - if not is_multi_byte_decoder: - target_languages: list[str] = encoding_languages(encoding_iana) - else: - target_languages = mb_encoding_languages(encoding_iana) - - if target_languages: - logger.log( - TRACE, - "{} should target any language(s) of {}".format( - encoding_iana, str(target_languages) - ), - ) - - cd_ratios = [] - - # We shall skip the CD when its about ASCII - # Most of the time its not relevant to run "language-detection" on it. - if encoding_iana != "ascii": - for chunk in md_chunks: - chunk_languages = coherence_ratio( - chunk, - language_threshold, - ",".join(target_languages) if target_languages else None, - ) - - cd_ratios.append(chunk_languages) - - cd_ratios_merged = merge_coherence_ratios(cd_ratios) - - if cd_ratios_merged: - logger.log( - TRACE, - "We detected language {} using {}".format( - cd_ratios_merged, encoding_iana - ), - ) - - current_match = CharsetMatch( - sequences, - encoding_iana, - mean_mess_ratio, - bom_or_sig_available, - cd_ratios_merged, - ( - decoded_payload - if ( - is_too_large_sequence is False - or encoding_iana in [specified_encoding, "ascii", "utf_8"] - ) - else None - ), - preemptive_declaration=specified_encoding, - ) - - results.append(current_match) - - if ( - encoding_iana in [specified_encoding, "ascii", "utf_8"] - and mean_mess_ratio < 0.1 - ): - # If md says nothing to worry about, then... stop immediately! - if mean_mess_ratio == 0.0: - logger.debug( - "Encoding detection: %s is most likely the one.", - current_match.encoding, - ) - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([current_match]) - - early_stop_results.append(current_match) - - if ( - len(early_stop_results) - and (specified_encoding is None or specified_encoding in tested) - and "ascii" in tested - and "utf_8" in tested - ): - probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment] - logger.debug( - "Encoding detection: %s is most likely the one.", - probable_result.encoding, - ) - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - - return CharsetMatches([probable_result]) - - if encoding_iana == sig_encoding: - logger.debug( - "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " - "the beginning of the sequence.", - encoding_iana, - ) - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([results[encoding_iana]]) - - if len(results) == 0: - if fallback_u8 or fallback_ascii or fallback_specified: - logger.log( - TRACE, - "Nothing got out of the detection process. 
Using ASCII/UTF-8/Specified fallback.", - ) - - if fallback_specified: - logger.debug( - "Encoding detection: %s will be used as a fallback match", - fallback_specified.encoding, - ) - results.append(fallback_specified) - elif ( - (fallback_u8 and fallback_ascii is None) - or ( - fallback_u8 - and fallback_ascii - and fallback_u8.fingerprint != fallback_ascii.fingerprint - ) - or (fallback_u8 is not None) - ): - logger.debug("Encoding detection: utf_8 will be used as a fallback match") - results.append(fallback_u8) - elif fallback_ascii: - logger.debug("Encoding detection: ascii will be used as a fallback match") - results.append(fallback_ascii) - - if results: - logger.debug( - "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", - results.best().encoding, # type: ignore - len(results) - 1, - ) - else: - logger.debug("Encoding detection: Unable to determine any suitable charset.") - - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - - return results - - -def from_fp( - fp: BinaryIO, - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but using a file pointer that is already ready. - Will not close the file pointer. - """ - return from_bytes( - fp.read(), - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def from_path( - path: str | bytes | PathLike, # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. - Can raise IOError. - """ - with open(path, "rb") as fp: - return from_fp( - fp, - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def is_binary( - fp_or_path_or_payload: PathLike | str | BinaryIO | bytes, # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = False, -) -> bool: - """ - Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string. - Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match - are disabled to be stricter around ASCII-compatible but unlikely to be a string. 
- """ - if isinstance(fp_or_path_or_payload, (str, PathLike)): - guesses = from_path( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - elif isinstance( - fp_or_path_or_payload, - ( - bytes, - bytearray, - ), - ): - guesses = from_bytes( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - else: - guesses = from_fp( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - - return not guesses diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cd.py b/venv/lib/python3.10/site-packages/charset_normalizer/cd.py deleted file mode 100644 index 71a3ed5..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/cd.py +++ /dev/null @@ -1,395 +0,0 @@ -from __future__ import annotations - -import importlib -from codecs import IncrementalDecoder -from collections import Counter -from functools import lru_cache -from typing import Counter as TypeCounter - -from .constant import ( - FREQUENCIES, - KO_NAMES, - LANGUAGE_SUPPORTED_COUNT, - TOO_SMALL_SEQUENCE, - ZH_NAMES, -) -from .md import is_suspiciously_successive_range -from .models import CoherenceMatches -from .utils import ( - is_accentuated, - is_latin, - is_multi_byte_encoding, - is_unicode_range_secondary, - unicode_range, -) - - -def encoding_unicode_range(iana_name: str) -> list[str]: - """ - Return associated unicode ranges in a single byte code page. - """ - if is_multi_byte_encoding(iana_name): - raise OSError("Function not supported on multi-byte code page") - - decoder = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder - - p: IncrementalDecoder = decoder(errors="ignore") - seen_ranges: dict[str, int] = {} - character_count: int = 0 - - for i in range(0x40, 0xFF): - chunk: str = p.decode(bytes([i])) - - if chunk: - character_range: str | None = unicode_range(chunk) - - if character_range is None: - continue - - if is_unicode_range_secondary(character_range) is False: - if character_range not in seen_ranges: - seen_ranges[character_range] = 0 - seen_ranges[character_range] += 1 - character_count += 1 - - return sorted( - [ - character_range - for character_range in seen_ranges - if seen_ranges[character_range] / character_count >= 0.15 - ] - ) - - -def unicode_range_languages(primary_range: str) -> list[str]: - """ - Return inferred languages used with a unicode range. - """ - languages: list[str] = [] - - for language, characters in FREQUENCIES.items(): - for character in characters: - if unicode_range(character) == primary_range: - languages.append(language) - break - - return languages - - -@lru_cache() -def encoding_languages(iana_name: str) -> list[str]: - """ - Single-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. 
- """ - unicode_ranges: list[str] = encoding_unicode_range(iana_name) - primary_range: str | None = None - - for specified_range in unicode_ranges: - if "Latin" not in specified_range: - primary_range = specified_range - break - - if primary_range is None: - return ["Latin Based"] - - return unicode_range_languages(primary_range) - - -@lru_cache() -def mb_encoding_languages(iana_name: str) -> list[str]: - """ - Multi-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. - """ - if ( - iana_name.startswith("shift_") - or iana_name.startswith("iso2022_jp") - or iana_name.startswith("euc_j") - or iana_name == "cp932" - ): - return ["Japanese"] - if iana_name.startswith("gb") or iana_name in ZH_NAMES: - return ["Chinese"] - if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: - return ["Korean"] - - return [] - - -@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) -def get_target_features(language: str) -> tuple[bool, bool]: - """ - Determine main aspects from a supported language if it contains accents and if is pure Latin. - """ - target_have_accents: bool = False - target_pure_latin: bool = True - - for character in FREQUENCIES[language]: - if not target_have_accents and is_accentuated(character): - target_have_accents = True - if target_pure_latin and is_latin(character) is False: - target_pure_latin = False - - return target_have_accents, target_pure_latin - - -def alphabet_languages( - characters: list[str], ignore_non_latin: bool = False -) -> list[str]: - """ - Return associated languages associated to given characters. - """ - languages: list[tuple[str, float]] = [] - - source_have_accents = any(is_accentuated(character) for character in characters) - - for language, language_characters in FREQUENCIES.items(): - target_have_accents, target_pure_latin = get_target_features(language) - - if ignore_non_latin and target_pure_latin is False: - continue - - if target_have_accents is False and source_have_accents: - continue - - character_count: int = len(language_characters) - - character_match_count: int = len( - [c for c in language_characters if c in characters] - ) - - ratio: float = character_match_count / character_count - - if ratio >= 0.2: - languages.append((language, ratio)) - - languages = sorted(languages, key=lambda x: x[1], reverse=True) - - return [compatible_language[0] for compatible_language in languages] - - -def characters_popularity_compare( - language: str, ordered_characters: list[str] -) -> float: - """ - Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. - The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). - Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
- """ - if language not in FREQUENCIES: - raise ValueError(f"{language} not available") - - character_approved_count: int = 0 - FREQUENCIES_language_set = set(FREQUENCIES[language]) - - ordered_characters_count: int = len(ordered_characters) - target_language_characters_count: int = len(FREQUENCIES[language]) - - large_alphabet: bool = target_language_characters_count > 26 - - for character, character_rank in zip( - ordered_characters, range(0, ordered_characters_count) - ): - if character not in FREQUENCIES_language_set: - continue - - character_rank_in_language: int = FREQUENCIES[language].index(character) - expected_projection_ratio: float = ( - target_language_characters_count / ordered_characters_count - ) - character_rank_projection: int = int(character_rank * expected_projection_ratio) - - if ( - large_alphabet is False - and abs(character_rank_projection - character_rank_in_language) > 4 - ): - continue - - if ( - large_alphabet is True - and abs(character_rank_projection - character_rank_in_language) - < target_language_characters_count / 3 - ): - character_approved_count += 1 - continue - - characters_before_source: list[str] = FREQUENCIES[language][ - 0:character_rank_in_language - ] - characters_after_source: list[str] = FREQUENCIES[language][ - character_rank_in_language: - ] - characters_before: list[str] = ordered_characters[0:character_rank] - characters_after: list[str] = ordered_characters[character_rank:] - - before_match_count: int = len( - set(characters_before) & set(characters_before_source) - ) - - after_match_count: int = len( - set(characters_after) & set(characters_after_source) - ) - - if len(characters_before_source) == 0 and before_match_count <= 4: - character_approved_count += 1 - continue - - if len(characters_after_source) == 0 and after_match_count <= 4: - character_approved_count += 1 - continue - - if ( - before_match_count / len(characters_before_source) >= 0.4 - or after_match_count / len(characters_after_source) >= 0.4 - ): - character_approved_count += 1 - continue - - return character_approved_count / len(ordered_characters) - - -def alpha_unicode_split(decoded_sequence: str) -> list[str]: - """ - Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. - Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; - One containing the latin letters and the other hebrew. - """ - layers: dict[str, str] = {} - - for character in decoded_sequence: - if character.isalpha() is False: - continue - - character_range: str | None = unicode_range(character) - - if character_range is None: - continue - - layer_target_range: str | None = None - - for discovered_range in layers: - if ( - is_suspiciously_successive_range(discovered_range, character_range) - is False - ): - layer_target_range = discovered_range - break - - if layer_target_range is None: - layer_target_range = character_range - - if layer_target_range not in layers: - layers[layer_target_range] = character.lower() - continue - - layers[layer_target_range] += character.lower() - - return list(layers.values()) - - -def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches: - """ - This function merge results previously given by the function coherence_ratio. - The return type is the same as coherence_ratio. 
- """ - per_language_ratios: dict[str, list[float]] = {} - for result in results: - for sub_result in result: - language, ratio = sub_result - if language not in per_language_ratios: - per_language_ratios[language] = [ratio] - continue - per_language_ratios[language].append(ratio) - - merge = [ - ( - language, - round( - sum(per_language_ratios[language]) / len(per_language_ratios[language]), - 4, - ), - ) - for language in per_language_ratios - ] - - return sorted(merge, key=lambda x: x[1], reverse=True) - - -def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: - """ - We shall NOT return "English—" in CoherenceMatches because it is an alternative - of "English". This function only keeps the best match and remove the em-dash in it. - """ - index_results: dict[str, list[float]] = dict() - - for result in results: - language, ratio = result - no_em_name: str = language.replace("—", "") - - if no_em_name not in index_results: - index_results[no_em_name] = [] - - index_results[no_em_name].append(ratio) - - if any(len(index_results[e]) > 1 for e in index_results): - filtered_results: CoherenceMatches = [] - - for language in index_results: - filtered_results.append((language, max(index_results[language]))) - - return filtered_results - - return results - - -@lru_cache(maxsize=2048) -def coherence_ratio( - decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None -) -> CoherenceMatches: - """ - Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. - A layer = Character extraction by alphabets/ranges. - """ - - results: list[tuple[str, float]] = [] - ignore_non_latin: bool = False - - sufficient_match_count: int = 0 - - lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] - if "Latin Based" in lg_inclusion_list: - ignore_non_latin = True - lg_inclusion_list.remove("Latin Based") - - for layer in alpha_unicode_split(decoded_sequence): - sequence_frequencies: TypeCounter[str] = Counter(layer) - most_common = sequence_frequencies.most_common() - - character_count: int = sum(o for c, o in most_common) - - if character_count <= TOO_SMALL_SEQUENCE: - continue - - popular_character_ordered: list[str] = [c for c, o in most_common] - - for language in lg_inclusion_list or alphabet_languages( - popular_character_ordered, ignore_non_latin - ): - ratio: float = characters_popularity_compare( - language, popular_character_ordered - ) - - if ratio < threshold: - continue - elif ratio >= 0.8: - sufficient_match_count += 1 - - results.append((language, round(ratio, 4))) - - if sufficient_match_count >= 3: - break - - return sorted( - filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True - ) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py b/venv/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py deleted file mode 100644 index 543a5a4..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import annotations - -from .__main__ import cli_detect, query_yes_no - -__all__ = ( - "cli_detect", - "query_yes_no", -) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py b/venv/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py deleted file mode 100644 index cb64156..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py +++ /dev/null @@ -1,381 +0,0 @@ -from __future__ import annotations - 
-import argparse -import sys -import typing -from json import dumps -from os.path import abspath, basename, dirname, join, realpath -from platform import python_version -from unicodedata import unidata_version - -import charset_normalizer.md as md_module -from charset_normalizer import from_fp -from charset_normalizer.models import CliDetectionResult -from charset_normalizer.version import __version__ - - -def query_yes_no(question: str, default: str = "yes") -> bool: - """Ask a yes/no question via input() and return their answer. - - "question" is a string that is presented to the user. - "default" is the presumed answer if the user just hits . - It must be "yes" (the default), "no" or None (meaning - an answer is required of the user). - - The "answer" return value is True for "yes" or False for "no". - - Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input - """ - valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} - if default is None: - prompt = " [y/n] " - elif default == "yes": - prompt = " [Y/n] " - elif default == "no": - prompt = " [y/N] " - else: - raise ValueError("invalid default answer: '%s'" % default) - - while True: - sys.stdout.write(question + prompt) - choice = input().lower() - if default is not None and choice == "": - return valid[default] - elif choice in valid: - return valid[choice] - else: - sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n") - - -class FileType: - """Factory for creating file object types - - Instances of FileType are typically passed as type= arguments to the - ArgumentParser add_argument() method. - - Keyword Arguments: - - mode -- A string indicating how the file is to be opened. Accepts the - same values as the builtin open() function. - - bufsize -- The file's desired buffer size. Accepts the same values as - the builtin open() function. - - encoding -- The file's encoding. Accepts the same values as the - builtin open() function. - - errors -- A string indicating how encoding and decoding errors are to - be handled. Accepts the same value as the builtin open() function. 
- - Backported from CPython 3.12 - """ - - def __init__( - self, - mode: str = "r", - bufsize: int = -1, - encoding: str | None = None, - errors: str | None = None, - ): - self._mode = mode - self._bufsize = bufsize - self._encoding = encoding - self._errors = errors - - def __call__(self, string: str) -> typing.IO: # type: ignore[type-arg] - # the special argument "-" means sys.std{in,out} - if string == "-": - if "r" in self._mode: - return sys.stdin.buffer if "b" in self._mode else sys.stdin - elif any(c in self._mode for c in "wax"): - return sys.stdout.buffer if "b" in self._mode else sys.stdout - else: - msg = f'argument "-" with mode {self._mode}' - raise ValueError(msg) - - # all other arguments are used as file names - try: - return open(string, self._mode, self._bufsize, self._encoding, self._errors) - except OSError as e: - message = f"can't open '{string}': {e}" - raise argparse.ArgumentTypeError(message) - - def __repr__(self) -> str: - args = self._mode, self._bufsize - kwargs = [("encoding", self._encoding), ("errors", self._errors)] - args_str = ", ".join( - [repr(arg) for arg in args if arg != -1] - + [f"{kw}={arg!r}" for kw, arg in kwargs if arg is not None] - ) - return f"{type(self).__name__}({args_str})" - - -def cli_detect(argv: list[str] | None = None) -> int: - """ - CLI assistant using ARGV and ArgumentParser - :param argv: - :return: 0 if everything is fine, anything else equal trouble - """ - parser = argparse.ArgumentParser( - description="The Real First Universal Charset Detector. " - "Discover originating encoding used on text file. " - "Normalize text to unicode." - ) - - parser.add_argument( - "files", type=FileType("rb"), nargs="+", help="File(s) to be analysed" - ) - parser.add_argument( - "-v", - "--verbose", - action="store_true", - default=False, - dest="verbose", - help="Display complementary information about file if any. " - "Stdout will contain logs about the detection process.", - ) - parser.add_argument( - "-a", - "--with-alternative", - action="store_true", - default=False, - dest="alternatives", - help="Output complementary possibilities if any. Top-level JSON WILL be a list.", - ) - parser.add_argument( - "-n", - "--normalize", - action="store_true", - default=False, - dest="normalize", - help="Permit to normalize input file. If not set, program does not write anything.", - ) - parser.add_argument( - "-m", - "--minimal", - action="store_true", - default=False, - dest="minimal", - help="Only output the charset detected to STDOUT. Disabling JSON output.", - ) - parser.add_argument( - "-r", - "--replace", - action="store_true", - default=False, - dest="replace", - help="Replace file when trying to normalize it instead of creating a new one.", - ) - parser.add_argument( - "-f", - "--force", - action="store_true", - default=False, - dest="force", - help="Replace file without asking if you are sure, use this flag with caution.", - ) - parser.add_argument( - "-i", - "--no-preemptive", - action="store_true", - default=False, - dest="no_preemptive", - help="Disable looking at a charset declaration to hint the detector.", - ) - parser.add_argument( - "-t", - "--threshold", - action="store", - default=0.2, - type=float, - dest="threshold", - help="Define a custom maximum amount of noise allowed in decoded content. 0. 
<= noise <= 1.", - ) - parser.add_argument( - "--version", - action="version", - version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( - __version__, - python_version(), - unidata_version, - "OFF" if md_module.__file__.lower().endswith(".py") else "ON", - ), - help="Show version information and exit.", - ) - - args = parser.parse_args(argv) - - if args.replace is True and args.normalize is False: - if args.files: - for my_file in args.files: - my_file.close() - print("Use --replace in addition of --normalize only.", file=sys.stderr) - return 1 - - if args.force is True and args.replace is False: - if args.files: - for my_file in args.files: - my_file.close() - print("Use --force in addition of --replace only.", file=sys.stderr) - return 1 - - if args.threshold < 0.0 or args.threshold > 1.0: - if args.files: - for my_file in args.files: - my_file.close() - print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) - return 1 - - x_ = [] - - for my_file in args.files: - matches = from_fp( - my_file, - threshold=args.threshold, - explain=args.verbose, - preemptive_behaviour=args.no_preemptive is False, - ) - - best_guess = matches.best() - - if best_guess is None: - print( - 'Unable to identify originating encoding for "{}". {}'.format( - my_file.name, - ( - "Maybe try increasing maximum amount of chaos." - if args.threshold < 1.0 - else "" - ), - ), - file=sys.stderr, - ) - x_.append( - CliDetectionResult( - abspath(my_file.name), - None, - [], - [], - "Unknown", - [], - False, - 1.0, - 0.0, - None, - True, - ) - ) - else: - x_.append( - CliDetectionResult( - abspath(my_file.name), - best_guess.encoding, - best_guess.encoding_aliases, - [ - cp - for cp in best_guess.could_be_from_charset - if cp != best_guess.encoding - ], - best_guess.language, - best_guess.alphabets, - best_guess.bom, - best_guess.percent_chaos, - best_guess.percent_coherence, - None, - True, - ) - ) - - if len(matches) > 1 and args.alternatives: - for el in matches: - if el != best_guess: - x_.append( - CliDetectionResult( - abspath(my_file.name), - el.encoding, - el.encoding_aliases, - [ - cp - for cp in el.could_be_from_charset - if cp != el.encoding - ], - el.language, - el.alphabets, - el.bom, - el.percent_chaos, - el.percent_coherence, - None, - False, - ) - ) - - if args.normalize is True: - if best_guess.encoding.startswith("utf") is True: - print( - '"{}" file does not need to be normalized, as it already came from unicode.'.format( - my_file.name - ), - file=sys.stderr, - ) - if my_file.closed is False: - my_file.close() - continue - - dir_path = dirname(realpath(my_file.name)) - file_name = basename(realpath(my_file.name)) - - o_: list[str] = file_name.split(".") - - if args.replace is False: - o_.insert(-1, best_guess.encoding) - if my_file.closed is False: - my_file.close() - elif ( - args.force is False - and query_yes_no( - 'Are you sure to normalize "{}" by replacing it ?'.format( - my_file.name - ), - "no", - ) - is False - ): - if my_file.closed is False: - my_file.close() - continue - - try: - x_[0].unicode_path = join(dir_path, ".".join(o_)) - - with open(x_[0].unicode_path, "wb") as fp: - fp.write(best_guess.output()) - except OSError as e: - print(str(e), file=sys.stderr) - if my_file.closed is False: - my_file.close() - return 2 - - if my_file.closed is False: - my_file.close() - - if args.minimal is False: - print( - dumps( - [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, - ensure_ascii=True, - indent=4, - ) - ) - else: - for my_file in 
args.files: - print( - ", ".join( - [ - el.encoding or "undefined" - for el in x_ - if el.path == abspath(my_file.name) - ] - ) - ) - - return 0 - - -if __name__ == "__main__": - cli_detect() diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 1a82096..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc b/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc deleted file mode 100644 index fe1e3da..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/constant.py b/venv/lib/python3.10/site-packages/charset_normalizer/constant.py deleted file mode 100644 index cc71a01..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/constant.py +++ /dev/null @@ -1,2015 +0,0 @@ -from __future__ import annotations - -from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE -from encodings.aliases import aliases -from re import IGNORECASE -from re import compile as re_compile - -# Contain for each eligible encoding a list of/item bytes SIG/BOM -ENCODING_MARKS: dict[str, bytes | list[bytes]] = { - "utf_8": BOM_UTF8, - "utf_7": [ - b"\x2b\x2f\x76\x38", - b"\x2b\x2f\x76\x39", - b"\x2b\x2f\x76\x2b", - b"\x2b\x2f\x76\x2f", - b"\x2b\x2f\x76\x38\x2d", - ], - "gb18030": b"\x84\x31\x95\x33", - "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], - "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], -} - -TOO_SMALL_SEQUENCE: int = 32 -TOO_BIG_SEQUENCE: int = int(10e6) - -UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 - -# Up-to-date Unicode ucd/15.0.0 -UNICODE_RANGES_COMBINED: dict[str, range] = { - "Control character": range(32), - "Basic Latin": range(32, 128), - "Latin-1 Supplement": range(128, 256), - "Latin Extended-A": range(256, 384), - "Latin Extended-B": range(384, 592), - "IPA Extensions": range(592, 688), - "Spacing Modifier Letters": range(688, 768), - "Combining Diacritical Marks": range(768, 880), - "Greek and Coptic": range(880, 1024), - "Cyrillic": range(1024, 1280), - "Cyrillic Supplement": range(1280, 1328), - "Armenian": range(1328, 1424), - "Hebrew": range(1424, 1536), - "Arabic": range(1536, 1792), - "Syriac": range(1792, 1872), - "Arabic Supplement": range(1872, 1920), - "Thaana": range(1920, 1984), - "NKo": range(1984, 2048), - "Samaritan": range(2048, 2112), - "Mandaic": range(2112, 2144), - "Syriac Supplement": range(2144, 2160), - "Arabic Extended-B": range(2160, 2208), - "Arabic Extended-A": range(2208, 2304), - "Devanagari": range(2304, 2432), - "Bengali": range(2432, 2560), - "Gurmukhi": range(2560, 2688), - "Gujarati": range(2688, 2816), - "Oriya": range(2816, 2944), - "Tamil": range(2944, 3072), - "Telugu": range(3072, 3200), - "Kannada": range(3200, 3328), - "Malayalam": range(3328, 3456), - "Sinhala": range(3456, 3584), - "Thai": range(3584, 3712), - "Lao": range(3712, 3840), - "Tibetan": range(3840, 4096), - "Myanmar": range(4096, 4256), - "Georgian": range(4256, 4352), - "Hangul Jamo": range(4352, 4608), - "Ethiopic": range(4608, 4992), - "Ethiopic Supplement": range(4992, 5024), - "Cherokee": range(5024, 5120), - 
"Unified Canadian Aboriginal Syllabics": range(5120, 5760), - "Ogham": range(5760, 5792), - "Runic": range(5792, 5888), - "Tagalog": range(5888, 5920), - "Hanunoo": range(5920, 5952), - "Buhid": range(5952, 5984), - "Tagbanwa": range(5984, 6016), - "Khmer": range(6016, 6144), - "Mongolian": range(6144, 6320), - "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), - "Limbu": range(6400, 6480), - "Tai Le": range(6480, 6528), - "New Tai Lue": range(6528, 6624), - "Khmer Symbols": range(6624, 6656), - "Buginese": range(6656, 6688), - "Tai Tham": range(6688, 6832), - "Combining Diacritical Marks Extended": range(6832, 6912), - "Balinese": range(6912, 7040), - "Sundanese": range(7040, 7104), - "Batak": range(7104, 7168), - "Lepcha": range(7168, 7248), - "Ol Chiki": range(7248, 7296), - "Cyrillic Extended-C": range(7296, 7312), - "Georgian Extended": range(7312, 7360), - "Sundanese Supplement": range(7360, 7376), - "Vedic Extensions": range(7376, 7424), - "Phonetic Extensions": range(7424, 7552), - "Phonetic Extensions Supplement": range(7552, 7616), - "Combining Diacritical Marks Supplement": range(7616, 7680), - "Latin Extended Additional": range(7680, 7936), - "Greek Extended": range(7936, 8192), - "General Punctuation": range(8192, 8304), - "Superscripts and Subscripts": range(8304, 8352), - "Currency Symbols": range(8352, 8400), - "Combining Diacritical Marks for Symbols": range(8400, 8448), - "Letterlike Symbols": range(8448, 8528), - "Number Forms": range(8528, 8592), - "Arrows": range(8592, 8704), - "Mathematical Operators": range(8704, 8960), - "Miscellaneous Technical": range(8960, 9216), - "Control Pictures": range(9216, 9280), - "Optical Character Recognition": range(9280, 9312), - "Enclosed Alphanumerics": range(9312, 9472), - "Box Drawing": range(9472, 9600), - "Block Elements": range(9600, 9632), - "Geometric Shapes": range(9632, 9728), - "Miscellaneous Symbols": range(9728, 9984), - "Dingbats": range(9984, 10176), - "Miscellaneous Mathematical Symbols-A": range(10176, 10224), - "Supplemental Arrows-A": range(10224, 10240), - "Braille Patterns": range(10240, 10496), - "Supplemental Arrows-B": range(10496, 10624), - "Miscellaneous Mathematical Symbols-B": range(10624, 10752), - "Supplemental Mathematical Operators": range(10752, 11008), - "Miscellaneous Symbols and Arrows": range(11008, 11264), - "Glagolitic": range(11264, 11360), - "Latin Extended-C": range(11360, 11392), - "Coptic": range(11392, 11520), - "Georgian Supplement": range(11520, 11568), - "Tifinagh": range(11568, 11648), - "Ethiopic Extended": range(11648, 11744), - "Cyrillic Extended-A": range(11744, 11776), - "Supplemental Punctuation": range(11776, 11904), - "CJK Radicals Supplement": range(11904, 12032), - "Kangxi Radicals": range(12032, 12256), - "Ideographic Description Characters": range(12272, 12288), - "CJK Symbols and Punctuation": range(12288, 12352), - "Hiragana": range(12352, 12448), - "Katakana": range(12448, 12544), - "Bopomofo": range(12544, 12592), - "Hangul Compatibility Jamo": range(12592, 12688), - "Kanbun": range(12688, 12704), - "Bopomofo Extended": range(12704, 12736), - "CJK Strokes": range(12736, 12784), - "Katakana Phonetic Extensions": range(12784, 12800), - "Enclosed CJK Letters and Months": range(12800, 13056), - "CJK Compatibility": range(13056, 13312), - "CJK Unified Ideographs Extension A": range(13312, 19904), - "Yijing Hexagram Symbols": range(19904, 19968), - "CJK Unified Ideographs": range(19968, 40960), - "Yi Syllables": range(40960, 42128), - "Yi Radicals": 
range(42128, 42192), - "Lisu": range(42192, 42240), - "Vai": range(42240, 42560), - "Cyrillic Extended-B": range(42560, 42656), - "Bamum": range(42656, 42752), - "Modifier Tone Letters": range(42752, 42784), - "Latin Extended-D": range(42784, 43008), - "Syloti Nagri": range(43008, 43056), - "Common Indic Number Forms": range(43056, 43072), - "Phags-pa": range(43072, 43136), - "Saurashtra": range(43136, 43232), - "Devanagari Extended": range(43232, 43264), - "Kayah Li": range(43264, 43312), - "Rejang": range(43312, 43360), - "Hangul Jamo Extended-A": range(43360, 43392), - "Javanese": range(43392, 43488), - "Myanmar Extended-B": range(43488, 43520), - "Cham": range(43520, 43616), - "Myanmar Extended-A": range(43616, 43648), - "Tai Viet": range(43648, 43744), - "Meetei Mayek Extensions": range(43744, 43776), - "Ethiopic Extended-A": range(43776, 43824), - "Latin Extended-E": range(43824, 43888), - "Cherokee Supplement": range(43888, 43968), - "Meetei Mayek": range(43968, 44032), - "Hangul Syllables": range(44032, 55216), - "Hangul Jamo Extended-B": range(55216, 55296), - "High Surrogates": range(55296, 56192), - "High Private Use Surrogates": range(56192, 56320), - "Low Surrogates": range(56320, 57344), - "Private Use Area": range(57344, 63744), - "CJK Compatibility Ideographs": range(63744, 64256), - "Alphabetic Presentation Forms": range(64256, 64336), - "Arabic Presentation Forms-A": range(64336, 65024), - "Variation Selectors": range(65024, 65040), - "Vertical Forms": range(65040, 65056), - "Combining Half Marks": range(65056, 65072), - "CJK Compatibility Forms": range(65072, 65104), - "Small Form Variants": range(65104, 65136), - "Arabic Presentation Forms-B": range(65136, 65280), - "Halfwidth and Fullwidth Forms": range(65280, 65520), - "Specials": range(65520, 65536), - "Linear B Syllabary": range(65536, 65664), - "Linear B Ideograms": range(65664, 65792), - "Aegean Numbers": range(65792, 65856), - "Ancient Greek Numbers": range(65856, 65936), - "Ancient Symbols": range(65936, 66000), - "Phaistos Disc": range(66000, 66048), - "Lycian": range(66176, 66208), - "Carian": range(66208, 66272), - "Coptic Epact Numbers": range(66272, 66304), - "Old Italic": range(66304, 66352), - "Gothic": range(66352, 66384), - "Old Permic": range(66384, 66432), - "Ugaritic": range(66432, 66464), - "Old Persian": range(66464, 66528), - "Deseret": range(66560, 66640), - "Shavian": range(66640, 66688), - "Osmanya": range(66688, 66736), - "Osage": range(66736, 66816), - "Elbasan": range(66816, 66864), - "Caucasian Albanian": range(66864, 66928), - "Vithkuqi": range(66928, 67008), - "Linear A": range(67072, 67456), - "Latin Extended-F": range(67456, 67520), - "Cypriot Syllabary": range(67584, 67648), - "Imperial Aramaic": range(67648, 67680), - "Palmyrene": range(67680, 67712), - "Nabataean": range(67712, 67760), - "Hatran": range(67808, 67840), - "Phoenician": range(67840, 67872), - "Lydian": range(67872, 67904), - "Meroitic Hieroglyphs": range(67968, 68000), - "Meroitic Cursive": range(68000, 68096), - "Kharoshthi": range(68096, 68192), - "Old South Arabian": range(68192, 68224), - "Old North Arabian": range(68224, 68256), - "Manichaean": range(68288, 68352), - "Avestan": range(68352, 68416), - "Inscriptional Parthian": range(68416, 68448), - "Inscriptional Pahlavi": range(68448, 68480), - "Psalter Pahlavi": range(68480, 68528), - "Old Turkic": range(68608, 68688), - "Old Hungarian": range(68736, 68864), - "Hanifi Rohingya": range(68864, 68928), - "Rumi Numeral Symbols": range(69216, 69248), - "Yezidi": 
range(69248, 69312), - "Arabic Extended-C": range(69312, 69376), - "Old Sogdian": range(69376, 69424), - "Sogdian": range(69424, 69488), - "Old Uyghur": range(69488, 69552), - "Chorasmian": range(69552, 69600), - "Elymaic": range(69600, 69632), - "Brahmi": range(69632, 69760), - "Kaithi": range(69760, 69840), - "Sora Sompeng": range(69840, 69888), - "Chakma": range(69888, 69968), - "Mahajani": range(69968, 70016), - "Sharada": range(70016, 70112), - "Sinhala Archaic Numbers": range(70112, 70144), - "Khojki": range(70144, 70224), - "Multani": range(70272, 70320), - "Khudawadi": range(70320, 70400), - "Grantha": range(70400, 70528), - "Newa": range(70656, 70784), - "Tirhuta": range(70784, 70880), - "Siddham": range(71040, 71168), - "Modi": range(71168, 71264), - "Mongolian Supplement": range(71264, 71296), - "Takri": range(71296, 71376), - "Ahom": range(71424, 71504), - "Dogra": range(71680, 71760), - "Warang Citi": range(71840, 71936), - "Dives Akuru": range(71936, 72032), - "Nandinagari": range(72096, 72192), - "Zanabazar Square": range(72192, 72272), - "Soyombo": range(72272, 72368), - "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), - "Pau Cin Hau": range(72384, 72448), - "Devanagari Extended-A": range(72448, 72544), - "Bhaiksuki": range(72704, 72816), - "Marchen": range(72816, 72896), - "Masaram Gondi": range(72960, 73056), - "Gunjala Gondi": range(73056, 73136), - "Makasar": range(73440, 73472), - "Kawi": range(73472, 73568), - "Lisu Supplement": range(73648, 73664), - "Tamil Supplement": range(73664, 73728), - "Cuneiform": range(73728, 74752), - "Cuneiform Numbers and Punctuation": range(74752, 74880), - "Early Dynastic Cuneiform": range(74880, 75088), - "Cypro-Minoan": range(77712, 77824), - "Egyptian Hieroglyphs": range(77824, 78896), - "Egyptian Hieroglyph Format Controls": range(78896, 78944), - "Anatolian Hieroglyphs": range(82944, 83584), - "Bamum Supplement": range(92160, 92736), - "Mro": range(92736, 92784), - "Tangsa": range(92784, 92880), - "Bassa Vah": range(92880, 92928), - "Pahawh Hmong": range(92928, 93072), - "Medefaidrin": range(93760, 93856), - "Miao": range(93952, 94112), - "Ideographic Symbols and Punctuation": range(94176, 94208), - "Tangut": range(94208, 100352), - "Tangut Components": range(100352, 101120), - "Khitan Small Script": range(101120, 101632), - "Tangut Supplement": range(101632, 101760), - "Kana Extended-B": range(110576, 110592), - "Kana Supplement": range(110592, 110848), - "Kana Extended-A": range(110848, 110896), - "Small Kana Extension": range(110896, 110960), - "Nushu": range(110960, 111360), - "Duployan": range(113664, 113824), - "Shorthand Format Controls": range(113824, 113840), - "Znamenny Musical Notation": range(118528, 118736), - "Byzantine Musical Symbols": range(118784, 119040), - "Musical Symbols": range(119040, 119296), - "Ancient Greek Musical Notation": range(119296, 119376), - "Kaktovik Numerals": range(119488, 119520), - "Mayan Numerals": range(119520, 119552), - "Tai Xuan Jing Symbols": range(119552, 119648), - "Counting Rod Numerals": range(119648, 119680), - "Mathematical Alphanumeric Symbols": range(119808, 120832), - "Sutton SignWriting": range(120832, 121520), - "Latin Extended-G": range(122624, 122880), - "Glagolitic Supplement": range(122880, 122928), - "Cyrillic Extended-D": range(122928, 123024), - "Nyiakeng Puachue Hmong": range(123136, 123216), - "Toto": range(123536, 123584), - "Wancho": range(123584, 123648), - "Nag Mundari": range(124112, 124160), - "Ethiopic Extended-B": range(124896, 
124928), - "Mende Kikakui": range(124928, 125152), - "Adlam": range(125184, 125280), - "Indic Siyaq Numbers": range(126064, 126144), - "Ottoman Siyaq Numbers": range(126208, 126288), - "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), - "Mahjong Tiles": range(126976, 127024), - "Domino Tiles": range(127024, 127136), - "Playing Cards": range(127136, 127232), - "Enclosed Alphanumeric Supplement": range(127232, 127488), - "Enclosed Ideographic Supplement": range(127488, 127744), - "Miscellaneous Symbols and Pictographs": range(127744, 128512), - "Emoticons range(Emoji)": range(128512, 128592), - "Ornamental Dingbats": range(128592, 128640), - "Transport and Map Symbols": range(128640, 128768), - "Alchemical Symbols": range(128768, 128896), - "Geometric Shapes Extended": range(128896, 129024), - "Supplemental Arrows-C": range(129024, 129280), - "Supplemental Symbols and Pictographs": range(129280, 129536), - "Chess Symbols": range(129536, 129648), - "Symbols and Pictographs Extended-A": range(129648, 129792), - "Symbols for Legacy Computing": range(129792, 130048), - "CJK Unified Ideographs Extension B": range(131072, 173792), - "CJK Unified Ideographs Extension C": range(173824, 177984), - "CJK Unified Ideographs Extension D": range(177984, 178208), - "CJK Unified Ideographs Extension E": range(178208, 183984), - "CJK Unified Ideographs Extension F": range(183984, 191472), - "CJK Compatibility Ideographs Supplement": range(194560, 195104), - "CJK Unified Ideographs Extension G": range(196608, 201552), - "CJK Unified Ideographs Extension H": range(201552, 205744), - "Tags": range(917504, 917632), - "Variation Selectors Supplement": range(917760, 918000), - "Supplementary Private Use Area-A": range(983040, 1048576), - "Supplementary Private Use Area-B": range(1048576, 1114112), -} - - -UNICODE_SECONDARY_RANGE_KEYWORD: list[str] = [ - "Supplement", - "Extended", - "Extensions", - "Modifier", - "Marks", - "Punctuation", - "Symbols", - "Forms", - "Operators", - "Miscellaneous", - "Drawing", - "Block", - "Shapes", - "Supplemental", - "Tags", -] - -RE_POSSIBLE_ENCODING_INDICATION = re_compile( - r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", - IGNORECASE, -) - -IANA_NO_ALIASES = [ - "cp720", - "cp737", - "cp856", - "cp874", - "cp875", - "cp1006", - "koi8_r", - "koi8_t", - "koi8_u", -] - -IANA_SUPPORTED: list[str] = sorted( - filter( - lambda x: x.endswith("_codec") is False - and x not in {"rot_13", "tactis", "mbcs"}, - list(set(aliases.values())) + IANA_NO_ALIASES, - ) -) - -IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) - -# pre-computed code page that are similar using the function cp_similarity. 
-IANA_SUPPORTED_SIMILAR: dict[str, list[str]] = { - "cp037": ["cp1026", "cp1140", "cp273", "cp500"], - "cp1026": ["cp037", "cp1140", "cp273", "cp500"], - "cp1125": ["cp866"], - "cp1140": ["cp037", "cp1026", "cp273", "cp500"], - "cp1250": ["iso8859_2"], - "cp1251": ["kz1048", "ptcp154"], - "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1253": ["iso8859_7"], - "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1257": ["iso8859_13"], - "cp273": ["cp037", "cp1026", "cp1140", "cp500"], - "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], - "cp500": ["cp037", "cp1026", "cp1140", "cp273"], - "cp850": ["cp437", "cp857", "cp858", "cp865"], - "cp857": ["cp850", "cp858", "cp865"], - "cp858": ["cp437", "cp850", "cp857", "cp865"], - "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], - "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], - "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], - "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], - "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], - "cp866": ["cp1125"], - "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], - "iso8859_11": ["tis_620"], - "iso8859_13": ["cp1257"], - "iso8859_14": [ - "iso8859_10", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_15": [ - "cp1252", - "cp1254", - "iso8859_10", - "iso8859_14", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_16": [ - "iso8859_14", - "iso8859_15", - "iso8859_2", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], - "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], - "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], - "iso8859_7": ["cp1253"], - "iso8859_9": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "latin_1", - ], - "kz1048": ["cp1251", "ptcp154"], - "latin_1": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "iso8859_9", - ], - "mac_iceland": ["mac_roman", "mac_turkish"], - "mac_roman": ["mac_iceland", "mac_turkish"], - "mac_turkish": ["mac_iceland", "mac_roman"], - "ptcp154": ["cp1251", "kz1048"], - "tis_620": ["iso8859_11"], -} - - -CHARDET_CORRESPONDENCE: dict[str, str] = { - "iso2022_kr": "ISO-2022-KR", - "iso2022_jp": "ISO-2022-JP", - "euc_kr": "EUC-KR", - "tis_620": "TIS-620", - "utf_32": "UTF-32", - "euc_jp": "EUC-JP", - "koi8_r": "KOI8-R", - "iso8859_1": "ISO-8859-1", - "iso8859_2": "ISO-8859-2", - "iso8859_5": "ISO-8859-5", - "iso8859_6": "ISO-8859-6", - "iso8859_7": "ISO-8859-7", - "iso8859_8": "ISO-8859-8", - "utf_16": "UTF-16", - "cp855": "IBM855", - "mac_cyrillic": "MacCyrillic", - "gb2312": "GB2312", - "gb18030": "GB18030", - "cp932": "CP932", - "cp866": "IBM866", - "utf_8": "utf-8", - "utf_8_sig": "UTF-8-SIG", - "shift_jis": "SHIFT_JIS", - "big5": "Big5", - "cp1250": "windows-1250", - "cp1251": "windows-1251", - "cp1252": "Windows-1252", - "cp1253": "windows-1253", - "cp1255": "windows-1255", - "cp1256": "windows-1256", - "cp1254": "Windows-1254", - "cp949": "CP949", -} - - -COMMON_SAFE_ASCII_CHARACTERS: set[str] = { - "<", - ">", - "=", - ":", - "/", - "&", - ";", - "{", - "}", - "[", - "]", - ",", - "|", - '"', - "-", - "(", - ")", -} - -# Sample character sets — replace with full lists if needed -COMMON_CHINESE_CHARACTERS = 
"的一是在不了有和人这中大为上个国我以要他时来用们生到作地于出就分对成会可主发年动同工也能下过子说产种面而方后多定行学法所民得经十三之进着等部度家电力里如水化高自二理起小物现实加量都两体制机当使点从业本去把性好应开它合还因由其些然前外天政四日那社义事平形相全表间样与关各重新线内数正心反你明看原又么利比或但质气第向道命此变条只没结解问意建月公无系军很情者最立代想已通并提直题党程展五果料象员革位入常文总次品式活设及管特件长求老头基资边流路级少图山统接知较将组见计别她手角期根论运农指几九区强放决西被干做必战先回则任取据处队南给色光门即保治北造百规热领七海口东导器压志世金增争济阶油思术极交受联什认六共权收证改清己美再采转更单风切打白教速花带安场身车例真务具万每目至达走积示议声报斗完类八离华名确才科张信马节话米整空元况今集温传土许步群广石记需段研界拉林律叫且究观越织装影算低持音众书布复容儿须际商非验连断深难近矿千周委素技备半办青省列习响约支般史感劳便团往酸历市克何除消构府太准精值号率族维划选标写存候毛亲快效斯院查江型眼王按格养易置派层片始却专状育厂京识适属圆包火住调满县局照参红细引听该铁价严龙飞" - -COMMON_JAPANESE_CHARACTERS = "日一国年大十二本中長出三時行見月分後前生五間上東四今金九入学高円子外八六下来気小七山話女北午百書先名川千水半男西電校語土木聞食車何南万毎白天母火右読友左休父雨" - -COMMON_KOREAN_CHARACTERS = "一二三四五六七八九十百千萬上下左右中人女子大小山川日月火水木金土父母天地國名年時文校學生" - -# Combine all into a set -COMMON_CJK_CHARACTERS = set( - "".join( - [ - COMMON_CHINESE_CHARACTERS, - COMMON_JAPANESE_CHARACTERS, - COMMON_KOREAN_CHARACTERS, - ] - ) -) - -KO_NAMES: set[str] = {"johab", "cp949", "euc_kr"} -ZH_NAMES: set[str] = {"big5", "cp950", "big5hkscs", "hz"} - -# Logging LEVEL below DEBUG -TRACE: int = 5 - - -# Language label that contain the em dash "—" -# character are to be considered alternative seq to origin -FREQUENCIES: dict[str, list[str]] = { - "English": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "u", - "m", - "f", - "p", - "g", - "w", - "y", - "b", - "v", - "k", - "x", - "j", - "z", - "q", - ], - "English—": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "m", - "u", - "f", - "p", - "g", - "w", - "b", - "y", - "v", - "k", - "j", - "x", - "z", - "q", - ], - "German": [ - "e", - "n", - "i", - "r", - "s", - "t", - "a", - "d", - "h", - "u", - "l", - "g", - "o", - "c", - "m", - "b", - "f", - "k", - "w", - "z", - "p", - "v", - "ü", - "ä", - "ö", - "j", - ], - "French": [ - "e", - "a", - "s", - "n", - "i", - "t", - "r", - "l", - "u", - "o", - "d", - "c", - "p", - "m", - "é", - "v", - "g", - "f", - "b", - "h", - "q", - "à", - "x", - "è", - "y", - "j", - ], - "Dutch": [ - "e", - "n", - "a", - "i", - "r", - "t", - "o", - "d", - "s", - "l", - "g", - "h", - "v", - "m", - "u", - "k", - "c", - "p", - "b", - "w", - "j", - "z", - "f", - "y", - "x", - "ë", - ], - "Italian": [ - "e", - "i", - "a", - "o", - "n", - "l", - "t", - "r", - "s", - "c", - "d", - "u", - "p", - "m", - "g", - "v", - "f", - "b", - "z", - "h", - "q", - "è", - "à", - "k", - "y", - "ò", - ], - "Polish": [ - "a", - "i", - "o", - "e", - "n", - "r", - "z", - "w", - "s", - "c", - "t", - "k", - "y", - "d", - "p", - "m", - "u", - "l", - "j", - "ł", - "g", - "b", - "h", - "ą", - "ę", - "ó", - ], - "Spanish": [ - "e", - "a", - "o", - "n", - "s", - "r", - "i", - "l", - "d", - "t", - "c", - "u", - "m", - "p", - "b", - "g", - "v", - "f", - "y", - "ó", - "h", - "q", - "í", - "j", - "z", - "á", - ], - "Russian": [ - "о", - "а", - "е", - "и", - "н", - "с", - "т", - "р", - "в", - "л", - "к", - "м", - "д", - "п", - "у", - "г", - "я", - "ы", - "з", - "б", - "й", - "ь", - "ч", - "х", - "ж", - "ц", - ], - # Jap-Kanji - "Japanese": [ - "人", - "一", - "大", - "亅", - "丁", - "丨", - "竹", - "笑", - "口", - "日", - "今", - "二", - "彳", - "行", - "十", - "土", - "丶", - "寸", - "寺", - "時", - "乙", - "丿", - "乂", - "气", - "気", - "冂", - "巾", - "亠", - "市", - "目", - "儿", - "見", - "八", - "小", - "凵", - "県", - "月", - "彐", - "門", - "間", - "木", - "東", - "山", - "出", - "本", - "中", - "刀", - "分", - "耳", - "又", - "取", - "最", - "言", - "田", - "心", - "思", - "刂", - "前", - "京", - "尹", - "事", - "生", - "厶", - "云", - "会", - "未", - "来", - "白", - "冫", - "楽", - "灬", - "馬", - "尸", - "尺", - "駅", - "明", - 
"耂", - "者", - "了", - "阝", - "都", - "高", - "卜", - "占", - "厂", - "广", - "店", - "子", - "申", - "奄", - "亻", - "俺", - "上", - "方", - "冖", - "学", - "衣", - "艮", - "食", - "自", - ], - # Jap-Katakana - "Japanese—": [ - "ー", - "ン", - "ス", - "・", - "ル", - "ト", - "リ", - "イ", - "ア", - "ラ", - "ッ", - "ク", - "ド", - "シ", - "レ", - "ジ", - "タ", - "フ", - "ロ", - "カ", - "テ", - "マ", - "ィ", - "グ", - "バ", - "ム", - "プ", - "オ", - "コ", - "デ", - "ニ", - "ウ", - "メ", - "サ", - "ビ", - "ナ", - "ブ", - "ャ", - "エ", - "ュ", - "チ", - "キ", - "ズ", - "ダ", - "パ", - "ミ", - "ェ", - "ョ", - "ハ", - "セ", - "ベ", - "ガ", - "モ", - "ツ", - "ネ", - "ボ", - "ソ", - "ノ", - "ァ", - "ヴ", - "ワ", - "ポ", - "ペ", - "ピ", - "ケ", - "ゴ", - "ギ", - "ザ", - "ホ", - "ゲ", - "ォ", - "ヤ", - "ヒ", - "ユ", - "ヨ", - "ヘ", - "ゼ", - "ヌ", - "ゥ", - "ゾ", - "ヶ", - "ヂ", - "ヲ", - "ヅ", - "ヵ", - "ヱ", - "ヰ", - "ヮ", - "ヽ", - "゠", - "ヾ", - "ヷ", - "ヿ", - "ヸ", - "ヹ", - "ヺ", - ], - # Jap-Hiragana - "Japanese——": [ - "の", - "に", - "る", - "た", - "と", - "は", - "し", - "い", - "を", - "で", - "て", - "が", - "な", - "れ", - "か", - "ら", - "さ", - "っ", - "り", - "す", - "あ", - "も", - "こ", - "ま", - "う", - "く", - "よ", - "き", - "ん", - "め", - "お", - "け", - "そ", - "つ", - "だ", - "や", - "え", - "ど", - "わ", - "ち", - "み", - "せ", - "じ", - "ば", - "へ", - "び", - "ず", - "ろ", - "ほ", - "げ", - "む", - "べ", - "ひ", - "ょ", - "ゆ", - "ぶ", - "ご", - "ゃ", - "ね", - "ふ", - "ぐ", - "ぎ", - "ぼ", - "ゅ", - "づ", - "ざ", - "ぞ", - "ぬ", - "ぜ", - "ぱ", - "ぽ", - "ぷ", - "ぴ", - "ぃ", - "ぁ", - "ぇ", - "ぺ", - "ゞ", - "ぢ", - "ぉ", - "ぅ", - "ゐ", - "ゝ", - "ゑ", - "゛", - "゜", - "ゎ", - "ゔ", - "゚", - "ゟ", - "゙", - "ゕ", - "ゖ", - ], - "Portuguese": [ - "a", - "e", - "o", - "s", - "i", - "r", - "d", - "n", - "t", - "m", - "u", - "c", - "l", - "p", - "g", - "v", - "b", - "f", - "h", - "ã", - "q", - "é", - "ç", - "á", - "z", - "í", - ], - "Swedish": [ - "e", - "a", - "n", - "r", - "t", - "s", - "i", - "l", - "d", - "o", - "m", - "k", - "g", - "v", - "h", - "f", - "u", - "p", - "ä", - "c", - "b", - "ö", - "å", - "y", - "j", - "x", - ], - "Chinese": [ - "的", - "一", - "是", - "不", - "了", - "在", - "人", - "有", - "我", - "他", - "这", - "个", - "们", - "中", - "来", - "上", - "大", - "为", - "和", - "国", - "地", - "到", - "以", - "说", - "时", - "要", - "就", - "出", - "会", - "可", - "也", - "你", - "对", - "生", - "能", - "而", - "子", - "那", - "得", - "于", - "着", - "下", - "自", - "之", - "年", - "过", - "发", - "后", - "作", - "里", - "用", - "道", - "行", - "所", - "然", - "家", - "种", - "事", - "成", - "方", - "多", - "经", - "么", - "去", - "法", - "学", - "如", - "都", - "同", - "现", - "当", - "没", - "动", - "面", - "起", - "看", - "定", - "天", - "分", - "还", - "进", - "好", - "小", - "部", - "其", - "些", - "主", - "样", - "理", - "心", - "她", - "本", - "前", - "开", - "但", - "因", - "只", - "从", - "想", - "实", - ], - "Ukrainian": [ - "о", - "а", - "н", - "і", - "и", - "р", - "в", - "т", - "е", - "с", - "к", - "л", - "у", - "д", - "м", - "п", - "з", - "я", - "ь", - "б", - "г", - "й", - "ч", - "х", - "ц", - "ї", - ], - "Norwegian": [ - "e", - "r", - "n", - "t", - "a", - "s", - "i", - "o", - "l", - "d", - "g", - "k", - "m", - "v", - "f", - "p", - "u", - "b", - "h", - "å", - "y", - "j", - "ø", - "c", - "æ", - "w", - ], - "Finnish": [ - "a", - "i", - "n", - "t", - "e", - "s", - "l", - "o", - "u", - "k", - "ä", - "m", - "r", - "v", - "j", - "h", - "p", - "y", - "d", - "ö", - "g", - "c", - "b", - "f", - "w", - "z", - ], - "Vietnamese": [ - "n", - "h", - "t", - "i", - "c", - "g", - "a", - "o", - "u", - "m", - "l", - "r", - "à", - "đ", - "s", - "e", - "v", - "p", - "b", - "y", - "ư", - "d", - "á", - "k", - "ộ", - "ế", - ], - "Czech": [ - "o", - "e", - "a", - 
"n", - "t", - "s", - "i", - "l", - "v", - "r", - "k", - "d", - "u", - "m", - "p", - "í", - "c", - "h", - "z", - "á", - "y", - "j", - "b", - "ě", - "é", - "ř", - ], - "Hungarian": [ - "e", - "a", - "t", - "l", - "s", - "n", - "k", - "r", - "i", - "o", - "z", - "á", - "é", - "g", - "m", - "b", - "y", - "v", - "d", - "h", - "u", - "p", - "j", - "ö", - "f", - "c", - ], - "Korean": [ - "이", - "다", - "에", - "의", - "는", - "로", - "하", - "을", - "가", - "고", - "지", - "서", - "한", - "은", - "기", - "으", - "년", - "대", - "사", - "시", - "를", - "리", - "도", - "인", - "스", - "일", - ], - "Indonesian": [ - "a", - "n", - "e", - "i", - "r", - "t", - "u", - "s", - "d", - "k", - "m", - "l", - "g", - "p", - "b", - "o", - "h", - "y", - "j", - "c", - "w", - "f", - "v", - "z", - "x", - "q", - ], - "Turkish": [ - "a", - "e", - "i", - "n", - "r", - "l", - "ı", - "k", - "d", - "t", - "s", - "m", - "y", - "u", - "o", - "b", - "ü", - "ş", - "v", - "g", - "z", - "h", - "c", - "p", - "ç", - "ğ", - ], - "Romanian": [ - "e", - "i", - "a", - "r", - "n", - "t", - "u", - "l", - "o", - "c", - "s", - "d", - "p", - "m", - "ă", - "f", - "v", - "î", - "g", - "b", - "ș", - "ț", - "z", - "h", - "â", - "j", - ], - "Farsi": [ - "ا", - "ی", - "ر", - "د", - "ن", - "ه", - "و", - "م", - "ت", - "ب", - "س", - "ل", - "ک", - "ش", - "ز", - "ف", - "گ", - "ع", - "خ", - "ق", - "ج", - "آ", - "پ", - "ح", - "ط", - "ص", - ], - "Arabic": [ - "ا", - "ل", - "ي", - "م", - "و", - "ن", - "ر", - "ت", - "ب", - "ة", - "ع", - "د", - "س", - "ف", - "ه", - "ك", - "ق", - "أ", - "ح", - "ج", - "ش", - "ط", - "ص", - "ى", - "خ", - "إ", - ], - "Danish": [ - "e", - "r", - "n", - "t", - "a", - "i", - "s", - "d", - "l", - "o", - "g", - "m", - "k", - "f", - "v", - "u", - "b", - "h", - "p", - "å", - "y", - "ø", - "æ", - "c", - "j", - "w", - ], - "Serbian": [ - "а", - "и", - "о", - "е", - "н", - "р", - "с", - "у", - "т", - "к", - "ј", - "в", - "д", - "м", - "п", - "л", - "г", - "з", - "б", - "a", - "i", - "e", - "o", - "n", - "ц", - "ш", - ], - "Lithuanian": [ - "i", - "a", - "s", - "o", - "r", - "e", - "t", - "n", - "u", - "k", - "m", - "l", - "p", - "v", - "d", - "j", - "g", - "ė", - "b", - "y", - "ų", - "š", - "ž", - "c", - "ą", - "į", - ], - "Slovene": [ - "e", - "a", - "i", - "o", - "n", - "r", - "s", - "l", - "t", - "j", - "v", - "k", - "d", - "p", - "m", - "u", - "z", - "b", - "g", - "h", - "č", - "c", - "š", - "ž", - "f", - "y", - ], - "Slovak": [ - "o", - "a", - "e", - "n", - "i", - "r", - "v", - "t", - "s", - "l", - "k", - "d", - "m", - "p", - "u", - "c", - "h", - "j", - "b", - "z", - "á", - "y", - "ý", - "í", - "č", - "é", - ], - "Hebrew": [ - "י", - "ו", - "ה", - "ל", - "ר", - "ב", - "ת", - "מ", - "א", - "ש", - "נ", - "ע", - "ם", - "ד", - "ק", - "ח", - "פ", - "ס", - "כ", - "ג", - "ט", - "צ", - "ן", - "ז", - "ך", - ], - "Bulgarian": [ - "а", - "и", - "о", - "е", - "н", - "т", - "р", - "с", - "в", - "л", - "к", - "д", - "п", - "м", - "з", - "г", - "я", - "ъ", - "у", - "б", - "ч", - "ц", - "й", - "ж", - "щ", - "х", - ], - "Croatian": [ - "a", - "i", - "o", - "e", - "n", - "r", - "j", - "s", - "t", - "u", - "k", - "l", - "v", - "d", - "m", - "p", - "g", - "z", - "b", - "c", - "č", - "h", - "š", - "ž", - "ć", - "f", - ], - "Hindi": [ - "क", - "र", - "स", - "न", - "त", - "म", - "ह", - "प", - "य", - "ल", - "व", - "ज", - "द", - "ग", - "ब", - "श", - "ट", - "अ", - "ए", - "थ", - "भ", - "ड", - "च", - "ध", - "ष", - "इ", - ], - "Estonian": [ - "a", - "i", - "e", - "s", - "t", - "l", - "u", - "n", - "o", - "k", - "r", - "d", - "m", - "v", - "g", - "p", - "j", - "h", - "ä", - "b", - "õ", 
- "ü", - "f", - "c", - "ö", - "y", - ], - "Thai": [ - "า", - "น", - "ร", - "อ", - "ก", - "เ", - "ง", - "ม", - "ย", - "ล", - "ว", - "ด", - "ท", - "ส", - "ต", - "ะ", - "ป", - "บ", - "ค", - "ห", - "แ", - "จ", - "พ", - "ช", - "ข", - "ใ", - ], - "Greek": [ - "α", - "τ", - "ο", - "ι", - "ε", - "ν", - "ρ", - "σ", - "κ", - "η", - "π", - "ς", - "υ", - "μ", - "λ", - "ί", - "ό", - "ά", - "γ", - "έ", - "δ", - "ή", - "ω", - "χ", - "θ", - "ύ", - ], - "Tamil": [ - "க", - "த", - "ப", - "ட", - "ர", - "ம", - "ல", - "ன", - "வ", - "ற", - "ய", - "ள", - "ச", - "ந", - "இ", - "ண", - "அ", - "ஆ", - "ழ", - "ங", - "எ", - "உ", - "ஒ", - "ஸ", - ], - "Kazakh": [ - "а", - "ы", - "е", - "н", - "т", - "р", - "л", - "і", - "д", - "с", - "м", - "қ", - "к", - "о", - "б", - "и", - "у", - "ғ", - "ж", - "ң", - "з", - "ш", - "й", - "п", - "г", - "ө", - ], -} - -LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/legacy.py b/venv/lib/python3.10/site-packages/charset_normalizer/legacy.py deleted file mode 100644 index e221bec..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/legacy.py +++ /dev/null @@ -1,64 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any -from warnings import warn - -from .api import from_bytes -from .constant import CHARDET_CORRESPONDENCE - -# TODO: remove this check when dropping Python 3.7 support -if TYPE_CHECKING: - from typing_extensions import TypedDict - - class ResultDict(TypedDict): - encoding: str | None - language: str - confidence: float | None - - -def detect( - byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any -) -> ResultDict: - """ - chardet legacy method - Detect the encoding of the given byte string. It should be mostly backward-compatible. - Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) - This function is deprecated and should be used to migrate your project easily, consult the documentation for - further information. Not planned for removal. - - :param byte_str: The byte sequence to examine. - :param should_rename_legacy: Should we rename legacy encodings - to their more modern equivalents? - """ - if len(kwargs): - warn( - f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" - ) - - if not isinstance(byte_str, (bytearray, bytes)): - raise TypeError( # pragma: nocover - f"Expected object of type bytes or bytearray, got: {type(byte_str)}" - ) - - if isinstance(byte_str, bytearray): - byte_str = bytes(byte_str) - - r = from_bytes(byte_str).best() - - encoding = r.encoding if r is not None else None - language = r.language if r is not None and r.language != "Unknown" else "" - confidence = 1.0 - r.chaos if r is not None else None - - # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process - # but chardet does return 'utf-8-sig' and it is a valid codec name. 
- if r is not None and encoding == "utf_8" and r.bom: - encoding += "_sig" - - if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: - encoding = CHARDET_CORRESPONDENCE[encoding] - - return { - "encoding": encoding, - "language": language, - "confidence": confidence, - } diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so deleted file mode 100755 index 277f1d0..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/md.py b/venv/lib/python3.10/site-packages/charset_normalizer/md.py deleted file mode 100644 index 12ce024..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/md.py +++ /dev/null @@ -1,635 +0,0 @@ -from __future__ import annotations - -from functools import lru_cache -from logging import getLogger - -from .constant import ( - COMMON_SAFE_ASCII_CHARACTERS, - TRACE, - UNICODE_SECONDARY_RANGE_KEYWORD, -) -from .utils import ( - is_accentuated, - is_arabic, - is_arabic_isolated_form, - is_case_variable, - is_cjk, - is_emoticon, - is_hangul, - is_hiragana, - is_katakana, - is_latin, - is_punctuation, - is_separator, - is_symbol, - is_thai, - is_unprintable, - remove_accent, - unicode_range, - is_cjk_uncommon, -) - - -class MessDetectorPlugin: - """ - Base abstract class used for mess detection plugins. - All detectors MUST extend and implement given methods. - """ - - def eligible(self, character: str) -> bool: - """ - Determine if given character should be fed in. - """ - raise NotImplementedError # pragma: nocover - - def feed(self, character: str) -> None: - """ - The main routine to be executed upon character. - Insert the logic in witch the text would be considered chaotic. - """ - raise NotImplementedError # pragma: nocover - - def reset(self) -> None: # pragma: no cover - """ - Permit to reset the plugin to the initial state. - """ - raise NotImplementedError - - @property - def ratio(self) -> float: - """ - Compute the chaos ratio based on what your feed() has seen. - Must NOT be lower than 0.; No restriction gt 0. 
- """ - raise NotImplementedError # pragma: nocover - - -class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._punctuation_count: int = 0 - self._symbol_count: int = 0 - self._character_count: int = 0 - - self._last_printable_char: str | None = None - self._frenzy_symbol_in_word: bool = False - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character != self._last_printable_char - and character not in COMMON_SAFE_ASCII_CHARACTERS - ): - if is_punctuation(character): - self._punctuation_count += 1 - elif ( - character.isdigit() is False - and is_symbol(character) - and is_emoticon(character) is False - ): - self._symbol_count += 2 - - self._last_printable_char = character - - def reset(self) -> None: # Abstract - self._punctuation_count = 0 - self._character_count = 0 - self._symbol_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - ratio_of_punctuation: float = ( - self._punctuation_count + self._symbol_count - ) / self._character_count - - return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 - - -class TooManyAccentuatedPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._character_count: int = 0 - self._accentuated_count: int = 0 - - def eligible(self, character: str) -> bool: - return character.isalpha() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_accentuated(character): - self._accentuated_count += 1 - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._accentuated_count = 0 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - ratio_of_accentuation: float = self._accentuated_count / self._character_count - return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 - - -class UnprintablePlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._unprintable_count: int = 0 - self._character_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if is_unprintable(character): - self._unprintable_count += 1 - self._character_count += 1 - - def reset(self) -> None: # Abstract - self._unprintable_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._unprintable_count * 8) / self._character_count - - -class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._successive_count: int = 0 - self._character_count: int = 0 - - self._last_latin_character: str | None = None - - def eligible(self, character: str) -> bool: - return character.isalpha() and is_latin(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - if ( - self._last_latin_character is not None - and is_accentuated(character) - and is_accentuated(self._last_latin_character) - ): - if character.isupper() and self._last_latin_character.isupper(): - self._successive_count += 1 - # Worse if its the same char duplicated with different accent. 
- if remove_accent(character) == remove_accent(self._last_latin_character): - self._successive_count += 1 - self._last_latin_character = character - - def reset(self) -> None: # Abstract - self._successive_count = 0 - self._character_count = 0 - self._last_latin_character = None - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._successive_count * 2) / self._character_count - - -class SuspiciousRange(MessDetectorPlugin): - def __init__(self) -> None: - self._suspicious_successive_range_count: int = 0 - self._character_count: int = 0 - self._last_printable_seen: str | None = None - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character.isspace() - or is_punctuation(character) - or character in COMMON_SAFE_ASCII_CHARACTERS - ): - self._last_printable_seen = None - return - - if self._last_printable_seen is None: - self._last_printable_seen = character - return - - unicode_range_a: str | None = unicode_range(self._last_printable_seen) - unicode_range_b: str | None = unicode_range(character) - - if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): - self._suspicious_successive_range_count += 1 - - self._last_printable_seen = character - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._suspicious_successive_range_count = 0 - self._last_printable_seen = None - - @property - def ratio(self) -> float: - if self._character_count <= 13: - return 0.0 - - ratio_of_suspicious_range_usage: float = ( - self._suspicious_successive_range_count * 2 - ) / self._character_count - - return ratio_of_suspicious_range_usage - - -class SuperWeirdWordPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._word_count: int = 0 - self._bad_word_count: int = 0 - self._foreign_long_count: int = 0 - - self._is_current_word_bad: bool = False - self._foreign_long_watch: bool = False - - self._character_count: int = 0 - self._bad_character_count: int = 0 - - self._buffer: str = "" - self._buffer_accent_count: int = 0 - self._buffer_glyph_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if character.isalpha(): - self._buffer += character - if is_accentuated(character): - self._buffer_accent_count += 1 - if ( - self._foreign_long_watch is False - and (is_latin(character) is False or is_accentuated(character)) - and is_cjk(character) is False - and is_hangul(character) is False - and is_katakana(character) is False - and is_hiragana(character) is False - and is_thai(character) is False - ): - self._foreign_long_watch = True - if ( - is_cjk(character) - or is_hangul(character) - or is_katakana(character) - or is_hiragana(character) - or is_thai(character) - ): - self._buffer_glyph_count += 1 - return - if not self._buffer: - return - if ( - character.isspace() or is_punctuation(character) or is_separator(character) - ) and self._buffer: - self._word_count += 1 - buffer_length: int = len(self._buffer) - - self._character_count += buffer_length - - if buffer_length >= 4: - if self._buffer_accent_count / buffer_length >= 0.5: - self._is_current_word_bad = True - # Word/Buffer ending with an upper case accentuated letter are so rare, - # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
- elif ( - is_accentuated(self._buffer[-1]) - and self._buffer[-1].isupper() - and all(_.isupper() for _ in self._buffer) is False - ): - self._foreign_long_count += 1 - self._is_current_word_bad = True - elif self._buffer_glyph_count == 1: - self._is_current_word_bad = True - self._foreign_long_count += 1 - if buffer_length >= 24 and self._foreign_long_watch: - camel_case_dst = [ - i - for c, i in zip(self._buffer, range(0, buffer_length)) - if c.isupper() - ] - probable_camel_cased: bool = False - - if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3): - probable_camel_cased = True - - if not probable_camel_cased: - self._foreign_long_count += 1 - self._is_current_word_bad = True - - if self._is_current_word_bad: - self._bad_word_count += 1 - self._bad_character_count += len(self._buffer) - self._is_current_word_bad = False - - self._foreign_long_watch = False - self._buffer = "" - self._buffer_accent_count = 0 - self._buffer_glyph_count = 0 - elif ( - character not in {"<", ">", "-", "=", "~", "|", "_"} - and character.isdigit() is False - and is_symbol(character) - ): - self._is_current_word_bad = True - self._buffer += character - - def reset(self) -> None: # Abstract - self._buffer = "" - self._is_current_word_bad = False - self._foreign_long_watch = False - self._bad_word_count = 0 - self._word_count = 0 - self._character_count = 0 - self._bad_character_count = 0 - self._foreign_long_count = 0 - - @property - def ratio(self) -> float: - if self._word_count <= 10 and self._foreign_long_count == 0: - return 0.0 - - return self._bad_character_count / self._character_count - - -class CjkUncommonPlugin(MessDetectorPlugin): - """ - Detect messy CJK text that probably means nothing. - """ - - def __init__(self) -> None: - self._character_count: int = 0 - self._uncommon_count: int = 0 - - def eligible(self, character: str) -> bool: - return is_cjk(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_cjk_uncommon(character): - self._uncommon_count += 1 - return - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._uncommon_count = 0 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - uncommon_form_usage: float = self._uncommon_count / self._character_count - - # we can be pretty sure it's garbage when uncommon characters are widely - # used. otherwise it could just be traditional chinese for example. 
- return uncommon_form_usage / 10 if uncommon_form_usage > 0.5 else 0.0 - - -class ArchaicUpperLowerPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._buf: bool = False - - self._character_count_since_last_sep: int = 0 - - self._successive_upper_lower_count: int = 0 - self._successive_upper_lower_count_final: int = 0 - - self._character_count: int = 0 - - self._last_alpha_seen: str | None = None - self._current_ascii_only: bool = True - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - is_concerned = character.isalpha() and is_case_variable(character) - chunk_sep = is_concerned is False - - if chunk_sep and self._character_count_since_last_sep > 0: - if ( - self._character_count_since_last_sep <= 64 - and character.isdigit() is False - and self._current_ascii_only is False - ): - self._successive_upper_lower_count_final += ( - self._successive_upper_lower_count - ) - - self._successive_upper_lower_count = 0 - self._character_count_since_last_sep = 0 - self._last_alpha_seen = None - self._buf = False - self._character_count += 1 - self._current_ascii_only = True - - return - - if self._current_ascii_only is True and character.isascii() is False: - self._current_ascii_only = False - - if self._last_alpha_seen is not None: - if (character.isupper() and self._last_alpha_seen.islower()) or ( - character.islower() and self._last_alpha_seen.isupper() - ): - if self._buf is True: - self._successive_upper_lower_count += 2 - self._buf = False - else: - self._buf = True - else: - self._buf = False - - self._character_count += 1 - self._character_count_since_last_sep += 1 - self._last_alpha_seen = character - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._character_count_since_last_sep = 0 - self._successive_upper_lower_count = 0 - self._successive_upper_lower_count_final = 0 - self._last_alpha_seen = None - self._buf = False - self._current_ascii_only = True - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return self._successive_upper_lower_count_final / self._character_count - - -class ArabicIsolatedFormPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._character_count: int = 0 - self._isolated_form_count: int = 0 - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._isolated_form_count = 0 - - def eligible(self, character: str) -> bool: - return is_arabic(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_arabic_isolated_form(character): - self._isolated_form_count += 1 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - isolated_form_usage: float = self._isolated_form_count / self._character_count - - return isolated_form_usage - - -@lru_cache(maxsize=1024) -def is_suspiciously_successive_range( - unicode_range_a: str | None, unicode_range_b: str | None -) -> bool: - """ - Determine if two Unicode range seen next to each other can be considered as suspicious. - """ - if unicode_range_a is None or unicode_range_b is None: - return True - - if unicode_range_a == unicode_range_b: - return False - - if "Latin" in unicode_range_a and "Latin" in unicode_range_b: - return False - - if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: - return False - - # Latin characters can be accompanied with a combining diacritical mark - # eg. Vietnamese. 
- if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( - "Combining" in unicode_range_a or "Combining" in unicode_range_b - ): - return False - - keywords_range_a, keywords_range_b = ( - unicode_range_a.split(" "), - unicode_range_b.split(" "), - ) - - for el in keywords_range_a: - if el in UNICODE_SECONDARY_RANGE_KEYWORD: - continue - if el in keywords_range_b: - return False - - # Japanese Exception - range_a_jp_chars, range_b_jp_chars = ( - unicode_range_a - in ( - "Hiragana", - "Katakana", - ), - unicode_range_b in ("Hiragana", "Katakana"), - ) - if (range_a_jp_chars or range_b_jp_chars) and ( - "CJK" in unicode_range_a or "CJK" in unicode_range_b - ): - return False - if range_a_jp_chars and range_b_jp_chars: - return False - - if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: - if "CJK" in unicode_range_a or "CJK" in unicode_range_b: - return False - if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": - return False - - # Chinese/Japanese use dedicated range for punctuation and/or separators. - if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( - unicode_range_a in ["Katakana", "Hiragana"] - and unicode_range_b in ["Katakana", "Hiragana"] - ): - if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: - return False - if "Forms" in unicode_range_a or "Forms" in unicode_range_b: - return False - if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": - return False - - return True - - -@lru_cache(maxsize=2048) -def mess_ratio( - decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False -) -> float: - """ - Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. - """ - - detectors: list[MessDetectorPlugin] = [ - md_class() for md_class in MessDetectorPlugin.__subclasses__() - ] - - length: int = len(decoded_sequence) + 1 - - mean_mess_ratio: float = 0.0 - - if length < 512: - intermediary_mean_mess_ratio_calc: int = 32 - elif length <= 1024: - intermediary_mean_mess_ratio_calc = 64 - else: - intermediary_mean_mess_ratio_calc = 128 - - for character, index in zip(decoded_sequence + "\n", range(length)): - for detector in detectors: - if detector.eligible(character): - detector.feed(character) - - if ( - index > 0 and index % intermediary_mean_mess_ratio_calc == 0 - ) or index == length - 1: - mean_mess_ratio = sum(dt.ratio for dt in detectors) - - if mean_mess_ratio >= maximum_threshold: - break - - if debug: - logger = getLogger("charset_normalizer") - - logger.log( - TRACE, - "Mess-detector extended-analysis start. 
" - f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " - f"maximum_threshold={maximum_threshold}", - ) - - if len(decoded_sequence) > 16: - logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") - logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") - - for dt in detectors: - logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") - - return round(mean_mess_ratio, 3) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so deleted file mode 100755 index dd2519b..0000000 Binary files a/venv/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/models.py b/venv/lib/python3.10/site-packages/charset_normalizer/models.py deleted file mode 100644 index 1042758..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/models.py +++ /dev/null @@ -1,360 +0,0 @@ -from __future__ import annotations - -from encodings.aliases import aliases -from hashlib import sha256 -from json import dumps -from re import sub -from typing import Any, Iterator, List, Tuple - -from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE -from .utils import iana_name, is_multi_byte_encoding, unicode_range - - -class CharsetMatch: - def __init__( - self, - payload: bytes, - guessed_encoding: str, - mean_mess_ratio: float, - has_sig_or_bom: bool, - languages: CoherenceMatches, - decoded_payload: str | None = None, - preemptive_declaration: str | None = None, - ): - self._payload: bytes = payload - - self._encoding: str = guessed_encoding - self._mean_mess_ratio: float = mean_mess_ratio - self._languages: CoherenceMatches = languages - self._has_sig_or_bom: bool = has_sig_or_bom - self._unicode_ranges: list[str] | None = None - - self._leaves: list[CharsetMatch] = [] - self._mean_coherence_ratio: float = 0.0 - - self._output_payload: bytes | None = None - self._output_encoding: str | None = None - - self._string: str | None = decoded_payload - - self._preemptive_declaration: str | None = preemptive_declaration - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CharsetMatch): - if isinstance(other, str): - return iana_name(other) == self.encoding - return False - return self.encoding == other.encoding and self.fingerprint == other.fingerprint - - def __lt__(self, other: object) -> bool: - """ - Implemented to make sorted available upon CharsetMatches items. - """ - if not isinstance(other, CharsetMatch): - raise ValueError - - chaos_difference: float = abs(self.chaos - other.chaos) - coherence_difference: float = abs(self.coherence - other.coherence) - - # Below 1% difference --> Use Coherence - if chaos_difference < 0.01 and coherence_difference > 0.02: - return self.coherence > other.coherence - elif chaos_difference < 0.01 and coherence_difference <= 0.02: - # When having a difficult decision, use the result that decoded as many multi-byte as possible. - # preserve RAM usage! 
- if len(self._payload) >= TOO_BIG_SEQUENCE: - return self.chaos < other.chaos - return self.multi_byte_usage > other.multi_byte_usage - - return self.chaos < other.chaos - - @property - def multi_byte_usage(self) -> float: - return 1.0 - (len(str(self)) / len(self.raw)) - - def __str__(self) -> str: - # Lazy Str Loading - if self._string is None: - self._string = str(self._payload, self._encoding, "strict") - return self._string - - def __repr__(self) -> str: - return f"" - - def add_submatch(self, other: CharsetMatch) -> None: - if not isinstance(other, CharsetMatch) or other == self: - raise ValueError( - "Unable to add instance <{}> as a submatch of a CharsetMatch".format( - other.__class__ - ) - ) - - other._string = None # Unload RAM usage; dirty trick. - self._leaves.append(other) - - @property - def encoding(self) -> str: - return self._encoding - - @property - def encoding_aliases(self) -> list[str]: - """ - Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. - """ - also_known_as: list[str] = [] - for u, p in aliases.items(): - if self.encoding == u: - also_known_as.append(p) - elif self.encoding == p: - also_known_as.append(u) - return also_known_as - - @property - def bom(self) -> bool: - return self._has_sig_or_bom - - @property - def byte_order_mark(self) -> bool: - return self._has_sig_or_bom - - @property - def languages(self) -> list[str]: - """ - Return the complete list of possible languages found in decoded sequence. - Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. - """ - return [e[0] for e in self._languages] - - @property - def language(self) -> str: - """ - Most probable language found in decoded sequence. If none were detected or inferred, the property will return - "Unknown". - """ - if not self._languages: - # Trying to infer the language based on the given encoding - # Its either English or we should not pronounce ourselves in certain cases. - if "ascii" in self.could_be_from_charset: - return "English" - - # doing it there to avoid circular import - from charset_normalizer.cd import encoding_languages, mb_encoding_languages - - languages = ( - mb_encoding_languages(self.encoding) - if is_multi_byte_encoding(self.encoding) - else encoding_languages(self.encoding) - ) - - if len(languages) == 0 or "Latin Based" in languages: - return "Unknown" - - return languages[0] - - return self._languages[0][0] - - @property - def chaos(self) -> float: - return self._mean_mess_ratio - - @property - def coherence(self) -> float: - if not self._languages: - return 0.0 - return self._languages[0][1] - - @property - def percent_chaos(self) -> float: - return round(self.chaos * 100, ndigits=3) - - @property - def percent_coherence(self) -> float: - return round(self.coherence * 100, ndigits=3) - - @property - def raw(self) -> bytes: - """ - Original untouched bytes. 
- """ - return self._payload - - @property - def submatch(self) -> list[CharsetMatch]: - return self._leaves - - @property - def has_submatch(self) -> bool: - return len(self._leaves) > 0 - - @property - def alphabets(self) -> list[str]: - if self._unicode_ranges is not None: - return self._unicode_ranges - # list detected ranges - detected_ranges: list[str | None] = [unicode_range(char) for char in str(self)] - # filter and sort - self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) - return self._unicode_ranges - - @property - def could_be_from_charset(self) -> list[str]: - """ - The complete list of encoding that output the exact SAME str result and therefore could be the originating - encoding. - This list does include the encoding available in property 'encoding'. - """ - return [self._encoding] + [m.encoding for m in self._leaves] - - def output(self, encoding: str = "utf_8") -> bytes: - """ - Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. - Any errors will be simply ignored by the encoder NOT replaced. - """ - if self._output_encoding is None or self._output_encoding != encoding: - self._output_encoding = encoding - decoded_string = str(self) - if ( - self._preemptive_declaration is not None - and self._preemptive_declaration.lower() - not in ["utf-8", "utf8", "utf_8"] - ): - patched_header = sub( - RE_POSSIBLE_ENCODING_INDICATION, - lambda m: m.string[m.span()[0] : m.span()[1]].replace( - m.groups()[0], - iana_name(self._output_encoding).replace("_", "-"), # type: ignore[arg-type] - ), - decoded_string[:8192], - count=1, - ) - - decoded_string = patched_header + decoded_string[8192:] - - self._output_payload = decoded_string.encode(encoding, "replace") - - return self._output_payload # type: ignore - - @property - def fingerprint(self) -> str: - """ - Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. - """ - return sha256(self.output()).hexdigest() - - -class CharsetMatches: - """ - Container with every CharsetMatch items ordered by default from most probable to the less one. - Act like a list(iterable) but does not implements all related methods. - """ - - def __init__(self, results: list[CharsetMatch] | None = None): - self._results: list[CharsetMatch] = sorted(results) if results else [] - - def __iter__(self) -> Iterator[CharsetMatch]: - yield from self._results - - def __getitem__(self, item: int | str) -> CharsetMatch: - """ - Retrieve a single item either by its position or encoding name (alias may be used here). - Raise KeyError upon invalid index or encoding not present in results. - """ - if isinstance(item, int): - return self._results[item] - if isinstance(item, str): - item = iana_name(item, False) - for result in self._results: - if item in result.could_be_from_charset: - return result - raise KeyError - - def __len__(self) -> int: - return len(self._results) - - def __bool__(self) -> bool: - return len(self._results) > 0 - - def append(self, item: CharsetMatch) -> None: - """ - Insert a single match. Will be inserted accordingly to preserve sort. - Can be inserted as a submatch. 
- """ - if not isinstance(item, CharsetMatch): - raise ValueError( - "Cannot append instance '{}' to CharsetMatches".format( - str(item.__class__) - ) - ) - # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) - if len(item.raw) < TOO_BIG_SEQUENCE: - for match in self._results: - if match.fingerprint == item.fingerprint and match.chaos == item.chaos: - match.add_submatch(item) - return - self._results.append(item) - self._results = sorted(self._results) - - def best(self) -> CharsetMatch | None: - """ - Simply return the first match. Strict equivalent to matches[0]. - """ - if not self._results: - return None - return self._results[0] - - def first(self) -> CharsetMatch | None: - """ - Redundant method, call the method best(). Kept for BC reasons. - """ - return self.best() - - -CoherenceMatch = Tuple[str, float] -CoherenceMatches = List[CoherenceMatch] - - -class CliDetectionResult: - def __init__( - self, - path: str, - encoding: str | None, - encoding_aliases: list[str], - alternative_encodings: list[str], - language: str, - alphabets: list[str], - has_sig_or_bom: bool, - chaos: float, - coherence: float, - unicode_path: str | None, - is_preferred: bool, - ): - self.path: str = path - self.unicode_path: str | None = unicode_path - self.encoding: str | None = encoding - self.encoding_aliases: list[str] = encoding_aliases - self.alternative_encodings: list[str] = alternative_encodings - self.language: str = language - self.alphabets: list[str] = alphabets - self.has_sig_or_bom: bool = has_sig_or_bom - self.chaos: float = chaos - self.coherence: float = coherence - self.is_preferred: bool = is_preferred - - @property - def __dict__(self) -> dict[str, Any]: # type: ignore - return { - "path": self.path, - "encoding": self.encoding, - "encoding_aliases": self.encoding_aliases, - "alternative_encodings": self.alternative_encodings, - "language": self.language, - "alphabets": self.alphabets, - "has_sig_or_bom": self.has_sig_or_bom, - "chaos": self.chaos, - "coherence": self.coherence, - "unicode_path": self.unicode_path, - "is_preferred": self.is_preferred, - } - - def to_json(self) -> str: - return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/py.typed b/venv/lib/python3.10/site-packages/charset_normalizer/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/utils.py b/venv/lib/python3.10/site-packages/charset_normalizer/utils.py deleted file mode 100644 index 6bf0384..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/utils.py +++ /dev/null @@ -1,414 +0,0 @@ -from __future__ import annotations - -import importlib -import logging -import unicodedata -from codecs import IncrementalDecoder -from encodings.aliases import aliases -from functools import lru_cache -from re import findall -from typing import Generator - -from _multibytecodec import ( # type: ignore[import-not-found,import] - MultibyteIncrementalDecoder, -) - -from .constant import ( - ENCODING_MARKS, - IANA_SUPPORTED_SIMILAR, - RE_POSSIBLE_ENCODING_INDICATION, - UNICODE_RANGES_COMBINED, - UNICODE_SECONDARY_RANGE_KEYWORD, - UTF8_MAXIMAL_ALLOCATION, - COMMON_CJK_CHARACTERS, -) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_accentuated(character: str) -> bool: - try: - description: str = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? 
- return False - return ( - "WITH GRAVE" in description - or "WITH ACUTE" in description - or "WITH CEDILLA" in description - or "WITH DIAERESIS" in description - or "WITH CIRCUMFLEX" in description - or "WITH TILDE" in description - or "WITH MACRON" in description - or "WITH RING ABOVE" in description - ) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def remove_accent(character: str) -> str: - decomposed: str = unicodedata.decomposition(character) - if not decomposed: - return character - - codes: list[str] = decomposed.split(" ") - - return chr(int(codes[0], 16)) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def unicode_range(character: str) -> str | None: - """ - Retrieve the Unicode range official name from a single character. - """ - character_ord: int = ord(character) - - for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): - if character_ord in ord_range: - return range_name - - return None - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_latin(character: str) -> bool: - try: - description: str = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - return "LATIN" in description - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_punctuation(character: str) -> bool: - character_category: str = unicodedata.category(character) - - if "P" in character_category: - return True - - character_range: str | None = unicode_range(character) - - if character_range is None: - return False - - return "Punctuation" in character_range - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_symbol(character: str) -> bool: - character_category: str = unicodedata.category(character) - - if "S" in character_category or "N" in character_category: - return True - - character_range: str | None = unicode_range(character) - - if character_range is None: - return False - - return "Forms" in character_range and character_category != "Lo" - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_emoticon(character: str) -> bool: - character_range: str | None = unicode_range(character) - - if character_range is None: - return False - - return "Emoticons" in character_range or "Pictographs" in character_range - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_separator(character: str) -> bool: - if character.isspace() or character in {"|", "+", "<", ">"}: - return True - - character_category: str = unicodedata.category(character) - - return "Z" in character_category or character_category in {"Po", "Pd", "Pc"} - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_case_variable(character: str) -> bool: - return character.islower() != character.isupper() - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_cjk(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "CJK" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_hiragana(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "HIRAGANA" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_katakana(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? 
- return False - - return "KATAKANA" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_hangul(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "HANGUL" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_thai(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "THAI" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_arabic(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "ARABIC" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_arabic_isolated_form(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "ARABIC" in character_name and "ISOLATED FORM" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_cjk_uncommon(character: str) -> bool: - return character not in COMMON_CJK_CHARACTERS - - -@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) -def is_unicode_range_secondary(range_name: str) -> bool: - return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_unprintable(character: str) -> bool: - return ( - character.isspace() is False # includes \n \t \r \v - and character.isprintable() is False - and character != "\x1a" # Why? Its the ASCII substitute character. - and character != "\ufeff" # bug discovered in Python, - # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. - ) - - -def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> str | None: - """ - Extract using ASCII-only decoder any specified encoding in the first n-bytes. - """ - if not isinstance(sequence, bytes): - raise TypeError - - seq_len: int = len(sequence) - - results: list[str] = findall( - RE_POSSIBLE_ENCODING_INDICATION, - sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), - ) - - if len(results) == 0: - return None - - for specified_encoding in results: - specified_encoding = specified_encoding.lower().replace("-", "_") - - encoding_alias: str - encoding_iana: str - - for encoding_alias, encoding_iana in aliases.items(): - if encoding_alias == specified_encoding: - return encoding_iana - if encoding_iana == specified_encoding: - return encoding_iana - - return None - - -@lru_cache(maxsize=128) -def is_multi_byte_encoding(name: str) -> bool: - """ - Verify is a specific encoding is a multi byte one based on it IANA name - """ - return name in { - "utf_8", - "utf_8_sig", - "utf_16", - "utf_16_be", - "utf_16_le", - "utf_32", - "utf_32_le", - "utf_32_be", - "utf_7", - } or issubclass( - importlib.import_module(f"encodings.{name}").IncrementalDecoder, - MultibyteIncrementalDecoder, - ) - - -def identify_sig_or_bom(sequence: bytes) -> tuple[str | None, bytes]: - """ - Identify and extract SIG/BOM in given sequence. 
- """ - - for iana_encoding in ENCODING_MARKS: - marks: bytes | list[bytes] = ENCODING_MARKS[iana_encoding] - - if isinstance(marks, bytes): - marks = [marks] - - for mark in marks: - if sequence.startswith(mark): - return iana_encoding, mark - - return None, b"" - - -def should_strip_sig_or_bom(iana_encoding: str) -> bool: - return iana_encoding not in {"utf_16", "utf_32"} - - -def iana_name(cp_name: str, strict: bool = True) -> str: - """Returns the Python normalized encoding name (Not the IANA official name).""" - cp_name = cp_name.lower().replace("-", "_") - - encoding_alias: str - encoding_iana: str - - for encoding_alias, encoding_iana in aliases.items(): - if cp_name in [encoding_alias, encoding_iana]: - return encoding_iana - - if strict: - raise ValueError(f"Unable to retrieve IANA for '{cp_name}'") - - return cp_name - - -def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: - if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): - return 0.0 - - decoder_a = importlib.import_module(f"encodings.{iana_name_a}").IncrementalDecoder - decoder_b = importlib.import_module(f"encodings.{iana_name_b}").IncrementalDecoder - - id_a: IncrementalDecoder = decoder_a(errors="ignore") - id_b: IncrementalDecoder = decoder_b(errors="ignore") - - character_match_count: int = 0 - - for i in range(255): - to_be_decoded: bytes = bytes([i]) - if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): - character_match_count += 1 - - return character_match_count / 254 - - -def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: - """ - Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using - the function cp_similarity. - """ - return ( - iana_name_a in IANA_SUPPORTED_SIMILAR - and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] - ) - - -def set_logging_handler( - name: str = "charset_normalizer", - level: int = logging.INFO, - format_string: str = "%(asctime)s | %(levelname)s | %(message)s", -) -> None: - logger = logging.getLogger(name) - logger.setLevel(level) - - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter(format_string)) - logger.addHandler(handler) - - -def cut_sequence_chunks( - sequences: bytes, - encoding_iana: str, - offsets: range, - chunk_size: int, - bom_or_sig_available: bool, - strip_sig_or_bom: bool, - sig_payload: bytes, - is_multi_byte_decoder: bool, - decoded_payload: str | None = None, -) -> Generator[str, None, None]: - if decoded_payload and is_multi_byte_decoder is False: - for i in offsets: - chunk = decoded_payload[i : i + chunk_size] - if not chunk: - break - yield chunk - else: - for i in offsets: - chunk_end = i + chunk_size - if chunk_end > len(sequences) + 8: - continue - - cut_sequence = sequences[i : i + chunk_size] - - if bom_or_sig_available and strip_sig_or_bom is False: - cut_sequence = sig_payload + cut_sequence - - chunk = cut_sequence.decode( - encoding_iana, - errors="ignore" if is_multi_byte_decoder else "strict", - ) - - # multi-byte bad cutting detector and adjustment - # not the cleanest way to perform that fix but clever enough for now. 
- if is_multi_byte_decoder and i > 0: - chunk_partial_size_chk: int = min(chunk_size, 16) - - if ( - decoded_payload - and chunk[:chunk_partial_size_chk] not in decoded_payload - ): - for j in range(i, i - 4, -1): - cut_sequence = sequences[j:chunk_end] - - if bom_or_sig_available and strip_sig_or_bom is False: - cut_sequence = sig_payload + cut_sequence - - chunk = cut_sequence.decode(encoding_iana, errors="ignore") - - if chunk[:chunk_partial_size_chk] in decoded_payload: - break - - yield chunk diff --git a/venv/lib/python3.10/site-packages/charset_normalizer/version.py b/venv/lib/python3.10/site-packages/charset_normalizer/version.py deleted file mode 100644 index e5687e3..0000000 --- a/venv/lib/python3.10/site-packages/charset_normalizer/version.py +++ /dev/null @@ -1,8 +0,0 @@ -""" -Expose version -""" - -from __future__ import annotations - -__version__ = "3.4.2" -VERSION = __version__.split(".") diff --git a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/METADATA deleted file mode 100644 index e6c05af..0000000 --- a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/METADATA +++ /dev/null @@ -1,82 +0,0 @@ -Metadata-Version: 2.4 -Name: click -Version: 8.2.1 -Summary: Composable command line interface toolkit -Maintainer-email: Pallets -Requires-Python: >=3.10 -Description-Content-Type: text/markdown -License-Expression: BSD-3-Clause -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -License-File: LICENSE.txt -Requires-Dist: colorama; platform_system == 'Windows' -Project-URL: Changes, https://click.palletsprojects.com/page/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://click.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/click/ - -# $ click_ - -Click is a Python package for creating beautiful command line interfaces -in a composable way with as little code as necessary. It's the "Command -Line Interface Creation Kit". It's highly configurable but comes with -sensible defaults out of the box. - -It aims to make the process of writing command line tools quick and fun -while also preventing any frustration caused by the inability to -implement an intended CLI API. - -Click in three points: - -- Arbitrary nesting of commands -- Automatic help page generation -- Supports lazy loading of subcommands at runtime - - -## A Simple Example - -```python -import click - -@click.command() -@click.option("--count", default=1, help="Number of greetings.") -@click.option("--name", prompt="Your name", help="The person to greet.") -def hello(count, name): - """Simple program that greets NAME for a total of COUNT times.""" - for _ in range(count): - click.echo(f"Hello, {name}!") - -if __name__ == '__main__': - hello() -``` - -``` -$ python hello.py --count=3 -Your name: Click -Hello, Click! -Hello, Click! -Hello, Click! -``` - - -## Donate - -The Pallets organization develops and supports Click and other popular -packages. 
In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today][]. - -[please donate today]: https://palletsprojects.com/donate - -## Contributing - -See our [detailed contributing documentation][contrib] for many ways to -contribute, including reporting issues, requesting features, asking or answering -questions, and making PRs. - -[contrib]: https://palletsprojects.com/contributing/ - diff --git a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/RECORD deleted file mode 100644 index 621f456..0000000 --- a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/RECORD +++ /dev/null @@ -1,38 +0,0 @@ -click-8.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -click-8.2.1.dist-info/METADATA,sha256=dI1MbhHTLoKD2tNCCGnx9rK2gok23HDNylFeLKdLSik,2471 -click-8.2.1.dist-info/RECORD,, -click-8.2.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 -click-8.2.1.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 -click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473 -click/__pycache__/__init__.cpython-310.pyc,, -click/__pycache__/_compat.cpython-310.pyc,, -click/__pycache__/_termui_impl.cpython-310.pyc,, -click/__pycache__/_textwrap.cpython-310.pyc,, -click/__pycache__/_winconsole.cpython-310.pyc,, -click/__pycache__/core.cpython-310.pyc,, -click/__pycache__/decorators.cpython-310.pyc,, -click/__pycache__/exceptions.cpython-310.pyc,, -click/__pycache__/formatting.cpython-310.pyc,, -click/__pycache__/globals.cpython-310.pyc,, -click/__pycache__/parser.cpython-310.pyc,, -click/__pycache__/shell_completion.cpython-310.pyc,, -click/__pycache__/termui.cpython-310.pyc,, -click/__pycache__/testing.cpython-310.pyc,, -click/__pycache__/types.cpython-310.pyc,, -click/__pycache__/utils.cpython-310.pyc,, -click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693 -click/_termui_impl.py,sha256=ASXhLi9IQIc0Js9KQSS-3-SLZcPet3VqysBf9WgbbpI,26712 -click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400 -click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465 -click/core.py,sha256=gUhpNS9cFBGdEXXdisGVG-eRvGf49RTyFagxulqwdFw,117343 -click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461 -click/exceptions.py,sha256=1rdtXgHJ1b3OjGkN-UpXB9t_HCBihJvh_DtpmLmwn9s,9891 -click/formatting.py,sha256=Bhqx4QXdKQ9W4WKknIwj5KPKFmtduGOuGq1yw_THLZ8,9726 -click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923 -click/parser.py,sha256=nU1Ah2p11q29ul1vNdU9swPo_PUuKrxU6YXToi71q1c,18979 -click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -click/shell_completion.py,sha256=CQSGdjgun4ORbOZrXP0CVhEtPx4knsufOkRsDiK64cM,19857 -click/termui.py,sha256=vAYrKC2a7f_NfEIhAThEVYfa__ib5XQbTSCGtJlABRA,30847 -click/testing.py,sha256=2eLdAaCJCGToP5Tw-XN8JjrDb3wbJIfARxg3d0crW5M,18702 -click/types.py,sha256=KBTRxN28cR1VZ5mb9iJX98MQSw_p9SGzljqfEI8z5Tw,38389 -click/utils.py,sha256=b1Mm-usEDBHtEwcPltPIn3zSK4nw2KTp5GC7_oSTlLo,20245 diff --git a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/WHEEL deleted file mode 100644 index d8b9936..0000000 --- a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.12.0 -Root-Is-Purelib: true -Tag: py3-none-any diff 
--git a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/licenses/LICENSE.txt b/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/licenses/LICENSE.txt deleted file mode 100644 index d12a849..0000000 --- a/venv/lib/python3.10/site-packages/click-8.2.1.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2014 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.10/site-packages/click/__init__.py b/venv/lib/python3.10/site-packages/click/__init__.py deleted file mode 100644 index 1aa547c..0000000 --- a/venv/lib/python3.10/site-packages/click/__init__.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Click is a simple Python module inspired by the stdlib optparse to make -writing command line scripts fun. Unlike other modules, it's based -around a simple API that does not come with too much magic and is -composable. 
-""" - -from __future__ import annotations - -from .core import Argument as Argument -from .core import Command as Command -from .core import CommandCollection as CommandCollection -from .core import Context as Context -from .core import Group as Group -from .core import Option as Option -from .core import Parameter as Parameter -from .decorators import argument as argument -from .decorators import command as command -from .decorators import confirmation_option as confirmation_option -from .decorators import group as group -from .decorators import help_option as help_option -from .decorators import make_pass_decorator as make_pass_decorator -from .decorators import option as option -from .decorators import pass_context as pass_context -from .decorators import pass_obj as pass_obj -from .decorators import password_option as password_option -from .decorators import version_option as version_option -from .exceptions import Abort as Abort -from .exceptions import BadArgumentUsage as BadArgumentUsage -from .exceptions import BadOptionUsage as BadOptionUsage -from .exceptions import BadParameter as BadParameter -from .exceptions import ClickException as ClickException -from .exceptions import FileError as FileError -from .exceptions import MissingParameter as MissingParameter -from .exceptions import NoSuchOption as NoSuchOption -from .exceptions import UsageError as UsageError -from .formatting import HelpFormatter as HelpFormatter -from .formatting import wrap_text as wrap_text -from .globals import get_current_context as get_current_context -from .termui import clear as clear -from .termui import confirm as confirm -from .termui import echo_via_pager as echo_via_pager -from .termui import edit as edit -from .termui import getchar as getchar -from .termui import launch as launch -from .termui import pause as pause -from .termui import progressbar as progressbar -from .termui import prompt as prompt -from .termui import secho as secho -from .termui import style as style -from .termui import unstyle as unstyle -from .types import BOOL as BOOL -from .types import Choice as Choice -from .types import DateTime as DateTime -from .types import File as File -from .types import FLOAT as FLOAT -from .types import FloatRange as FloatRange -from .types import INT as INT -from .types import IntRange as IntRange -from .types import ParamType as ParamType -from .types import Path as Path -from .types import STRING as STRING -from .types import Tuple as Tuple -from .types import UNPROCESSED as UNPROCESSED -from .types import UUID as UUID -from .utils import echo as echo -from .utils import format_filename as format_filename -from .utils import get_app_dir as get_app_dir -from .utils import get_binary_stream as get_binary_stream -from .utils import get_text_stream as get_text_stream -from .utils import open_file as open_file - - -def __getattr__(name: str) -> object: - import warnings - - if name == "BaseCommand": - from .core import _BaseCommand - - warnings.warn( - "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Command' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _BaseCommand - - if name == "MultiCommand": - from .core import _MultiCommand - - warnings.warn( - "'MultiCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Group' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _MultiCommand - - if name == "OptionParser": - from .parser import _OptionParser - - warnings.warn( - "'OptionParser' is deprecated and will be removed in Click 9.0. 
The" - " old parser is available in 'optparse'.", - DeprecationWarning, - stacklevel=2, - ) - return _OptionParser - - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Click 9.1. Use feature detection or" - " 'importlib.metadata.version(\"click\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("click") - - raise AttributeError(name) diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 456fcb8..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/_compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/_compat.cpython-310.pyc deleted file mode 100644 index b725317..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/_compat.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/_termui_impl.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/_termui_impl.cpython-310.pyc deleted file mode 100644 index 617a476..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/_termui_impl.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/_textwrap.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/_textwrap.cpython-310.pyc deleted file mode 100644 index 739a5b8..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/_textwrap.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/_winconsole.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/_winconsole.cpython-310.pyc deleted file mode 100644 index f2afd18..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/_winconsole.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/core.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/core.cpython-310.pyc deleted file mode 100644 index b556abd..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/core.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/decorators.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/decorators.cpython-310.pyc deleted file mode 100644 index d609e08..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/decorators.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/exceptions.cpython-310.pyc deleted file mode 100644 index 634896a..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/exceptions.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/formatting.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/formatting.cpython-310.pyc deleted file mode 100644 index edb0ce6..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/formatting.cpython-310.pyc and /dev/null differ diff --git 
a/venv/lib/python3.10/site-packages/click/__pycache__/globals.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/globals.cpython-310.pyc deleted file mode 100644 index 6d1ee1c..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/globals.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/parser.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/parser.cpython-310.pyc deleted file mode 100644 index 9fba5d8..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/parser.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/shell_completion.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/shell_completion.cpython-310.pyc deleted file mode 100644 index a181a27..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/shell_completion.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/termui.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/termui.cpython-310.pyc deleted file mode 100644 index 8873c08..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/termui.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/testing.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/testing.cpython-310.pyc deleted file mode 100644 index 92de87b..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/testing.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/types.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/types.cpython-310.pyc deleted file mode 100644 index 0f193c7..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/types.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/click/__pycache__/utils.cpython-310.pyc deleted file mode 100644 index 984babe..0000000 Binary files a/venv/lib/python3.10/site-packages/click/__pycache__/utils.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/click/_compat.py b/venv/lib/python3.10/site-packages/click/_compat.py deleted file mode 100644 index f2726b9..0000000 --- a/venv/lib/python3.10/site-packages/click/_compat.py +++ /dev/null @@ -1,622 +0,0 @@ -from __future__ import annotations - -import codecs -import collections.abc as cabc -import io -import os -import re -import sys -import typing as t -from types import TracebackType -from weakref import WeakKeyDictionary - -CYGWIN = sys.platform.startswith("cygwin") -WIN = sys.platform.startswith("win") -auto_wrap_for_ansi: t.Callable[[t.TextIO], t.TextIO] | None = None -_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") - - -def _make_text_stream( - stream: t.BinaryIO, - encoding: str | None, - errors: str | None, - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if encoding is None: - encoding = get_best_encoding(stream) - if errors is None: - errors = "replace" - return _NonClosingTextIOWrapper( - stream, - encoding, - errors, - line_buffering=True, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def is_ascii_encoding(encoding: str) -> bool: - """Checks if a given encoding is ascii.""" - try: - return codecs.lookup(encoding).name == 
"ascii" - except LookupError: - return False - - -def get_best_encoding(stream: t.IO[t.Any]) -> str: - """Returns the default stream encoding if not found.""" - rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() - if is_ascii_encoding(rv): - return "utf-8" - return rv - - -class _NonClosingTextIOWrapper(io.TextIOWrapper): - def __init__( - self, - stream: t.BinaryIO, - encoding: str | None, - errors: str | None, - force_readable: bool = False, - force_writable: bool = False, - **extra: t.Any, - ) -> None: - self._stream = stream = t.cast( - t.BinaryIO, _FixupStream(stream, force_readable, force_writable) - ) - super().__init__(stream, encoding, errors, **extra) - - def __del__(self) -> None: - try: - self.detach() - except Exception: - pass - - def isatty(self) -> bool: - # https://bitbucket.org/pypy/pypy/issue/1803 - return self._stream.isatty() - - -class _FixupStream: - """The new io interface needs more from streams than streams - traditionally implement. As such, this fix-up code is necessary in - some circumstances. - - The forcing of readable and writable flags are there because some tools - put badly patched objects on sys (one such offender are certain version - of jupyter notebook). - """ - - def __init__( - self, - stream: t.BinaryIO, - force_readable: bool = False, - force_writable: bool = False, - ): - self._stream = stream - self._force_readable = force_readable - self._force_writable = force_writable - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._stream, name) - - def read1(self, size: int) -> bytes: - f = getattr(self._stream, "read1", None) - - if f is not None: - return t.cast(bytes, f(size)) - - return self._stream.read(size) - - def readable(self) -> bool: - if self._force_readable: - return True - x = getattr(self._stream, "readable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.read(0) - except Exception: - return False - return True - - def writable(self) -> bool: - if self._force_writable: - return True - x = getattr(self._stream, "writable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.write(b"") - except Exception: - try: - self._stream.write(b"") - except Exception: - return False - return True - - def seekable(self) -> bool: - x = getattr(self._stream, "seekable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.seek(self._stream.tell()) - except Exception: - return False - return True - - -def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - return isinstance(stream.read(0), bytes) - except Exception: - return default - # This happens in some cases where the stream was already - # closed. In this case, we assume the default. - - -def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - stream.write(b"") - except Exception: - try: - stream.write("") - return False - except Exception: - pass - return default - return True - - -def _find_binary_reader(stream: t.IO[t.Any]) -> t.BinaryIO | None: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_reader(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. 
- if buf is not None and _is_binary_reader(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _find_binary_writer(stream: t.IO[t.Any]) -> t.BinaryIO | None: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_writer(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_writer(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _stream_is_misconfigured(stream: t.TextIO) -> bool: - """A stream is misconfigured if its encoding is ASCII.""" - # If the stream does not have an encoding set, we assume it's set - # to ASCII. This appears to happen in certain unittest - # environments. It's not quite clear what the correct behavior is - # but this at least will force Click to recover somehow. - return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") - - -def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: str | None) -> bool: - """A stream attribute is compatible if it is equal to the - desired value or the desired value is unset and the attribute - has a value. - """ - stream_value = getattr(stream, attr, None) - return stream_value == value or (value is None and stream_value is not None) - - -def _is_compatible_text_stream( - stream: t.TextIO, encoding: str | None, errors: str | None -) -> bool: - """Check if a stream's encoding and errors attributes are - compatible with the desired values. - """ - return _is_compat_stream_attr( - stream, "encoding", encoding - ) and _is_compat_stream_attr(stream, "errors", errors) - - -def _force_correct_text_stream( - text_stream: t.IO[t.Any], - encoding: str | None, - errors: str | None, - is_binary: t.Callable[[t.IO[t.Any], bool], bool], - find_binary: t.Callable[[t.IO[t.Any]], t.BinaryIO | None], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if is_binary(text_stream, False): - binary_reader = t.cast(t.BinaryIO, text_stream) - else: - text_stream = t.cast(t.TextIO, text_stream) - # If the stream looks compatible, and won't default to a - # misconfigured ascii encoding, return it as-is. - if _is_compatible_text_stream(text_stream, encoding, errors) and not ( - encoding is None and _stream_is_misconfigured(text_stream) - ): - return text_stream - - # Otherwise, get the underlying binary reader. - possible_binary_reader = find_binary(text_stream) - - # If that's not possible, silently use the original reader - # and get mojibake instead of exceptions. - if possible_binary_reader is None: - return text_stream - - binary_reader = possible_binary_reader - - # Default errors to replace instead of strict in order to get - # something that works. - if errors is None: - errors = "replace" - - # Wrap the binary stream in a text stream with the correct - # encoding parameters. 
- return _make_text_stream( - binary_reader, - encoding, - errors, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def _force_correct_text_reader( - text_reader: t.IO[t.Any], - encoding: str | None, - errors: str | None, - force_readable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_reader, - encoding, - errors, - _is_binary_reader, - _find_binary_reader, - force_readable=force_readable, - ) - - -def _force_correct_text_writer( - text_writer: t.IO[t.Any], - encoding: str | None, - errors: str | None, - force_writable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_writer, - encoding, - errors, - _is_binary_writer, - _find_binary_writer, - force_writable=force_writable, - ) - - -def get_binary_stdin() -> t.BinaryIO: - reader = _find_binary_reader(sys.stdin) - if reader is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdin.") - return reader - - -def get_binary_stdout() -> t.BinaryIO: - writer = _find_binary_writer(sys.stdout) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdout.") - return writer - - -def get_binary_stderr() -> t.BinaryIO: - writer = _find_binary_writer(sys.stderr) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stderr.") - return writer - - -def get_text_stdin(encoding: str | None = None, errors: str | None = None) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdin, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) - - -def get_text_stdout(encoding: str | None = None, errors: str | None = None) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdout, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) - - -def get_text_stderr(encoding: str | None = None, errors: str | None = None) -> t.TextIO: - rv = _get_windows_console_stream(sys.stderr, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) - - -def _wrap_io_open( - file: str | os.PathLike[str] | int, - mode: str, - encoding: str | None, - errors: str | None, -) -> t.IO[t.Any]: - """Handles not passing ``encoding`` and ``errors`` in binary mode.""" - if "b" in mode: - return open(file, mode) - - return open(file, mode, encoding=encoding, errors=errors) - - -def open_stream( - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - atomic: bool = False, -) -> tuple[t.IO[t.Any], bool]: - binary = "b" in mode - filename = os.fspath(filename) - - # Standard streams first. These are simple because they ignore the - # atomic flag. Use fsdecode to handle Path("-"). - if os.fsdecode(filename) == "-": - if any(m in mode for m in ["w", "a", "x"]): - if binary: - return get_binary_stdout(), False - return get_text_stdout(encoding=encoding, errors=errors), False - if binary: - return get_binary_stdin(), False - return get_text_stdin(encoding=encoding, errors=errors), False - - # Non-atomic writes directly go out through the regular open functions. 
- if not atomic: - return _wrap_io_open(filename, mode, encoding, errors), True - - # Some usability stuff for atomic writes - if "a" in mode: - raise ValueError( - "Appending to an existing file is not supported, because that" - " would involve an expensive `copy`-operation to a temporary" - " file. Open the file in normal `w`-mode and copy explicitly" - " if that's what you're after." - ) - if "x" in mode: - raise ValueError("Use the `overwrite`-parameter instead.") - if "w" not in mode: - raise ValueError("Atomic writes only make sense with `w`-mode.") - - # Atomic writes are more complicated. They work by opening a file - # as a proxy in the same folder and then using the fdopen - # functionality to wrap it in a Python file. Then we wrap it in an - # atomic file that moves the file over on close. - import errno - import random - - try: - perm: int | None = os.stat(filename).st_mode - except OSError: - perm = None - - flags = os.O_RDWR | os.O_CREAT | os.O_EXCL - - if binary: - flags |= getattr(os, "O_BINARY", 0) - - while True: - tmp_filename = os.path.join( - os.path.dirname(filename), - f".__atomic-write{random.randrange(1 << 32):08x}", - ) - try: - fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) - break - except OSError as e: - if e.errno == errno.EEXIST or ( - os.name == "nt" - and e.errno == errno.EACCES - and os.path.isdir(e.filename) - and os.access(e.filename, os.W_OK) - ): - continue - raise - - if perm is not None: - os.chmod(tmp_filename, perm) # in case perm includes bits in umask - - f = _wrap_io_open(fd, mode, encoding, errors) - af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) - return t.cast(t.IO[t.Any], af), True - - -class _AtomicFile: - def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: - self._f = f - self._tmp_filename = tmp_filename - self._real_filename = real_filename - self.closed = False - - @property - def name(self) -> str: - return self._real_filename - - def close(self, delete: bool = False) -> None: - if self.closed: - return - self._f.close() - os.replace(self._tmp_filename, self._real_filename) - self.closed = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._f, name) - - def __enter__(self) -> _AtomicFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.close(delete=exc_type is not None) - - def __repr__(self) -> str: - return repr(self._f) - - -def strip_ansi(value: str) -> str: - return _ansi_re.sub("", value) - - -def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: - while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): - stream = stream._stream - - return stream.__class__.__module__.startswith("ipykernel.") - - -def should_strip_ansi( - stream: t.IO[t.Any] | None = None, color: bool | None = None -) -> bool: - if color is None: - if stream is None: - stream = sys.stdin - return not isatty(stream) and not _is_jupyter_kernel_output(stream) - return not color - - -# On Windows, wrap the output streams with colorama to support ANSI -# color codes. 
-# NOTE: double check is needed so mypy does not analyze this on Linux -if sys.platform.startswith("win") and WIN: - from ._winconsole import _get_windows_console_stream - - def _get_argv_encoding() -> str: - import locale - - return locale.getpreferredencoding() - - _ansi_stream_wrappers: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def auto_wrap_for_ansi(stream: t.TextIO, color: bool | None = None) -> t.TextIO: - """Support ANSI color and style codes on Windows by wrapping a - stream with colorama. - """ - try: - cached = _ansi_stream_wrappers.get(stream) - except Exception: - cached = None - - if cached is not None: - return cached - - import colorama - - strip = should_strip_ansi(stream, color) - ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) - rv = t.cast(t.TextIO, ansi_wrapper.stream) - _write = rv.write - - def _safe_write(s: str) -> int: - try: - return _write(s) - except BaseException: - ansi_wrapper.reset_all() - raise - - rv.write = _safe_write # type: ignore[method-assign] - - try: - _ansi_stream_wrappers[stream] = rv - except Exception: - pass - - return rv - -else: - - def _get_argv_encoding() -> str: - return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() - - def _get_windows_console_stream( - f: t.TextIO, encoding: str | None, errors: str | None - ) -> t.TextIO | None: - return None - - -def term_len(x: str) -> int: - return len(strip_ansi(x)) - - -def isatty(stream: t.IO[t.Any]) -> bool: - try: - return stream.isatty() - except Exception: - return False - - -def _make_cached_stream_func( - src_func: t.Callable[[], t.TextIO | None], - wrapper_func: t.Callable[[], t.TextIO], -) -> t.Callable[[], t.TextIO | None]: - cache: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def func() -> t.TextIO | None: - stream = src_func() - - if stream is None: - return None - - try: - rv = cache.get(stream) - except Exception: - rv = None - if rv is not None: - return rv - rv = wrapper_func() - try: - cache[stream] = rv - except Exception: - pass - return rv - - return func - - -_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) -_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) -_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) - - -binary_streams: cabc.Mapping[str, t.Callable[[], t.BinaryIO]] = { - "stdin": get_binary_stdin, - "stdout": get_binary_stdout, - "stderr": get_binary_stderr, -} - -text_streams: cabc.Mapping[str, t.Callable[[str | None, str | None], t.TextIO]] = { - "stdin": get_text_stdin, - "stdout": get_text_stdout, - "stderr": get_text_stderr, -} diff --git a/venv/lib/python3.10/site-packages/click/_termui_impl.py b/venv/lib/python3.10/site-packages/click/_termui_impl.py deleted file mode 100644 index 51fd9bf..0000000 --- a/venv/lib/python3.10/site-packages/click/_termui_impl.py +++ /dev/null @@ -1,839 +0,0 @@ -""" -This module contains implementations for the termui module. To keep the -import time of Click down, some infrequently used functionality is -placed in this module and only imported as needed. 
-""" - -from __future__ import annotations - -import collections.abc as cabc -import contextlib -import math -import os -import shlex -import sys -import time -import typing as t -from gettext import gettext as _ -from io import StringIO -from pathlib import Path -from shutil import which -from types import TracebackType - -from ._compat import _default_text_stdout -from ._compat import CYGWIN -from ._compat import get_best_encoding -from ._compat import isatty -from ._compat import open_stream -from ._compat import strip_ansi -from ._compat import term_len -from ._compat import WIN -from .exceptions import ClickException -from .utils import echo - -V = t.TypeVar("V") - -if os.name == "nt": - BEFORE_BAR = "\r" - AFTER_BAR = "\n" -else: - BEFORE_BAR = "\r\033[?25l" - AFTER_BAR = "\033[?25h\n" - - -class ProgressBar(t.Generic[V]): - def __init__( - self, - iterable: cabc.Iterable[V] | None, - length: int | None = None, - fill_char: str = "#", - empty_char: str = " ", - bar_template: str = "%(bar)s", - info_sep: str = " ", - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - label: str | None = None, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, - width: int = 30, - ) -> None: - self.fill_char = fill_char - self.empty_char = empty_char - self.bar_template = bar_template - self.info_sep = info_sep - self.hidden = hidden - self.show_eta = show_eta - self.show_percent = show_percent - self.show_pos = show_pos - self.item_show_func = item_show_func - self.label: str = label or "" - - if file is None: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - file = StringIO() - - self.file = file - self.color = color - self.update_min_steps = update_min_steps - self._completed_intervals = 0 - self.width: int = width - self.autowidth: bool = width == 0 - - if length is None: - from operator import length_hint - - length = length_hint(iterable, -1) - - if length == -1: - length = None - if iterable is None: - if length is None: - raise TypeError("iterable or length is required") - iterable = t.cast("cabc.Iterable[V]", range(length)) - self.iter: cabc.Iterable[V] = iter(iterable) - self.length = length - self.pos: int = 0 - self.avg: list[float] = [] - self.last_eta: float - self.start: float - self.start = self.last_eta = time.time() - self.eta_known: bool = False - self.finished: bool = False - self.max_width: int | None = None - self.entered: bool = False - self.current_item: V | None = None - self._is_atty = isatty(self.file) - self._last_line: str | None = None - - def __enter__(self) -> ProgressBar[V]: - self.entered = True - self.render_progress() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.render_finish() - - def __iter__(self) -> cabc.Iterator[V]: - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - self.render_progress() - return self.generator() - - def __next__(self) -> V: - # Iteration is defined in terms of a generator function, - # returned by iter(self); use that to define next(). This works - # because `self.iter` is an iterable consumed by that generator, - # so it is re-entry safe. Calling `next(self.generator())` - # twice works and does "what you want". 
- return next(iter(self)) - - def render_finish(self) -> None: - if self.hidden or not self._is_atty: - return - self.file.write(AFTER_BAR) - self.file.flush() - - @property - def pct(self) -> float: - if self.finished: - return 1.0 - return min(self.pos / (float(self.length or 1) or 1), 1.0) - - @property - def time_per_iteration(self) -> float: - if not self.avg: - return 0.0 - return sum(self.avg) / float(len(self.avg)) - - @property - def eta(self) -> float: - if self.length is not None and not self.finished: - return self.time_per_iteration * (self.length - self.pos) - return 0.0 - - def format_eta(self) -> str: - if self.eta_known: - t = int(self.eta) - seconds = t % 60 - t //= 60 - minutes = t % 60 - t //= 60 - hours = t % 24 - t //= 24 - if t > 0: - return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" - else: - return f"{hours:02}:{minutes:02}:{seconds:02}" - return "" - - def format_pos(self) -> str: - pos = str(self.pos) - if self.length is not None: - pos += f"/{self.length}" - return pos - - def format_pct(self) -> str: - return f"{int(self.pct * 100): 4}%"[1:] - - def format_bar(self) -> str: - if self.length is not None: - bar_length = int(self.pct * self.width) - bar = self.fill_char * bar_length - bar += self.empty_char * (self.width - bar_length) - elif self.finished: - bar = self.fill_char * self.width - else: - chars = list(self.empty_char * (self.width or 1)) - if self.time_per_iteration != 0: - chars[ - int( - (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) - * self.width - ) - ] = self.fill_char - bar = "".join(chars) - return bar - - def format_progress_line(self) -> str: - show_percent = self.show_percent - - info_bits = [] - if self.length is not None and show_percent is None: - show_percent = not self.show_pos - - if self.show_pos: - info_bits.append(self.format_pos()) - if show_percent: - info_bits.append(self.format_pct()) - if self.show_eta and self.eta_known and not self.finished: - info_bits.append(self.format_eta()) - if self.item_show_func is not None: - item_info = self.item_show_func(self.current_item) - if item_info is not None: - info_bits.append(item_info) - - return ( - self.bar_template - % { - "label": self.label, - "bar": self.format_bar(), - "info": self.info_sep.join(info_bits), - } - ).rstrip() - - def render_progress(self) -> None: - import shutil - - if self.hidden: - return - - if not self._is_atty: - # Only output the label once if the output is not a TTY. - if self._last_line != self.label: - self._last_line = self.label - echo(self.label, file=self.file, color=self.color) - return - - buf = [] - # Update width in case the terminal has been resized - if self.autowidth: - old_width = self.width - self.width = 0 - clutter_length = term_len(self.format_progress_line()) - new_width = max(0, shutil.get_terminal_size().columns - clutter_length) - if new_width < old_width and self.max_width is not None: - buf.append(BEFORE_BAR) - buf.append(" " * self.max_width) - self.max_width = new_width - self.width = new_width - - clear_width = self.width - if self.max_width is not None: - clear_width = self.max_width - - buf.append(BEFORE_BAR) - line = self.format_progress_line() - line_len = term_len(line) - if self.max_width is None or self.max_width < line_len: - self.max_width = line_len - - buf.append(line) - buf.append(" " * (clear_width - line_len)) - line = "".join(buf) - # Render the line only if it changed. 
- - if line != self._last_line: - self._last_line = line - echo(line, file=self.file, color=self.color, nl=False) - self.file.flush() - - def make_step(self, n_steps: int) -> None: - self.pos += n_steps - if self.length is not None and self.pos >= self.length: - self.finished = True - - if (time.time() - self.last_eta) < 1.0: - return - - self.last_eta = time.time() - - # self.avg is a rolling list of length <= 7 of steps where steps are - # defined as time elapsed divided by the total progress through - # self.length. - if self.pos: - step = (time.time() - self.start) / self.pos - else: - step = time.time() - self.start - - self.avg = self.avg[-6:] + [step] - - self.eta_known = self.length is not None - - def update(self, n_steps: int, current_item: V | None = None) -> None: - """Update the progress bar by advancing a specified number of - steps, and optionally set the ``current_item`` for this new - position. - - :param n_steps: Number of steps to advance. - :param current_item: Optional item to set as ``current_item`` - for the updated position. - - .. versionchanged:: 8.0 - Added the ``current_item`` optional parameter. - - .. versionchanged:: 8.0 - Only render when the number of steps meets the - ``update_min_steps`` threshold. - """ - if current_item is not None: - self.current_item = current_item - - self._completed_intervals += n_steps - - if self._completed_intervals >= self.update_min_steps: - self.make_step(self._completed_intervals) - self.render_progress() - self._completed_intervals = 0 - - def finish(self) -> None: - self.eta_known = False - self.current_item = None - self.finished = True - - def generator(self) -> cabc.Iterator[V]: - """Return a generator which yields the items added to the bar - during construction, and updates the progress bar *after* the - yielded block returns. - """ - # WARNING: the iterator interface for `ProgressBar` relies on - # this and only works because this is a simple generator which - # doesn't create or manage additional state. If this function - # changes, the impact should be evaluated both against - # `iter(bar)` and `next(bar)`. `next()` in particular may call - # `self.generator()` repeatedly, and this must remain safe in - # order for that interface to work. - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - - if not self._is_atty: - yield from self.iter - else: - for rv in self.iter: - self.current_item = rv - - # This allows show_item_func to be updated before the - # item is processed. Only trigger at the beginning of - # the update interval. - if self._completed_intervals == 0: - self.render_progress() - - yield rv - self.update(1) - - self.finish() - self.render_progress() - - -def pager(generator: cabc.Iterable[str], color: bool | None = None) -> None: - """Decide what method to use for paging through text.""" - stdout = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if stdout is None: - stdout = StringIO() - - if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, generator, color) - - # Split and normalize the pager command into parts. 
- pager_cmd_parts = shlex.split(os.environ.get("PAGER", ""), posix=False) - if pager_cmd_parts: - if WIN: - if _tempfilepager(generator, pager_cmd_parts, color): - return - elif _pipepager(generator, pager_cmd_parts, color): - return - - if os.environ.get("TERM") in ("dumb", "emacs"): - return _nullpager(stdout, generator, color) - if (WIN or sys.platform.startswith("os2")) and _tempfilepager( - generator, ["more"], color - ): - return - if _pipepager(generator, ["less"], color): - return - - import tempfile - - fd, filename = tempfile.mkstemp() - os.close(fd) - try: - if _pipepager(generator, ["more"], color): - return - return _nullpager(stdout, generator, color) - finally: - os.unlink(filename) - - -def _pipepager( - generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None -) -> bool: - """Page through text by feeding it to another program. Invoking a - pager through this might support colors. - - Returns `True` if the command was found, `False` otherwise and thus another - pager should be attempted. - """ - # Split the command into the invoked CLI and its parameters. - if not cmd_parts: - return False - cmd = cmd_parts[0] - cmd_params = cmd_parts[1:] - - cmd_filepath = which(cmd) - if not cmd_filepath: - return False - # Resolves symlinks and produces a normalized absolute path string. - cmd_path = Path(cmd_filepath).resolve() - cmd_name = cmd_path.name - - import subprocess - - # Make a local copy of the environment to not affect the global one. - env = dict(os.environ) - - # If we're piping to less and the user hasn't decided on colors, we enable - # them by default we find the -R flag in the command line arguments. - if color is None and cmd_name == "less": - less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_params)}" - if not less_flags: - env["LESS"] = "-R" - color = True - elif "r" in less_flags or "R" in less_flags: - color = True - - c = subprocess.Popen( - [str(cmd_path)] + cmd_params, - shell=True, - stdin=subprocess.PIPE, - env=env, - errors="replace", - text=True, - ) - assert c.stdin is not None - try: - for text in generator: - if not color: - text = strip_ansi(text) - - c.stdin.write(text) - except BrokenPipeError: - # In case the pager exited unexpectedly, ignore the broken pipe error. - pass - except Exception as e: - # In case there is an exception we want to close the pager immediately - # and let the caller handle it. - # Otherwise the pager will keep running, and the user may not notice - # the error message, or worse yet it may leave the terminal in a broken state. - c.terminate() - raise e - finally: - # We must close stdin and wait for the pager to exit before we continue - try: - c.stdin.close() - # Close implies flush, so it might throw a BrokenPipeError if the pager - # process exited already. - except BrokenPipeError: - pass - - # Less doesn't respect ^C, but catches it for its own UI purposes (aborting - # search or other commands inside less). - # - # That means when the user hits ^C, the parent process (click) terminates, - # but less is still alive, paging the output and messing up the terminal. - # - # If the user wants to make the pager exit on ^C, they should set - # `LESS='-K'`. It's not our decision to make. - while True: - try: - c.wait() - except KeyboardInterrupt: - pass - else: - break - - return True - - -def _tempfilepager( - generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None -) -> bool: - """Page through text by invoking a program on a temporary file. 
- - Returns `True` if the command was found, `False` otherwise and thus another - pager should be attempted. - """ - # Split the command into the invoked CLI and its parameters. - if not cmd_parts: - return False - cmd = cmd_parts[0] - - cmd_filepath = which(cmd) - if not cmd_filepath: - return False - # Resolves symlinks and produces a normalized absolute path string. - cmd_path = Path(cmd_filepath).resolve() - - import subprocess - import tempfile - - fd, filename = tempfile.mkstemp() - # TODO: This never terminates if the passed generator never terminates. - text = "".join(generator) - if not color: - text = strip_ansi(text) - encoding = get_best_encoding(sys.stdout) - with open_stream(filename, "wb")[0] as f: - f.write(text.encode(encoding)) - try: - subprocess.call([str(cmd_path), filename]) - except OSError: - # Command not found - pass - finally: - os.close(fd) - os.unlink(filename) - - return True - - -def _nullpager( - stream: t.TextIO, generator: cabc.Iterable[str], color: bool | None -) -> None: - """Simply print unformatted text. This is the ultimate fallback.""" - for text in generator: - if not color: - text = strip_ansi(text) - stream.write(text) - - -class Editor: - def __init__( - self, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - ) -> None: - self.editor = editor - self.env = env - self.require_save = require_save - self.extension = extension - - def get_editor(self) -> str: - if self.editor is not None: - return self.editor - for key in "VISUAL", "EDITOR": - rv = os.environ.get(key) - if rv: - return rv - if WIN: - return "notepad" - for editor in "sensible-editor", "vim", "nano": - if which(editor) is not None: - return editor - return "vi" - - def edit_files(self, filenames: cabc.Iterable[str]) -> None: - import subprocess - - editor = self.get_editor() - environ: dict[str, str] | None = None - - if self.env: - environ = os.environ.copy() - environ.update(self.env) - - exc_filename = " ".join(f'"{filename}"' for filename in filenames) - - try: - c = subprocess.Popen( - args=f"{editor} {exc_filename}", env=environ, shell=True - ) - exit_code = c.wait() - if exit_code != 0: - raise ClickException( - _("{editor}: Editing failed").format(editor=editor) - ) - except OSError as e: - raise ClickException( - _("{editor}: Editing failed: {e}").format(editor=editor, e=e) - ) from e - - @t.overload - def edit(self, text: bytes | bytearray) -> bytes | None: ... - - # We cannot know whether or not the type expected is str or bytes when None - # is passed, so str is returned as that was what was done before. - @t.overload - def edit(self, text: str | None) -> str | None: ... - - def edit(self, text: str | bytes | bytearray | None) -> str | bytes | None: - import tempfile - - if text is None: - data = b"" - elif isinstance(text, (bytes, bytearray)): - data = text - else: - if text and not text.endswith("\n"): - text += "\n" - - if WIN: - data = text.replace("\n", "\r\n").encode("utf-8-sig") - else: - data = text.encode("utf-8") - - fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) - f: t.BinaryIO - - try: - with os.fdopen(fd, "wb") as f: - f.write(data) - - # If the filesystem resolution is 1 second, like Mac OS - # 10.12 Extended, or 2 seconds, like FAT32, and the editor - # closes very fast, require_save can fail. Set the modified - # time to be 2 seconds in the past to work around this. 
- os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) - # Depending on the resolution, the exact value might not be - # recorded, so get the new recorded value. - timestamp = os.path.getmtime(name) - - self.edit_files((name,)) - - if self.require_save and os.path.getmtime(name) == timestamp: - return None - - with open(name, "rb") as f: - rv = f.read() - - if isinstance(text, (bytes, bytearray)): - return rv - - return rv.decode("utf-8-sig").replace("\r\n", "\n") - finally: - os.unlink(name) - - -def open_url(url: str, wait: bool = False, locate: bool = False) -> int: - import subprocess - - def _unquote_file(url: str) -> str: - from urllib.parse import unquote - - if url.startswith("file://"): - url = unquote(url[7:]) - - return url - - if sys.platform == "darwin": - args = ["open"] - if wait: - args.append("-W") - if locate: - args.append("-R") - args.append(_unquote_file(url)) - null = open("/dev/null", "w") - try: - return subprocess.Popen(args, stderr=null).wait() - finally: - null.close() - elif WIN: - if locate: - url = _unquote_file(url) - args = ["explorer", f"/select,{url}"] - else: - args = ["start"] - if wait: - args.append("/WAIT") - args.append("") - args.append(url) - try: - return subprocess.call(args) - except OSError: - # Command not found - return 127 - elif CYGWIN: - if locate: - url = _unquote_file(url) - args = ["cygstart", os.path.dirname(url)] - else: - args = ["cygstart"] - if wait: - args.append("-w") - args.append(url) - try: - return subprocess.call(args) - except OSError: - # Command not found - return 127 - - try: - if locate: - url = os.path.dirname(_unquote_file(url)) or "." - else: - url = _unquote_file(url) - c = subprocess.Popen(["xdg-open", url]) - if wait: - return c.wait() - return 0 - except OSError: - if url.startswith(("http://", "https://")) and not locate and not wait: - import webbrowser - - webbrowser.open(url) - return 0 - return 1 - - -def _translate_ch_to_exc(ch: str) -> None: - if ch == "\x03": - raise KeyboardInterrupt() - - if ch == "\x04" and not WIN: # Unix-like, Ctrl+D - raise EOFError() - - if ch == "\x1a" and WIN: # Windows, Ctrl+Z - raise EOFError() - - return None - - -if sys.platform == "win32": - import msvcrt - - @contextlib.contextmanager - def raw_terminal() -> cabc.Iterator[int]: - yield -1 - - def getchar(echo: bool) -> str: - # The function `getch` will return a bytes object corresponding to - # the pressed character. Since Windows 10 build 1803, it will also - # return \x00 when called a second time after pressing a regular key. - # - # `getwch` does not share this probably-bugged behavior. Moreover, it - # returns a Unicode object by default, which is what we want. - # - # Either of these functions will return \x00 or \xe0 to indicate - # a special key, and you need to call the same function again to get - # the "rest" of the code. The fun part is that \u00e0 is - # "latin small letter a with grave", so if you type that on a French - # keyboard, you _also_ get a \xe0. - # E.g., consider the Up arrow. This returns \xe0 and then \x48. The - # resulting Unicode string reads as "a with grave" + "capital H". - # This is indistinguishable from when the user actually types - # "a with grave" and then "capital H". - # - # When \xe0 is returned, we assume it's part of a special-key sequence - # and call `getwch` again, but that means that when the user types - # the \u00e0 character, `getchar` doesn't return until a second - # character is typed. 
- # The alternative is returning immediately, but that would mess up - # cross-platform handling of arrow keys and others that start with - # \xe0. Another option is using `getch`, but then we can't reliably - # read non-ASCII characters, because return values of `getch` are - # limited to the current 8-bit codepage. - # - # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` - # is doing the right thing in more situations than with `getch`. - - if echo: - func = t.cast(t.Callable[[], str], msvcrt.getwche) - else: - func = t.cast(t.Callable[[], str], msvcrt.getwch) - - rv = func() - - if rv in ("\x00", "\xe0"): - # \x00 and \xe0 are control characters that indicate special key, - # see above. - rv += func() - - _translate_ch_to_exc(rv) - return rv - -else: - import termios - import tty - - @contextlib.contextmanager - def raw_terminal() -> cabc.Iterator[int]: - f: t.TextIO | None - fd: int - - if not isatty(sys.stdin): - f = open("/dev/tty") - fd = f.fileno() - else: - fd = sys.stdin.fileno() - f = None - - try: - old_settings = termios.tcgetattr(fd) - - try: - tty.setraw(fd) - yield fd - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - sys.stdout.flush() - - if f is not None: - f.close() - except termios.error: - pass - - def getchar(echo: bool) -> str: - with raw_terminal() as fd: - ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") - - if echo and isatty(sys.stdout): - sys.stdout.write(ch) - - _translate_ch_to_exc(ch) - return ch diff --git a/venv/lib/python3.10/site-packages/click/_textwrap.py b/venv/lib/python3.10/site-packages/click/_textwrap.py deleted file mode 100644 index 97fbee3..0000000 --- a/venv/lib/python3.10/site-packages/click/_textwrap.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import textwrap -from contextlib import contextmanager - - -class TextWrapper(textwrap.TextWrapper): - def _handle_long_word( - self, - reversed_chunks: list[str], - cur_line: list[str], - cur_len: int, - width: int, - ) -> None: - space_left = max(width - cur_len, 1) - - if self.break_long_words: - last = reversed_chunks[-1] - cut = last[:space_left] - res = last[space_left:] - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) - - @contextmanager - def extra_indent(self, indent: str) -> cabc.Iterator[None]: - old_initial_indent = self.initial_indent - old_subsequent_indent = self.subsequent_indent - self.initial_indent += indent - self.subsequent_indent += indent - - try: - yield - finally: - self.initial_indent = old_initial_indent - self.subsequent_indent = old_subsequent_indent - - def indent_only(self, text: str) -> str: - rv = [] - - for idx, line in enumerate(text.splitlines()): - indent = self.initial_indent - - if idx > 0: - indent = self.subsequent_indent - - rv.append(f"{indent}{line}") - - return "\n".join(rv) diff --git a/venv/lib/python3.10/site-packages/click/_winconsole.py b/venv/lib/python3.10/site-packages/click/_winconsole.py deleted file mode 100644 index e56c7c6..0000000 --- a/venv/lib/python3.10/site-packages/click/_winconsole.py +++ /dev/null @@ -1,296 +0,0 @@ -# This module is based on the excellent work by Adam Bartoš who -# provided a lot of what went into the implementation here in -# the discussion to issue1602 in the Python bug tracker. 
-# -# There are some general differences in regards to how this works -# compared to the original patches as we do not need to patch -# the entire interpreter but just work in our little world of -# echo and prompt. -from __future__ import annotations - -import collections.abc as cabc -import io -import sys -import time -import typing as t -from ctypes import Array -from ctypes import byref -from ctypes import c_char -from ctypes import c_char_p -from ctypes import c_int -from ctypes import c_ssize_t -from ctypes import c_ulong -from ctypes import c_void_p -from ctypes import POINTER -from ctypes import py_object -from ctypes import Structure -from ctypes.wintypes import DWORD -from ctypes.wintypes import HANDLE -from ctypes.wintypes import LPCWSTR -from ctypes.wintypes import LPWSTR - -from ._compat import _NonClosingTextIOWrapper - -assert sys.platform == "win32" -import msvcrt # noqa: E402 -from ctypes import windll # noqa: E402 -from ctypes import WINFUNCTYPE # noqa: E402 - -c_ssize_p = POINTER(c_ssize_t) - -kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW -GetConsoleMode = kernel32.GetConsoleMode -GetLastError = kernel32.GetLastError -GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ("CommandLineToArgvW", windll.shell32) -) -LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) - -STDIN_HANDLE = GetStdHandle(-10) -STDOUT_HANDLE = GetStdHandle(-11) -STDERR_HANDLE = GetStdHandle(-12) - -PyBUF_SIMPLE = 0 -PyBUF_WRITABLE = 1 - -ERROR_SUCCESS = 0 -ERROR_NOT_ENOUGH_MEMORY = 8 -ERROR_OPERATION_ABORTED = 995 - -STDIN_FILENO = 0 -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -EOF = b"\x1a" -MAX_BYTES_WRITTEN = 32767 - -if t.TYPE_CHECKING: - try: - # Using `typing_extensions.Buffer` instead of `collections.abc` - # on Windows for some reason does not have `Sized` implemented. - from collections.abc import Buffer # type: ignore - except ImportError: - from typing_extensions import Buffer - -try: - from ctypes import pythonapi -except ImportError: - # On PyPy we cannot get buffers so our ability to operate here is - # severely limited. 
- get_buffer = None -else: - - class Py_buffer(Structure): - _fields_ = [ # noqa: RUF012 - ("buf", c_void_p), - ("obj", py_object), - ("len", c_ssize_t), - ("itemsize", c_ssize_t), - ("readonly", c_int), - ("ndim", c_int), - ("format", c_char_p), - ("shape", c_ssize_p), - ("strides", c_ssize_p), - ("suboffsets", c_ssize_p), - ("internal", c_void_p), - ] - - PyObject_GetBuffer = pythonapi.PyObject_GetBuffer - PyBuffer_Release = pythonapi.PyBuffer_Release - - def get_buffer(obj: Buffer, writable: bool = False) -> Array[c_char]: - buf = Py_buffer() - flags: int = PyBUF_WRITABLE if writable else PyBUF_SIMPLE - PyObject_GetBuffer(py_object(obj), byref(buf), flags) - - try: - buffer_type = c_char * buf.len - out: Array[c_char] = buffer_type.from_address(buf.buf) - return out - finally: - PyBuffer_Release(byref(buf)) - - -class _WindowsConsoleRawIOBase(io.RawIOBase): - def __init__(self, handle: int | None) -> None: - self.handle = handle - - def isatty(self) -> t.Literal[True]: - super().isatty() - return True - - -class _WindowsConsoleReader(_WindowsConsoleRawIOBase): - def readable(self) -> t.Literal[True]: - return True - - def readinto(self, b: Buffer) -> int: - bytes_to_be_read = len(b) - if not bytes_to_be_read: - return 0 - elif bytes_to_be_read % 2: - raise ValueError( - "cannot read odd number of bytes from UTF-16-LE encoded console" - ) - - buffer = get_buffer(b, writable=True) - code_units_to_be_read = bytes_to_be_read // 2 - code_units_read = c_ulong() - - rv = ReadConsoleW( - HANDLE(self.handle), - buffer, - code_units_to_be_read, - byref(code_units_read), - None, - ) - if GetLastError() == ERROR_OPERATION_ABORTED: - # wait for KeyboardInterrupt - time.sleep(0.1) - if not rv: - raise OSError(f"Windows error: {GetLastError()}") - - if buffer[0] == EOF: - return 0 - return 2 * code_units_read.value - - -class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): - def writable(self) -> t.Literal[True]: - return True - - @staticmethod - def _get_error_message(errno: int) -> str: - if errno == ERROR_SUCCESS: - return "ERROR_SUCCESS" - elif errno == ERROR_NOT_ENOUGH_MEMORY: - return "ERROR_NOT_ENOUGH_MEMORY" - return f"Windows error {errno}" - - def write(self, b: Buffer) -> int: - bytes_to_be_written = len(b) - buf = get_buffer(b) - code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 - code_units_written = c_ulong() - - WriteConsoleW( - HANDLE(self.handle), - buf, - code_units_to_be_written, - byref(code_units_written), - None, - ) - bytes_written = 2 * code_units_written.value - - if bytes_written == 0 and bytes_to_be_written > 0: - raise OSError(self._get_error_message(GetLastError())) - return bytes_written - - -class ConsoleStream: - def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: - self._text_stream = text_stream - self.buffer = byte_stream - - @property - def name(self) -> str: - return self.buffer.name - - def write(self, x: t.AnyStr) -> int: - if isinstance(x, str): - return self._text_stream.write(x) - try: - self.flush() - except Exception: - pass - return self.buffer.write(x) - - def writelines(self, lines: cabc.Iterable[t.AnyStr]) -> None: - for line in lines: - self.write(line) - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._text_stream, name) - - def isatty(self) -> bool: - return self.buffer.isatty() - - def __repr__(self) -> str: - return f"" - - -def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - 
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -_stream_factories: cabc.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { - 0: _get_text_stdin, - 1: _get_text_stdout, - 2: _get_text_stderr, -} - - -def _is_console(f: t.TextIO) -> bool: - if not hasattr(f, "fileno"): - return False - - try: - fileno = f.fileno() - except (OSError, io.UnsupportedOperation): - return False - - handle = msvcrt.get_osfhandle(fileno) - return bool(GetConsoleMode(handle, byref(DWORD()))) - - -def _get_windows_console_stream( - f: t.TextIO, encoding: str | None, errors: str | None -) -> t.TextIO | None: - if ( - get_buffer is None - or encoding not in {"utf-16-le", None} - or errors not in {"strict", None} - or not _is_console(f) - ): - return None - - func = _stream_factories.get(f.fileno()) - if func is None: - return None - - b = getattr(f, "buffer", None) - - if b is None: - return None - - return func(b) diff --git a/venv/lib/python3.10/site-packages/click/core.py b/venv/lib/python3.10/site-packages/click/core.py deleted file mode 100644 index f57ada6..0000000 --- a/venv/lib/python3.10/site-packages/click/core.py +++ /dev/null @@ -1,3135 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import enum -import errno -import inspect -import os -import sys -import typing as t -from collections import abc -from collections import Counter -from contextlib import AbstractContextManager -from contextlib import contextmanager -from contextlib import ExitStack -from functools import update_wrapper -from gettext import gettext as _ -from gettext import ngettext -from itertools import repeat -from types import TracebackType - -from . import types -from .exceptions import Abort -from .exceptions import BadParameter -from .exceptions import ClickException -from .exceptions import Exit -from .exceptions import MissingParameter -from .exceptions import NoArgsIsHelpError -from .exceptions import UsageError -from .formatting import HelpFormatter -from .formatting import join_options -from .globals import pop_context -from .globals import push_context -from .parser import _flag_needs_value -from .parser import _OptionParser -from .parser import _split_opt -from .termui import confirm -from .termui import prompt -from .termui import style -from .utils import _detect_program_name -from .utils import _expand_args -from .utils import echo -from .utils import make_default_short_help -from .utils import make_str -from .utils import PacifyFlushWrapper - -if t.TYPE_CHECKING: - from .shell_completion import CompletionItem - -F = t.TypeVar("F", bound="t.Callable[..., t.Any]") -V = t.TypeVar("V") - - -def _complete_visible_commands( - ctx: Context, incomplete: str -) -> cabc.Iterator[tuple[str, Command]]: - """List all the subcommands of a group that start with the - incomplete value and aren't hidden. 
- - :param ctx: Invocation context for the group. - :param incomplete: Value being completed. May be empty. - """ - multi = t.cast(Group, ctx.command) - - for name in multi.list_commands(ctx): - if name.startswith(incomplete): - command = multi.get_command(ctx, name) - - if command is not None and not command.hidden: - yield name, command - - -def _check_nested_chain( - base_command: Group, cmd_name: str, cmd: Command, register: bool = False -) -> None: - if not base_command.chain or not isinstance(cmd, Group): - return - - if register: - message = ( - f"It is not possible to add the group {cmd_name!r} to another" - f" group {base_command.name!r} that is in chain mode." - ) - else: - message = ( - f"Found the group {cmd_name!r} as subcommand to another group " - f" {base_command.name!r} that is in chain mode. This is not supported." - ) - - raise RuntimeError(message) - - -def batch(iterable: cabc.Iterable[V], batch_size: int) -> list[tuple[V, ...]]: - return list(zip(*repeat(iter(iterable), batch_size), strict=False)) - - -@contextmanager -def augment_usage_errors( - ctx: Context, param: Parameter | None = None -) -> cabc.Iterator[None]: - """Context manager that attaches extra information to exceptions.""" - try: - yield - except BadParameter as e: - if e.ctx is None: - e.ctx = ctx - if param is not None and e.param is None: - e.param = param - raise - except UsageError as e: - if e.ctx is None: - e.ctx = ctx - raise - - -def iter_params_for_processing( - invocation_order: cabc.Sequence[Parameter], - declaration_order: cabc.Sequence[Parameter], -) -> list[Parameter]: - """Returns all declared parameters in the order they should be processed. - - The declared parameters are re-shuffled depending on the order in which - they were invoked, as well as the eagerness of each parameters. - - The invocation order takes precedence over the declaration order. I.e. the - order in which the user provided them to the CLI is respected. - - This behavior and its effect on callback evaluation is detailed at: - https://click.palletsprojects.com/en/stable/advanced/#callback-evaluation-order - """ - - def sort_key(item: Parameter) -> tuple[bool, float]: - try: - idx: float = invocation_order.index(item) - except ValueError: - idx = float("inf") - - return not item.is_eager, idx - - return sorted(declaration_order, key=sort_key) - - -class ParameterSource(enum.Enum): - """This is an :class:`~enum.Enum` that indicates the source of a - parameter's value. - - Use :meth:`click.Context.get_parameter_source` to get the - source for a parameter by name. - - .. versionchanged:: 8.0 - Use :class:`~enum.Enum` and drop the ``validate`` method. - - .. versionchanged:: 8.0 - Added the ``PROMPT`` value. - """ - - COMMANDLINE = enum.auto() - """The value was provided by the command line args.""" - ENVIRONMENT = enum.auto() - """The value was provided with an environment variable.""" - DEFAULT = enum.auto() - """Used the default specified by the parameter.""" - DEFAULT_MAP = enum.auto() - """Used a default provided by :attr:`Context.default_map`.""" - PROMPT = enum.auto() - """Used a prompt to confirm a default or provide a value.""" - - -class Context: - """The context is a special internal object that holds state relevant - for the script execution at every single level. It's normally invisible - to commands unless they opt-in to getting access to it. - - The context is useful as it can pass internal objects around and can - control special execution features such as reading data from - environment variables. 
- - A context can be used as context manager in which case it will call - :meth:`close` on teardown. - - :param command: the command class for this context. - :param parent: the parent context. - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it is usually - the name of the script, for commands below it it's - the name of the script. - :param obj: an arbitrary object of user data. - :param auto_envvar_prefix: the prefix to use for automatic environment - variables. If this is `None` then reading - from environment variables is disabled. This - does not affect manually set environment - variables which are always read. - :param default_map: a dictionary (like object) with default values - for parameters. - :param terminal_width: the width of the terminal. The default is - inherit from parent context. If no context - defines the terminal width then auto - detection will be applied. - :param max_content_width: the maximum width for content rendered by - Click (this currently only affects help - pages). This defaults to 80 characters if - not overridden. In other words: even if the - terminal is larger than that, Click will not - format things wider than 80 characters by - default. In addition to that, formatters might - add some safety mapping on the right. - :param resilient_parsing: if this flag is enabled then Click will - parse without any interactivity or callback - invocation. Default values will also be - ignored. This is useful for implementing - things such as completion support. - :param allow_extra_args: if this is set to `True` then extra arguments - at the end will not raise an error and will be - kept on the context. The default is to inherit - from the command. - :param allow_interspersed_args: if this is set to `False` then options - and arguments cannot be mixed. The - default is to inherit from the command. - :param ignore_unknown_options: instructs click to ignore options it does - not know and keeps them for later - processing. - :param help_option_names: optionally a list of strings that define how - the default help parameter is named. The - default is ``['--help']``. - :param token_normalize_func: an optional function that is used to - normalize tokens (options, choices, - etc.). This for instance can be used to - implement case insensitive behavior. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are used in texts that Click prints which is by - default not the case. This for instance would affect - help output. - :param show_default: Show the default value for commands. If this - value is not set, it defaults to the value from the parent - context. ``Command.show_default`` overrides this default for the - specific command. - - .. versionchanged:: 8.2 - The ``protected_args`` attribute is deprecated and will be removed in - Click 9.0. ``args`` will contain remaining unparsed tokens. - - .. versionchanged:: 8.1 - The ``show_default`` parameter is overridden by - ``Command.show_default``, instead of the other way around. - - .. versionchanged:: 8.0 - The ``show_default`` parameter defaults to the value from the - parent context. - - .. versionchanged:: 7.1 - Added the ``show_default`` parameter. - - .. versionchanged:: 4.0 - Added the ``color``, ``ignore_unknown_options``, and - ``max_content_width`` parameters. - - .. 
versionchanged:: 3.0 - Added the ``allow_extra_args`` and ``allow_interspersed_args`` - parameters. - - .. versionchanged:: 2.0 - Added the ``resilient_parsing``, ``help_option_names``, and - ``token_normalize_func`` parameters. - """ - - #: The formatter class to create with :meth:`make_formatter`. - #: - #: .. versionadded:: 8.0 - formatter_class: type[HelpFormatter] = HelpFormatter - - def __init__( - self, - command: Command, - parent: Context | None = None, - info_name: str | None = None, - obj: t.Any | None = None, - auto_envvar_prefix: str | None = None, - default_map: cabc.MutableMapping[str, t.Any] | None = None, - terminal_width: int | None = None, - max_content_width: int | None = None, - resilient_parsing: bool = False, - allow_extra_args: bool | None = None, - allow_interspersed_args: bool | None = None, - ignore_unknown_options: bool | None = None, - help_option_names: list[str] | None = None, - token_normalize_func: t.Callable[[str], str] | None = None, - color: bool | None = None, - show_default: bool | None = None, - ) -> None: - #: the parent context or `None` if none exists. - self.parent = parent - #: the :class:`Command` for this context. - self.command = command - #: the descriptive information name - self.info_name = info_name - #: Map of parameter names to their parsed values. Parameters - #: with ``expose_value=False`` are not stored. - self.params: dict[str, t.Any] = {} - #: the leftover arguments. - self.args: list[str] = [] - #: protected arguments. These are arguments that are prepended - #: to `args` when certain parsing scenarios are encountered but - #: must be never propagated to another arguments. This is used - #: to implement nested parsing. - self._protected_args: list[str] = [] - #: the collected prefixes of the command's options. - self._opt_prefixes: set[str] = set(parent._opt_prefixes) if parent else set() - - if obj is None and parent is not None: - obj = parent.obj - - #: the user object stored. - self.obj: t.Any = obj - self._meta: dict[str, t.Any] = getattr(parent, "meta", {}) - - #: A dictionary (-like object) with defaults for parameters. - if ( - default_map is None - and info_name is not None - and parent is not None - and parent.default_map is not None - ): - default_map = parent.default_map.get(info_name) - - self.default_map: cabc.MutableMapping[str, t.Any] | None = default_map - - #: This flag indicates if a subcommand is going to be executed. A - #: group callback can use this information to figure out if it's - #: being executed directly or because the execution flow passes - #: onwards to a subcommand. By default it's None, but it can be - #: the name of the subcommand to execute. - #: - #: If chaining is enabled this will be set to ``'*'`` in case - #: any commands are executed. It is however not possible to - #: figure out which ones. If you require this knowledge you - #: should use a :func:`result_callback`. - self.invoked_subcommand: str | None = None - - if terminal_width is None and parent is not None: - terminal_width = parent.terminal_width - - #: The width of the terminal (None is autodetection). - self.terminal_width: int | None = terminal_width - - if max_content_width is None and parent is not None: - max_content_width = parent.max_content_width - - #: The maximum width of formatted content (None implies a sensible - #: default which is 80 for most things). 
- self.max_content_width: int | None = max_content_width - - if allow_extra_args is None: - allow_extra_args = command.allow_extra_args - - #: Indicates if the context allows extra args or if it should - #: fail on parsing. - #: - #: .. versionadded:: 3.0 - self.allow_extra_args = allow_extra_args - - if allow_interspersed_args is None: - allow_interspersed_args = command.allow_interspersed_args - - #: Indicates if the context allows mixing of arguments and - #: options or not. - #: - #: .. versionadded:: 3.0 - self.allow_interspersed_args: bool = allow_interspersed_args - - if ignore_unknown_options is None: - ignore_unknown_options = command.ignore_unknown_options - - #: Instructs click to ignore options that a command does not - #: understand and will store it on the context for later - #: processing. This is primarily useful for situations where you - #: want to call into external programs. Generally this pattern is - #: strongly discouraged because it's not possibly to losslessly - #: forward all arguments. - #: - #: .. versionadded:: 4.0 - self.ignore_unknown_options: bool = ignore_unknown_options - - if help_option_names is None: - if parent is not None: - help_option_names = parent.help_option_names - else: - help_option_names = ["--help"] - - #: The names for the help options. - self.help_option_names: list[str] = help_option_names - - if token_normalize_func is None and parent is not None: - token_normalize_func = parent.token_normalize_func - - #: An optional normalization function for tokens. This is - #: options, choices, commands etc. - self.token_normalize_func: t.Callable[[str], str] | None = token_normalize_func - - #: Indicates if resilient parsing is enabled. In that case Click - #: will do its best to not cause any failures and default values - #: will be ignored. Useful for completion. - self.resilient_parsing: bool = resilient_parsing - - # If there is no envvar prefix yet, but the parent has one and - # the command on this level has a name, we can expand the envvar - # prefix automatically. - if auto_envvar_prefix is None: - if ( - parent is not None - and parent.auto_envvar_prefix is not None - and self.info_name is not None - ): - auto_envvar_prefix = ( - f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" - ) - else: - auto_envvar_prefix = auto_envvar_prefix.upper() - - if auto_envvar_prefix is not None: - auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") - - self.auto_envvar_prefix: str | None = auto_envvar_prefix - - if color is None and parent is not None: - color = parent.color - - #: Controls if styling output is wanted or not. - self.color: bool | None = color - - if show_default is None and parent is not None: - show_default = parent.show_default - - #: Show option default values when formatting help text. - self.show_default: bool | None = show_default - - self._close_callbacks: list[t.Callable[[], t.Any]] = [] - self._depth = 0 - self._parameter_source: dict[str, ParameterSource] = {} - self._exit_stack = ExitStack() - - @property - def protected_args(self) -> list[str]: - import warnings - - warnings.warn( - "'protected_args' is deprecated and will be removed in Click 9.0." - " 'args' will contain remaining unparsed tokens.", - DeprecationWarning, - stacklevel=2, - ) - return self._protected_args - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire CLI - structure. - - .. 
code-block:: python - - with Context(cli) as ctx: - info = ctx.to_info_dict() - - .. versionadded:: 8.0 - """ - return { - "command": self.command.to_info_dict(self), - "info_name": self.info_name, - "allow_extra_args": self.allow_extra_args, - "allow_interspersed_args": self.allow_interspersed_args, - "ignore_unknown_options": self.ignore_unknown_options, - "auto_envvar_prefix": self.auto_envvar_prefix, - } - - def __enter__(self) -> Context: - self._depth += 1 - push_context(self) - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self._depth -= 1 - if self._depth == 0: - self.close() - pop_context() - - @contextmanager - def scope(self, cleanup: bool = True) -> cabc.Iterator[Context]: - """This helper method can be used with the context object to promote - it to the current thread local (see :func:`get_current_context`). - The default behavior of this is to invoke the cleanup functions which - can be disabled by setting `cleanup` to `False`. The cleanup - functions are typically used for things such as closing file handles. - - If the cleanup is intended the context object can also be directly - used as a context manager. - - Example usage:: - - with ctx.scope(): - assert get_current_context() is ctx - - This is equivalent:: - - with ctx: - assert get_current_context() is ctx - - .. versionadded:: 5.0 - - :param cleanup: controls if the cleanup functions should be run or - not. The default is to run these functions. In - some situations the context only wants to be - temporarily pushed in which case this can be disabled. - Nested pushes automatically defer the cleanup. - """ - if not cleanup: - self._depth += 1 - try: - with self as rv: - yield rv - finally: - if not cleanup: - self._depth -= 1 - - @property - def meta(self) -> dict[str, t.Any]: - """This is a dictionary which is shared with all the contexts - that are nested. It exists so that click utilities can store some - state here if they need to. It is however the responsibility of - that code to manage this dictionary well. - - The keys are supposed to be unique dotted strings. For instance - module paths are a good choice for it. What is stored in there is - irrelevant for the operation of click. However what is important is - that code that places data here adheres to the general semantics of - the system. - - Example usage:: - - LANG_KEY = f'{__name__}.lang' - - def set_language(value): - ctx = get_current_context() - ctx.meta[LANG_KEY] = value - - def get_language(): - return get_current_context().meta.get(LANG_KEY, 'en_US') - - .. versionadded:: 5.0 - """ - return self._meta - - def make_formatter(self) -> HelpFormatter: - """Creates the :class:`~click.HelpFormatter` for the help and - usage output. - - To quickly customize the formatter class used without overriding - this method, set the :attr:`formatter_class` attribute. - - .. versionchanged:: 8.0 - Added the :attr:`formatter_class` attribute. - """ - return self.formatter_class( - width=self.terminal_width, max_width=self.max_content_width - ) - - def with_resource(self, context_manager: AbstractContextManager[V]) -> V: - """Register a resource as if it were used in a ``with`` - statement. The resource will be cleaned up when the context is - popped. - - Uses :meth:`contextlib.ExitStack.enter_context`. It calls the - resource's ``__enter__()`` method and returns the result. 
When - the context is popped, it closes the stack, which calls the - resource's ``__exit__()`` method. - - To register a cleanup function for something that isn't a - context manager, use :meth:`call_on_close`. Or use something - from :mod:`contextlib` to turn it into a context manager first. - - .. code-block:: python - - @click.group() - @click.option("--name") - @click.pass_context - def cli(ctx): - ctx.obj = ctx.with_resource(connect_db(name)) - - :param context_manager: The context manager to enter. - :return: Whatever ``context_manager.__enter__()`` returns. - - .. versionadded:: 8.0 - """ - return self._exit_stack.enter_context(context_manager) - - def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Register a function to be called when the context tears down. - - This can be used to close resources opened during the script - execution. Resources that support Python's context manager - protocol which would be used in a ``with`` statement should be - registered with :meth:`with_resource` instead. - - :param f: The function to execute on teardown. - """ - return self._exit_stack.callback(f) - - def close(self) -> None: - """Invoke all close callbacks registered with - :meth:`call_on_close`, and exit all context managers entered - with :meth:`with_resource`. - """ - self._exit_stack.close() - # In case the context is reused, create a new exit stack. - self._exit_stack = ExitStack() - - @property - def command_path(self) -> str: - """The computed command path. This is used for the ``usage`` - information on the help page. It's automatically created by - combining the info names of the chain of contexts to the root. - """ - rv = "" - if self.info_name is not None: - rv = self.info_name - if self.parent is not None: - parent_command_path = [self.parent.command_path] - - if isinstance(self.parent.command, Command): - for param in self.parent.command.get_params(self): - parent_command_path.extend(param.get_usage_pieces(self)) - - rv = f"{' '.join(parent_command_path)} {rv}" - return rv.lstrip() - - def find_root(self) -> Context: - """Finds the outermost context.""" - node = self - while node.parent is not None: - node = node.parent - return node - - def find_object(self, object_type: type[V]) -> V | None: - """Finds the closest object of a given type.""" - node: Context | None = self - - while node is not None: - if isinstance(node.obj, object_type): - return node.obj - - node = node.parent - - return None - - def ensure_object(self, object_type: type[V]) -> V: - """Like :meth:`find_object` but sets the innermost object to a - new instance of `object_type` if it does not exist. - """ - rv = self.find_object(object_type) - if rv is None: - self.obj = rv = object_type() - return rv - - @t.overload - def lookup_default( - self, name: str, call: t.Literal[True] = True - ) -> t.Any | None: ... - - @t.overload - def lookup_default( - self, name: str, call: t.Literal[False] = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def lookup_default(self, name: str, call: bool = True) -> t.Any | None: - """Get the default for a parameter from :attr:`default_map`. - - :param name: Name of the parameter. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. 
- """ - if self.default_map is not None: - value = self.default_map.get(name) - - if call and callable(value): - return value() - - return value - - return None - - def fail(self, message: str) -> t.NoReturn: - """Aborts the execution of the program with a specific error - message. - - :param message: the error message to fail with. - """ - raise UsageError(message, self) - - def abort(self) -> t.NoReturn: - """Aborts the script.""" - raise Abort() - - def exit(self, code: int = 0) -> t.NoReturn: - """Exits the application with a given exit code. - - .. versionchanged:: 8.2 - Callbacks and context managers registered with :meth:`call_on_close` - and :meth:`with_resource` are closed before exiting. - """ - self.close() - raise Exit(code) - - def get_usage(self) -> str: - """Helper method to get formatted usage string for the current - context and command. - """ - return self.command.get_usage(self) - - def get_help(self) -> str: - """Helper method to get formatted help page for the current - context and command. - """ - return self.command.get_help(self) - - def _make_sub_context(self, command: Command) -> Context: - """Create a new context of the same type as this context, but - for a new command. - - :meta private: - """ - return type(self)(command, info_name=command.name, parent=self) - - @t.overload - def invoke( - self, callback: t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any - ) -> V: ... - - @t.overload - def invoke(self, callback: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: ... - - def invoke( - self, callback: Command | t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any - ) -> t.Any | V: - """Invokes a command callback in exactly the way it expects. There - are two ways to invoke this method: - - 1. the first argument can be a callback and all other arguments and - keyword arguments are forwarded directly to the function. - 2. the first argument is a click command object. In that case all - arguments are forwarded as well but proper click parameters - (options and click arguments) must be keyword arguments and Click - will fill in defaults. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if :meth:`forward` is called at multiple levels. - - .. versionchanged:: 3.2 - A new context is created, and missing arguments use default values. - """ - if isinstance(callback, Command): - other_cmd = callback - - if other_cmd.callback is None: - raise TypeError( - "The given command does not have a callback that can be invoked." - ) - else: - callback = t.cast("t.Callable[..., V]", other_cmd.callback) - - ctx = self._make_sub_context(other_cmd) - - for param in other_cmd.params: - if param.name not in kwargs and param.expose_value: - kwargs[param.name] = param.type_cast_value( # type: ignore - ctx, param.get_default(ctx) - ) - - # Track all kwargs as params, so that forward() will pass - # them on in subsequent calls. - ctx.params.update(kwargs) - else: - ctx = self - - with augment_usage_errors(self): - with ctx: - return callback(*args, **kwargs) - - def forward(self, cmd: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Similar to :meth:`invoke` but fills in default keyword - arguments from the current context if the other command expects - it. This cannot invoke callbacks directly, only other commands. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if ``forward`` is called at multiple levels. - """ - # Can only forward to other commands, not direct callbacks. 
- if not isinstance(cmd, Command): - raise TypeError("Callback is not a command.") - - for param in self.params: - if param not in kwargs: - kwargs[param] = self.params[param] - - return self.invoke(cmd, *args, **kwargs) - - def set_parameter_source(self, name: str, source: ParameterSource) -> None: - """Set the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - :param name: The name of the parameter. - :param source: A member of :class:`~click.core.ParameterSource`. - """ - self._parameter_source[name] = source - - def get_parameter_source(self, name: str) -> ParameterSource | None: - """Get the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - This can be useful for determining when a user specified a value - on the command line that is the same as the default value. It - will be :attr:`~click.core.ParameterSource.DEFAULT` only if the - value was actually taken from the default. - - :param name: The name of the parameter. - :rtype: ParameterSource - - .. versionchanged:: 8.0 - Returns ``None`` if the parameter was not provided from any - source. - """ - return self._parameter_source.get(name) - - -class Command: - """Commands are the basic building block of command line interfaces in - Click. A basic command handles command line parsing and might dispatch - more parsing to commands nested below it. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - :param callback: the callback to invoke. This is optional. - :param params: the parameters to register with this command. This can - be either :class:`Option` or :class:`Argument` objects. - :param help: the help string to use for this command. - :param epilog: like the help string but it's printed at the end of the - help page after everything else. - :param short_help: the short help to use for this command. This is - shown on the command listing of the parent command. - :param add_help_option: by default each command registers a ``--help`` - option. This can be disabled by this parameter. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is disabled by default. - If enabled this will add ``--help`` as argument - if no arguments are passed - :param hidden: hide this command from help outputs. - :param deprecated: If ``True`` or non-empty string, issues a message - indicating that the command is deprecated and highlights - its deprecation in --help. The message can be customized - by using a string as the value. - - .. versionchanged:: 8.2 - This is the base class for all commands, not ``BaseCommand``. - ``deprecated`` can be set to a string as well to customize the - deprecation message. - - .. versionchanged:: 8.1 - ``help``, ``epilog``, and ``short_help`` are stored unprocessed, - all formatting is done when outputting help text, not at init, - and is done even if not using the ``@command`` decorator. - - .. versionchanged:: 8.0 - Added a ``repr`` showing the command name. - - .. versionchanged:: 7.1 - Added the ``no_args_is_help`` parameter. - - .. versionchanged:: 2.0 - Added the ``context_settings`` parameter. - """ - - #: The context class to create with :meth:`make_context`. - #: - #: .. versionadded:: 8.0 - context_class: type[Context] = Context - - #: the default for the :attr:`Context.allow_extra_args` flag. 
- allow_extra_args = False - - #: the default for the :attr:`Context.allow_interspersed_args` flag. - allow_interspersed_args = True - - #: the default for the :attr:`Context.ignore_unknown_options` flag. - ignore_unknown_options = False - - def __init__( - self, - name: str | None, - context_settings: cabc.MutableMapping[str, t.Any] | None = None, - callback: t.Callable[..., t.Any] | None = None, - params: list[Parameter] | None = None, - help: str | None = None, - epilog: str | None = None, - short_help: str | None = None, - options_metavar: str | None = "[OPTIONS]", - add_help_option: bool = True, - no_args_is_help: bool = False, - hidden: bool = False, - deprecated: bool | str = False, - ) -> None: - #: the name the command thinks it has. Upon registering a command - #: on a :class:`Group` the group will default the command name - #: with this information. You should instead use the - #: :class:`Context`\'s :attr:`~Context.info_name` attribute. - self.name = name - - if context_settings is None: - context_settings = {} - - #: an optional dictionary with defaults passed to the context. - self.context_settings: cabc.MutableMapping[str, t.Any] = context_settings - - #: the callback to execute when the command fires. This might be - #: `None` in which case nothing happens. - self.callback = callback - #: the list of parameters for this command in the order they - #: should show up in the help page and execute. Eager parameters - #: will automatically be handled before non eager ones. - self.params: list[Parameter] = params or [] - self.help = help - self.epilog = epilog - self.options_metavar = options_metavar - self.short_help = short_help - self.add_help_option = add_help_option - self._help_option = None - self.no_args_is_help = no_args_is_help - self.hidden = hidden - self.deprecated = deprecated - - def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: - return { - "name": self.name, - "params": [param.to_info_dict() for param in self.get_params(ctx)], - "help": self.help, - "epilog": self.epilog, - "short_help": self.short_help, - "hidden": self.hidden, - "deprecated": self.deprecated, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def get_usage(self, ctx: Context) -> str: - """Formats the usage line into a string and returns it. - - Calls :meth:`format_usage` internally. - """ - formatter = ctx.make_formatter() - self.format_usage(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_params(self, ctx: Context) -> list[Parameter]: - params = self.params - help_option = self.get_help_option(ctx) - - if help_option is not None: - params = [*params, help_option] - - if __debug__: - import warnings - - opts = [opt for param in params for opt in param.opts] - opts_counter = Counter(opts) - duplicate_opts = (opt for opt, count in opts_counter.items() if count > 1) - - for duplicate_opt in duplicate_opts: - warnings.warn( - ( - f"The parameter {duplicate_opt} is used more than once. " - "Remove its duplicate as parameters should be unique." - ), - stacklevel=3, - ) - - return params - - def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the usage line into the formatter. - - This is a low-level method called by :meth:`get_usage`. - """ - pieces = self.collect_usage_pieces(ctx) - formatter.write_usage(ctx.command_path, " ".join(pieces)) - - def collect_usage_pieces(self, ctx: Context) -> list[str]: - """Returns all the pieces that go into the usage line and returns - it as a list of strings. 
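
# The constructor parameters documented above can also be used directly,
# without decorators; a small illustrative sketch that builds and runs a
# command by hand (the `greet` callback is a made-up example):
import click


def greet(name):
    click.echo(f"Hello, {name}!")


greet_cmd = click.Command(
    "greet",
    params=[click.Argument(["name"])],
    callback=greet,
    help="Greet someone by name.",
)

if __name__ == "__main__":
    greet_cmd()
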
- """ - rv = [self.options_metavar] if self.options_metavar else [] - - for param in self.get_params(ctx): - rv.extend(param.get_usage_pieces(ctx)) - - return rv - - def get_help_option_names(self, ctx: Context) -> list[str]: - """Returns the names for the help option.""" - all_names = set(ctx.help_option_names) - for param in self.params: - all_names.difference_update(param.opts) - all_names.difference_update(param.secondary_opts) - return list(all_names) - - def get_help_option(self, ctx: Context) -> Option | None: - """Returns the help option object. - - Skipped if :attr:`add_help_option` is ``False``. - - .. versionchanged:: 8.1.8 - The help option is now cached to avoid creating it multiple times. - """ - help_option_names = self.get_help_option_names(ctx) - - if not help_option_names or not self.add_help_option: - return None - - # Cache the help option object in private _help_option attribute to - # avoid creating it multiple times. Not doing this will break the - # callback odering by iter_params_for_processing(), which relies on - # object comparison. - if self._help_option is None: - # Avoid circular import. - from .decorators import help_option - - # Apply help_option decorator and pop resulting option - help_option(*help_option_names)(self) - self._help_option = self.params.pop() # type: ignore[assignment] - - return self._help_option - - def make_parser(self, ctx: Context) -> _OptionParser: - """Creates the underlying option parser for this command.""" - parser = _OptionParser(ctx) - for param in self.get_params(ctx): - param.add_to_parser(parser, ctx) - return parser - - def get_help(self, ctx: Context) -> str: - """Formats the help into a string and returns it. - - Calls :meth:`format_help` internally. - """ - formatter = ctx.make_formatter() - self.format_help(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_short_help_str(self, limit: int = 45) -> str: - """Gets short help for the command or makes it by shortening the - long help string. - """ - if self.short_help: - text = inspect.cleandoc(self.short_help) - elif self.help: - text = make_default_short_help(self.help, limit) - else: - text = "" - - if self.deprecated: - deprecated_message = ( - f"(DEPRECATED: {self.deprecated})" - if isinstance(self.deprecated, str) - else "(DEPRECATED)" - ) - text = _("{text} {deprecated_message}").format( - text=text, deprecated_message=deprecated_message - ) - - return text.strip() - - def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help into the formatter if it exists. - - This is a low-level method called by :meth:`get_help`. 
- - This calls the following methods: - - - :meth:`format_usage` - - :meth:`format_help_text` - - :meth:`format_options` - - :meth:`format_epilog` - """ - self.format_usage(ctx, formatter) - self.format_help_text(ctx, formatter) - self.format_options(ctx, formatter) - self.format_epilog(ctx, formatter) - - def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help text to the formatter if it exists.""" - if self.help is not None: - # truncate the help text to the first form feed - text = inspect.cleandoc(self.help).partition("\f")[0] - else: - text = "" - - if self.deprecated: - deprecated_message = ( - f"(DEPRECATED: {self.deprecated})" - if isinstance(self.deprecated, str) - else "(DEPRECATED)" - ) - text = _("{text} {deprecated_message}").format( - text=text, deprecated_message=deprecated_message - ) - - if text: - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(text) - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes all the options into the formatter if they exist.""" - opts = [] - for param in self.get_params(ctx): - rv = param.get_help_record(ctx) - if rv is not None: - opts.append(rv) - - if opts: - with formatter.section(_("Options")): - formatter.write_dl(opts) - - def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the epilog into the formatter if it exists.""" - if self.epilog: - epilog = inspect.cleandoc(self.epilog) - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(epilog) - - def make_context( - self, - info_name: str | None, - args: list[str], - parent: Context | None = None, - **extra: t.Any, - ) -> Context: - """This function when given an info name and arguments will kick - off the parsing and create a new :class:`Context`. It does not - invoke the actual command callback though. - - To quickly customize the context class used without overriding - this method, set the :attr:`context_class` attribute. - - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it's usually - the name of the script, for commands below it's - the name of the command. - :param args: the arguments to parse as list of strings. - :param parent: the parent context if available. - :param extra: extra keyword arguments forwarded to the context - constructor. - - .. versionchanged:: 8.0 - Added the :attr:`context_class` attribute. 
- """ - for key, value in self.context_settings.items(): - if key not in extra: - extra[key] = value - - ctx = self.context_class(self, info_name=info_name, parent=parent, **extra) - - with ctx.scope(cleanup=False): - self.parse_args(ctx, args) - return ctx - - def parse_args(self, ctx: Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - raise NoArgsIsHelpError(ctx) - - parser = self.make_parser(ctx) - opts, args, param_order = parser.parse_args(args=args) - - for param in iter_params_for_processing(param_order, self.get_params(ctx)): - value, args = param.handle_parse_result(ctx, opts, args) - - if args and not ctx.allow_extra_args and not ctx.resilient_parsing: - ctx.fail( - ngettext( - "Got unexpected extra argument ({args})", - "Got unexpected extra arguments ({args})", - len(args), - ).format(args=" ".join(map(str, args))) - ) - - ctx.args = args - ctx._opt_prefixes.update(parser._opt_prefixes) - return args - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the attached callback (if it exists) - in the right way. - """ - if self.deprecated: - extra_message = ( - f" {self.deprecated}" if isinstance(self.deprecated, str) else "" - ) - message = _( - "DeprecationWarning: The command {name!r} is deprecated.{extra_message}" - ).format(name=self.name, extra_message=extra_message) - echo(style(message, fg="red"), err=True) - - if self.callback is not None: - return ctx.invoke(self.callback, **ctx.params) - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. Looks - at the names of options and chained multi-commands. - - Any command could be part of a chained multi-command, so sibling - commands are valid at any point during command completion. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: list[CompletionItem] = [] - - if incomplete and not incomplete[0].isalnum(): - for param in self.get_params(ctx): - if ( - not isinstance(param, Option) - or param.hidden - or ( - not param.multiple - and ctx.get_parameter_source(param.name) # type: ignore - is ParameterSource.COMMANDLINE - ) - ): - continue - - results.extend( - CompletionItem(name, help=param.help) - for name in [*param.opts, *param.secondary_opts] - if name.startswith(incomplete) - ) - - while ctx.parent is not None: - ctx = ctx.parent - - if isinstance(ctx.command, Group) and ctx.command.chain: - results.extend( - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - if name not in ctx._protected_args - ) - - return results - - @t.overload - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: t.Literal[True] = True, - **extra: t.Any, - ) -> t.NoReturn: ... - - @t.overload - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: bool = ..., - **extra: t.Any, - ) -> t.Any: ... 
- - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: bool = True, - windows_expand_args: bool = True, - **extra: t.Any, - ) -> t.Any: - """This is the way to invoke a script with all the bells and - whistles as a command line application. This will always terminate - the application after a call. If this is not wanted, ``SystemExit`` - needs to be caught. - - This method is also available by directly calling the instance of - a :class:`Command`. - - :param args: the arguments that should be used for parsing. If not - provided, ``sys.argv[1:]`` is used. - :param prog_name: the program name that should be used. By default - the program name is constructed by taking the file - name from ``sys.argv[0]``. - :param complete_var: the environment variable that controls the - bash completion support. The default is - ``"__COMPLETE"`` with prog_name in - uppercase. - :param standalone_mode: the default behavior is to invoke the script - in standalone mode. Click will then - handle exceptions and convert them into - error messages and the function will never - return but shut down the interpreter. If - this is set to `False` they will be - propagated to the caller and the return - value of this function is the return value - of :meth:`invoke`. - :param windows_expand_args: Expand glob patterns, user dir, and - env vars in command line args on Windows. - :param extra: extra keyword arguments are forwarded to the context - constructor. See :class:`Context` for more information. - - .. versionchanged:: 8.0.1 - Added the ``windows_expand_args`` parameter to allow - disabling command line arg expansion on Windows. - - .. versionchanged:: 8.0 - When taking arguments from ``sys.argv`` on Windows, glob - patterns, user dir, and env vars are expanded. - - .. versionchanged:: 3.0 - Added the ``standalone_mode`` parameter. - """ - if args is None: - args = sys.argv[1:] - - if os.name == "nt" and windows_expand_args: - args = _expand_args(args) - else: - args = list(args) - - if prog_name is None: - prog_name = _detect_program_name() - - # Process shell completion requests and exit early. - self._main_shell_completion(extra, prog_name, complete_var) - - try: - try: - with self.make_context(prog_name, args, **extra) as ctx: - rv = self.invoke(ctx) - if not standalone_mode: - return rv - # it's not safe to `ctx.exit(rv)` here! 
- # note that `rv` may actually contain data like "1" which - # has obvious effects - # more subtle case: `rv=[None, None]` can come out of - # chained commands which all returned `None` -- so it's not - # even always obvious that `rv` indicates success/failure - # by its truthiness/falsiness - ctx.exit() - except (EOFError, KeyboardInterrupt) as e: - echo(file=sys.stderr) - raise Abort() from e - except ClickException as e: - if not standalone_mode: - raise - e.show() - sys.exit(e.exit_code) - except OSError as e: - if e.errno == errno.EPIPE: - sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) - sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) - sys.exit(1) - else: - raise - except Exit as e: - if standalone_mode: - sys.exit(e.exit_code) - else: - # in non-standalone mode, return the exit code - # note that this is only reached if `self.invoke` above raises - # an Exit explicitly -- thus bypassing the check there which - # would return its result - # the results of non-standalone execution may therefore be - # somewhat ambiguous: if there are codepaths which lead to - # `ctx.exit(1)` and to `return 1`, the caller won't be able to - # tell the difference between the two - return e.exit_code - except Abort: - if not standalone_mode: - raise - echo(_("Aborted!"), file=sys.stderr) - sys.exit(1) - - def _main_shell_completion( - self, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str | None = None, - ) -> None: - """Check if the shell is asking for tab completion, process - that, then exit early. Called from :meth:`main` before the - program is invoked. - - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. Defaults to - ``_{PROG_NAME}_COMPLETE``. - - .. versionchanged:: 8.2.0 - Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). - """ - if complete_var is None: - complete_name = prog_name.replace("-", "_").replace(".", "_") - complete_var = f"_{complete_name}_COMPLETE".upper() - - instruction = os.environ.get(complete_var) - - if not instruction: - return - - from .shell_completion import shell_complete - - rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) - sys.exit(rv) - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Alias for :meth:`main`.""" - return self.main(*args, **kwargs) - - -class _FakeSubclassCheck(type): - def __subclasscheck__(cls, subclass: type) -> bool: - return issubclass(subclass, cls.__bases__[0]) - - def __instancecheck__(cls, instance: t.Any) -> bool: - return isinstance(instance, cls.__bases__[0]) - - -class _BaseCommand(Command, metaclass=_FakeSubclassCheck): - """ - .. deprecated:: 8.2 - Will be removed in Click 9.0. Use ``Command`` instead. - """ - - -class Group(Command): - """A group is a command that nests other commands (or more groups). - - :param name: The name of the group command. - :param commands: Map names to :class:`Command` objects. Can be a list, which - will use :attr:`Command.name` as the keys. - :param invoke_without_command: Invoke the group's callback even if a - subcommand is not given. - :param no_args_is_help: If no arguments are given, show the group's help and - exit. Defaults to the opposite of ``invoke_without_command``. - :param subcommand_metavar: How to represent the subcommand argument in help. - The default will represent whether ``chain`` is set or not. - :param chain: Allow passing more than one subcommand argument. 
After parsing - a command's arguments, if any arguments remain another command will be - matched, and so on. - :param result_callback: A function to call after the group's and - subcommand's callbacks. The value returned by the subcommand is passed. - If ``chain`` is enabled, the value will be a list of values returned by - all the commands. If ``invoke_without_command`` is enabled, the value - will be the value returned by the group's callback, or an empty list if - ``chain`` is enabled. - :param kwargs: Other arguments passed to :class:`Command`. - - .. versionchanged:: 8.0 - The ``commands`` argument can be a list of command objects. - - .. versionchanged:: 8.2 - Merged with and replaces the ``MultiCommand`` base class. - """ - - allow_extra_args = True - allow_interspersed_args = False - - #: If set, this is used by the group's :meth:`command` decorator - #: as the default :class:`Command` class. This is useful to make all - #: subcommands use a custom command class. - #: - #: .. versionadded:: 8.0 - command_class: type[Command] | None = None - - #: If set, this is used by the group's :meth:`group` decorator - #: as the default :class:`Group` class. This is useful to make all - #: subgroups use a custom group class. - #: - #: If set to the special value :class:`type` (literally - #: ``group_class = type``), this group's class will be used as the - #: default class. This makes a custom group class continue to make - #: custom groups. - #: - #: .. versionadded:: 8.0 - group_class: type[Group] | type[type] | None = None - # Literal[type] isn't valid, so use Type[type] - - def __init__( - self, - name: str | None = None, - commands: cabc.MutableMapping[str, Command] - | cabc.Sequence[Command] - | None = None, - invoke_without_command: bool = False, - no_args_is_help: bool | None = None, - subcommand_metavar: str | None = None, - chain: bool = False, - result_callback: t.Callable[..., t.Any] | None = None, - **kwargs: t.Any, - ) -> None: - super().__init__(name, **kwargs) - - if commands is None: - commands = {} - elif isinstance(commands, abc.Sequence): - commands = {c.name: c for c in commands if c.name is not None} - - #: The registered subcommands by their exported names. - self.commands: cabc.MutableMapping[str, Command] = commands - - if no_args_is_help is None: - no_args_is_help = not invoke_without_command - - self.no_args_is_help = no_args_is_help - self.invoke_without_command = invoke_without_command - - if subcommand_metavar is None: - if chain: - subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." - else: - subcommand_metavar = "COMMAND [ARGS]..." - - self.subcommand_metavar = subcommand_metavar - self.chain = chain - # The result callback that is stored. This can be set or - # overridden with the :func:`result_callback` decorator. - self._result_callback = result_callback - - if self.chain: - for param in self.params: - if isinstance(param, Argument) and not param.required: - raise RuntimeError( - "A group in chain mode cannot have optional arguments." 
- ) - - def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - commands = {} - - for name in self.list_commands(ctx): - command = self.get_command(ctx, name) - - if command is None: - continue - - sub_ctx = ctx._make_sub_context(command) - - with sub_ctx.scope(cleanup=False): - commands[name] = command.to_info_dict(sub_ctx) - - info_dict.update(commands=commands, chain=self.chain) - return info_dict - - def add_command(self, cmd: Command, name: str | None = None) -> None: - """Registers another :class:`Command` with this group. If the name - is not provided, the name of the command is used. - """ - name = name or cmd.name - if name is None: - raise TypeError("Command has no name.") - _check_nested_chain(self, name, cmd, register=True) - self.commands[name] = cmd - - @t.overload - def command(self, __func: t.Callable[..., t.Any]) -> Command: ... - - @t.overload - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command]: ... - - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command] | Command: - """A shortcut decorator for declaring and attaching a command to - the group. This takes the same arguments as :func:`command` and - immediately registers the created command with this group by - calling :meth:`add_command`. - - To customize the command class used, set the - :attr:`command_class` attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`command_class` attribute. - """ - from .decorators import command - - func: t.Callable[..., t.Any] | None = None - - if args and callable(args[0]): - assert len(args) == 1 and not kwargs, ( - "Use 'command(**kwargs)(callable)' to provide arguments." - ) - (func,) = args - args = () - - if self.command_class and kwargs.get("cls") is None: - kwargs["cls"] = self.command_class - - def decorator(f: t.Callable[..., t.Any]) -> Command: - cmd: Command = command(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - @t.overload - def group(self, __func: t.Callable[..., t.Any]) -> Group: ... - - @t.overload - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Group]: ... - - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Group] | Group: - """A shortcut decorator for declaring and attaching a group to - the group. This takes the same arguments as :func:`group` and - immediately registers the created group with this group by - calling :meth:`add_command`. - - To customize the group class used, set the :attr:`group_class` - attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`group_class` attribute. - """ - from .decorators import group - - func: t.Callable[..., t.Any] | None = None - - if args and callable(args[0]): - assert len(args) == 1 and not kwargs, ( - "Use 'group(**kwargs)(callable)' to provide arguments." 
- ) - (func,) = args - args = () - - if self.group_class is not None and kwargs.get("cls") is None: - if self.group_class is type: - kwargs["cls"] = type(self) - else: - kwargs["cls"] = self.group_class - - def decorator(f: t.Callable[..., t.Any]) -> Group: - cmd: Group = group(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: - """Adds a result callback to the command. By default if a - result callback is already registered this will chain them but - this can be disabled with the `replace` parameter. The result - callback is invoked with the return value of the subcommand - (or the list of return values from all subcommands if chaining - is enabled) as well as the parameters as they would be passed - to the main callback. - - Example:: - - @click.group() - @click.option('-i', '--input', default=23) - def cli(input): - return 42 - - @cli.result_callback() - def process_result(result, input): - return result + input - - :param replace: if set to `True` an already existing result - callback will be removed. - - .. versionchanged:: 8.0 - Renamed from ``resultcallback``. - - .. versionadded:: 3.0 - """ - - def decorator(f: F) -> F: - old_callback = self._result_callback - - if old_callback is None or replace: - self._result_callback = f - return f - - def function(value: t.Any, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - inner = old_callback(value, *args, **kwargs) - return f(inner, *args, **kwargs) - - self._result_callback = rv = update_wrapper(t.cast(F, function), f) - return rv # type: ignore[return-value] - - return decorator - - def get_command(self, ctx: Context, cmd_name: str) -> Command | None: - """Given a context and a command name, this returns a :class:`Command` - object if it exists or returns ``None``. - """ - return self.commands.get(cmd_name) - - def list_commands(self, ctx: Context) -> list[str]: - """Returns a list of subcommand names in the order they should appear.""" - return sorted(self.commands) - - def collect_usage_pieces(self, ctx: Context) -> list[str]: - rv = super().collect_usage_pieces(ctx) - rv.append(self.subcommand_metavar) - return rv - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - super().format_options(ctx, formatter) - self.format_commands(ctx, formatter) - - def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: - """Extra format methods for multi methods that adds all the commands - after the options. - """ - commands = [] - for subcommand in self.list_commands(ctx): - cmd = self.get_command(ctx, subcommand) - # What is this, the tool lied about a command. 
Ignore it - if cmd is None: - continue - if cmd.hidden: - continue - - commands.append((subcommand, cmd)) - - # allow for 3 times the default spacing - if len(commands): - limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) - - rows = [] - for subcommand, cmd in commands: - help = cmd.get_short_help_str(limit) - rows.append((subcommand, help)) - - if rows: - with formatter.section(_("Commands")): - formatter.write_dl(rows) - - def parse_args(self, ctx: Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - raise NoArgsIsHelpError(ctx) - - rest = super().parse_args(ctx, args) - - if self.chain: - ctx._protected_args = rest - ctx.args = [] - elif rest: - ctx._protected_args, ctx.args = rest[:1], rest[1:] - - return ctx.args - - def invoke(self, ctx: Context) -> t.Any: - def _process_result(value: t.Any) -> t.Any: - if self._result_callback is not None: - value = ctx.invoke(self._result_callback, value, **ctx.params) - return value - - if not ctx._protected_args: - if self.invoke_without_command: - # No subcommand was invoked, so the result callback is - # invoked with the group return value for regular - # groups, or an empty list for chained groups. - with ctx: - rv = super().invoke(ctx) - return _process_result([] if self.chain else rv) - ctx.fail(_("Missing command.")) - - # Fetch args back out - args = [*ctx._protected_args, *ctx.args] - ctx.args = [] - ctx._protected_args = [] - - # If we're not in chain mode, we only allow the invocation of a - # single command but we also inform the current context about the - # name of the command to invoke. - if not self.chain: - # Make sure the context is entered so we do not clean up - # resources until the result processor has worked. - with ctx: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - ctx.invoked_subcommand = cmd_name - super().invoke(ctx) - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) - with sub_ctx: - return _process_result(sub_ctx.command.invoke(sub_ctx)) - - # In chain mode we create the contexts step by step, but after the - # base command has been invoked. Because at that point we do not - # know the subcommands yet, the invoked subcommand attribute is - # set to ``*`` to inform the command that subcommands are executed - # but nothing else. - with ctx: - ctx.invoked_subcommand = "*" if args else None - super().invoke(ctx) - - # Otherwise we make every single context and invoke them in a - # chain. In that case the return value to the result processor - # is the list of all invoked subcommand's results. - contexts = [] - while args: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - sub_ctx = cmd.make_context( - cmd_name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - ) - contexts.append(sub_ctx) - args, sub_ctx.args = sub_ctx.args, [] - - rv = [] - for sub_ctx in contexts: - with sub_ctx: - rv.append(sub_ctx.command.invoke(sub_ctx)) - return _process_result(rv) - - def resolve_command( - self, ctx: Context, args: list[str] - ) -> tuple[str | None, Command | None, list[str]]: - cmd_name = make_str(args[0]) - original_cmd_name = cmd_name - - # Get the command - cmd = self.get_command(ctx, cmd_name) - - # If we can't find the command but there is a normalization - # function available, we try with that one. 
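
# The normalization step mentioned above uses Context.token_normalize_func,
# which can be supplied through context_settings; an illustrative sketch
# that makes subcommand names case-insensitive:
import click

CONTEXT_SETTINGS = {"token_normalize_func": lambda token: token.lower()}


@click.group(context_settings=CONTEXT_SETTINGS)
def cli():
    pass


@cli.command()
def sync():
    click.echo("syncing")


if __name__ == "__main__":
    cli()  # "SYNC" and "Sync" resolve to the "sync" command
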
- if cmd is None and ctx.token_normalize_func is not None: - cmd_name = ctx.token_normalize_func(cmd_name) - cmd = self.get_command(ctx, cmd_name) - - # If we don't find the command we want to show an error message - # to the user that it was not provided. However, there is - # something else we should do: if the first argument looks like - # an option we want to kick off parsing again for arguments to - # resolve things like --help which now should go to the main - # place. - if cmd is None and not ctx.resilient_parsing: - if _split_opt(cmd_name)[0]: - self.parse_args(ctx, args) - ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) - return cmd_name if cmd else None, cmd, args[1:] - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. Looks - at the names of options, subcommands, and chained - multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results = [ - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - ] - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class _MultiCommand(Group, metaclass=_FakeSubclassCheck): - """ - .. deprecated:: 8.2 - Will be removed in Click 9.0. Use ``Group`` instead. - """ - - -class CommandCollection(Group): - """A :class:`Group` that looks up subcommands on other groups. If a command - is not found on this group, each registered source is checked in order. - Parameters on a source are not added to this group, and a source's callback - is not invoked when invoking its commands. In other words, this "flattens" - commands in many groups into this one group. - - :param name: The name of the group command. - :param sources: A list of :class:`Group` objects to look up commands from. - :param kwargs: Other arguments passed to :class:`Group`. - - .. versionchanged:: 8.2 - This is a subclass of ``Group``. Commands are looked up first on this - group, then each of its sources. - """ - - def __init__( - self, - name: str | None = None, - sources: list[Group] | None = None, - **kwargs: t.Any, - ) -> None: - super().__init__(name, **kwargs) - #: The list of registered groups. - self.sources: list[Group] = sources or [] - - def add_source(self, group: Group) -> None: - """Add a group as a source of commands.""" - self.sources.append(group) - - def get_command(self, ctx: Context, cmd_name: str) -> Command | None: - rv = super().get_command(ctx, cmd_name) - - if rv is not None: - return rv - - for source in self.sources: - rv = source.get_command(ctx, cmd_name) - - if rv is not None: - if self.chain: - _check_nested_chain(self, cmd_name, rv) - - return rv - - return None - - def list_commands(self, ctx: Context) -> list[str]: - rv: set[str] = set(super().list_commands(ctx)) - - for source in self.sources: - rv.update(source.list_commands(ctx)) - - return sorted(rv) - - -def _check_iter(value: t.Any) -> cabc.Iterator[t.Any]: - """Check if the value is iterable but not a string. Raises a type - error, or return an iterator over the value. - """ - if isinstance(value, str): - raise TypeError - - return iter(value) - - -class Parameter: - r"""A parameter to a command comes in two versions: they are either - :class:`Option`\s or :class:`Argument`\s. 
Other subclasses are currently - not supported by design as some of the internals for parsing are - intentionally not finalized. - - Some settings are supported by both options and arguments. - - :param param_decls: the parameter declarations for this option or - argument. This is a list of flags or argument - names. - :param type: the type that should be used. Either a :class:`ParamType` - or a Python type. The latter is converted into the former - automatically if supported. - :param required: controls if this is optional or not. - :param default: the default value if omitted. This can also be a callable, - in which case it's invoked when the default is needed - without any arguments. - :param callback: A function to further process or validate the value - after type conversion. It is called as ``f(ctx, param, value)`` - and must return the value. It is called for all sources, - including prompts. - :param nargs: the number of arguments to match. If not ``1`` the return - value is a tuple instead of single value. The default for - nargs is ``1`` (except if the type is a tuple, then it's - the arity of the tuple). If ``nargs=-1``, all remaining - parameters are collected. - :param metavar: how the value is represented in the help page. - :param expose_value: if this is `True` then the value is passed onwards - to the command callback and stored on the context, - otherwise it's skipped. - :param is_eager: eager values are processed before non eager ones. This - should not be set for arguments or it will inverse the - order of processing. - :param envvar: a string or list of strings that are environment variables - that should be checked. - :param shell_complete: A function that returns custom shell - completions. Used instead of the param's type completion if - given. Takes ``ctx, param, incomplete`` and must return a list - of :class:`~click.shell_completion.CompletionItem` or a list of - strings. - :param deprecated: If ``True`` or non-empty string, issues a message - indicating that the argument is deprecated and highlights - its deprecation in --help. The message can be customized - by using a string as the value. A deprecated parameter - cannot be required, a ValueError will be raised otherwise. - - .. versionchanged:: 8.2.0 - Introduction of ``deprecated``. - - .. versionchanged:: 8.2 - Adding duplicate parameter names to a :class:`~click.core.Command` will - result in a ``UserWarning`` being shown. - - .. versionchanged:: 8.2 - Adding duplicate parameter names to a :class:`~click.core.Command` will - result in a ``UserWarning`` being shown. - - .. versionchanged:: 8.0 - ``process_value`` validates required parameters and bounded - ``nargs``, and invokes the parameter callback before returning - the value. This allows the callback to validate prompts. - ``full_process_value`` is removed. - - .. versionchanged:: 8.0 - ``autocompletion`` is renamed to ``shell_complete`` and has new - semantics described above. The old name is deprecated and will - be removed in 8.1, until then it will be wrapped to match the - new requirements. - - .. versionchanged:: 8.0 - For ``multiple=True, nargs>1``, the default must be a list of - tuples. - - .. versionchanged:: 8.0 - Setting a default is no longer required for ``nargs>1``, it will - default to ``None``. ``multiple=True`` or ``nargs=-1`` will - default to ``()``. - - .. versionchanged:: 7.1 - Empty environment variables are ignored rather than taking the - empty string value. 
This makes it possible for scripts to clear - variables if they can't unset them. - - .. versionchanged:: 2.0 - Changed signature for parameter callback to also be passed the - parameter. The old callback format will still work, but it will - raise a warning to give you a chance to migrate the code easier. - """ - - param_type_name = "parameter" - - def __init__( - self, - param_decls: cabc.Sequence[str] | None = None, - type: types.ParamType | t.Any | None = None, - required: bool = False, - default: t.Any | t.Callable[[], t.Any] | None = None, - callback: t.Callable[[Context, Parameter, t.Any], t.Any] | None = None, - nargs: int | None = None, - multiple: bool = False, - metavar: str | None = None, - expose_value: bool = True, - is_eager: bool = False, - envvar: str | cabc.Sequence[str] | None = None, - shell_complete: t.Callable[ - [Context, Parameter, str], list[CompletionItem] | list[str] - ] - | None = None, - deprecated: bool | str = False, - ) -> None: - self.name: str | None - self.opts: list[str] - self.secondary_opts: list[str] - self.name, self.opts, self.secondary_opts = self._parse_decls( - param_decls or (), expose_value - ) - self.type: types.ParamType = types.convert_type(type, default) - - # Default nargs to what the type tells us if we have that - # information available. - if nargs is None: - if self.type.is_composite: - nargs = self.type.arity - else: - nargs = 1 - - self.required = required - self.callback = callback - self.nargs = nargs - self.multiple = multiple - self.expose_value = expose_value - self.default = default - self.is_eager = is_eager - self.metavar = metavar - self.envvar = envvar - self._custom_shell_complete = shell_complete - self.deprecated = deprecated - - if __debug__: - if self.type.is_composite and nargs != self.type.arity: - raise ValueError( - f"'nargs' must be {self.type.arity} (or None) for" - f" type {self.type!r}, but it was {nargs}." - ) - - # Skip no default or callable default. - check_default = default if not callable(default) else None - - if check_default is not None: - if multiple: - try: - # Only check the first value against nargs. - check_default = next(_check_iter(check_default), None) - except TypeError: - raise ValueError( - "'default' must be a list when 'multiple' is true." - ) from None - - # Can be None for multiple with empty default. - if nargs != 1 and check_default is not None: - try: - _check_iter(check_default) - except TypeError: - if multiple: - message = ( - "'default' must be a list of lists when 'multiple' is" - " true and 'nargs' != 1." - ) - else: - message = "'default' must be a list when 'nargs' != 1." - - raise ValueError(message) from None - - if nargs > 1 and len(check_default) != nargs: - subject = "item length" if multiple else "length" - raise ValueError( - f"'default' {subject} must match nargs={nargs}." - ) - - if required and deprecated: - raise ValueError( - f"The {self.param_type_name} '{self.human_readable_name}' " - "is deprecated and still required. A deprecated " - f"{self.param_type_name} cannot be required." - ) - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. 
versionadded:: 8.0 - """ - return { - "name": self.name, - "param_type_name": self.param_type_name, - "opts": self.opts, - "secondary_opts": self.secondary_opts, - "type": self.type.to_info_dict(), - "required": self.required, - "nargs": self.nargs, - "multiple": self.multiple, - "default": self.default, - "envvar": self.envvar, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - raise NotImplementedError() - - @property - def human_readable_name(self) -> str: - """Returns the human readable name of this parameter. This is the - same as the name for options, but the metavar for arguments. - """ - return self.name # type: ignore - - def make_metavar(self, ctx: Context) -> str: - if self.metavar is not None: - return self.metavar - - metavar = self.type.get_metavar(param=self, ctx=ctx) - - if metavar is None: - metavar = self.type.name.upper() - - if self.nargs != 1: - metavar += "..." - - return metavar - - @t.overload - def get_default( - self, ctx: Context, call: t.Literal[True] = True - ) -> t.Any | None: ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Any | t.Callable[[], t.Any] | None: - """Get the default for the parameter. Tries - :meth:`Context.lookup_default` first, then the local default. - - :param ctx: Current context. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0.2 - Type casting is no longer performed when getting a default. - - .. versionchanged:: 8.0.1 - Type casting can fail in resilient parsing mode. Invalid - defaults will not prevent showing help text. - - .. versionchanged:: 8.0 - Looks at ``ctx.default_map`` first. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. - """ - value = ctx.lookup_default(self.name, call=False) # type: ignore - - if value is None: - value = self.default - - if call and callable(value): - value = value() - - return value - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - raise NotImplementedError() - - def consume_value( - self, ctx: Context, opts: cabc.Mapping[str, t.Any] - ) -> tuple[t.Any, ParameterSource]: - value = opts.get(self.name) # type: ignore - source = ParameterSource.COMMANDLINE - - if value is None: - value = self.value_from_envvar(ctx) - source = ParameterSource.ENVIRONMENT - - if value is None: - value = ctx.lookup_default(self.name) # type: ignore - source = ParameterSource.DEFAULT_MAP - - if value is None: - value = self.get_default(ctx) - source = ParameterSource.DEFAULT - - return value, source - - def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: - """Convert and validate a value against the option's - :attr:`type`, :attr:`multiple`, and :attr:`nargs`. - """ - if value is None: - return () if self.multiple or self.nargs == -1 else None - - def check_iter(value: t.Any) -> cabc.Iterator[t.Any]: - try: - return _check_iter(value) - except TypeError: - # This should only happen when passing in args manually, - # the parser should construct an iterable when parsing - # the command line. 
- raise BadParameter( - _("Value must be an iterable."), ctx=ctx, param=self - ) from None - - if self.nargs == 1 or self.type.is_composite: - - def convert(value: t.Any) -> t.Any: - return self.type(value, param=self, ctx=ctx) - - elif self.nargs == -1: - - def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] - return tuple(self.type(x, self, ctx) for x in check_iter(value)) - - else: # nargs > 1 - - def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] - value = tuple(check_iter(value)) - - if len(value) != self.nargs: - raise BadParameter( - ngettext( - "Takes {nargs} values but 1 was given.", - "Takes {nargs} values but {len} were given.", - len(value), - ).format(nargs=self.nargs, len=len(value)), - ctx=ctx, - param=self, - ) - - return tuple(self.type(x, self, ctx) for x in value) - - if self.multiple: - return tuple(convert(x) for x in check_iter(value)) - - return convert(value) - - def value_is_missing(self, value: t.Any) -> bool: - if value is None: - return True - - if (self.nargs != 1 or self.multiple) and value == (): - return True - - return False - - def process_value(self, ctx: Context, value: t.Any) -> t.Any: - value = self.type_cast_value(ctx, value) - - if self.required and self.value_is_missing(value): - raise MissingParameter(ctx=ctx, param=self) - - if self.callback is not None: - value = self.callback(ctx, self, value) - - return value - - def resolve_envvar_value(self, ctx: Context) -> str | None: - if self.envvar is None: - return None - - if isinstance(self.envvar, str): - rv = os.environ.get(self.envvar) - - if rv: - return rv - else: - for envvar in self.envvar: - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Any | None: - rv: t.Any | None = self.resolve_envvar_value(ctx) - - if rv is not None and self.nargs != 1: - rv = self.type.split_envvar_value(rv) - - return rv - - def handle_parse_result( - self, ctx: Context, opts: cabc.Mapping[str, t.Any], args: list[str] - ) -> tuple[t.Any, list[str]]: - with augment_usage_errors(ctx, param=self): - value, source = self.consume_value(ctx, opts) - - if ( - self.deprecated - and value is not None - and source - not in ( - ParameterSource.DEFAULT, - ParameterSource.DEFAULT_MAP, - ) - ): - extra_message = ( - f" {self.deprecated}" if isinstance(self.deprecated, str) else "" - ) - message = _( - "DeprecationWarning: The {param_type} {name!r} is deprecated." - "{extra_message}" - ).format( - param_type=self.param_type_name, - name=self.human_readable_name, - extra_message=extra_message, - ) - echo(style(message, fg="red"), err=True) - - ctx.set_parameter_source(self.name, source) # type: ignore - - try: - value = self.process_value(ctx, value) - except Exception: - if not ctx.resilient_parsing: - raise - - value = None - - if self.expose_value: - ctx.params[self.name] = value # type: ignore - - return value, args - - def get_help_record(self, ctx: Context) -> tuple[str, str] | None: - pass - - def get_usage_pieces(self, ctx: Context) -> list[str]: - return [] - - def get_error_hint(self, ctx: Context) -> str: - """Get a stringified version of the param for use in error messages to - indicate which param caused the error. - """ - hint_list = self.opts or [self.human_readable_name] - return " / ".join(f"'{x}'" for x in hint_list) - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. If a - ``shell_complete`` function was given during init, it is used. 
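
# A sketch of the custom completion hook used here: the callable receives
# (ctx, param, incomplete) and may return plain strings, which this method
# wraps into CompletionItem objects. The option and value list are
# illustrative.
import click

ENVIRONMENTS = ["dev", "staging", "production"]


def complete_env(ctx, param, incomplete):
    return [name for name in ENVIRONMENTS if name.startswith(incomplete)]


@click.command()
@click.option("--env", shell_complete=complete_env)
def deploy(env):
    click.echo(f"deploying to {env}")


if __name__ == "__main__":
    deploy()
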
- Otherwise, the :attr:`type` - :meth:`~click.types.ParamType.shell_complete` function is used. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - if self._custom_shell_complete is not None: - results = self._custom_shell_complete(ctx, self, incomplete) - - if results and isinstance(results[0], str): - from click.shell_completion import CompletionItem - - results = [CompletionItem(c) for c in results] - - return t.cast("list[CompletionItem]", results) - - return self.type.shell_complete(ctx, self, incomplete) - - -class Option(Parameter): - """Options are usually optional values on the command line and - have some extra features that arguments don't have. - - All other parameters are passed onwards to the parameter constructor. - - :param show_default: Show the default value for this option in its - help text. Values are not shown by default, unless - :attr:`Context.show_default` is ``True``. If this value is a - string, it shows that string in parentheses instead of the - actual value. This is particularly useful for dynamic options. - For single option boolean flags, the default remains hidden if - its value is ``False``. - :param show_envvar: Controls if an environment variable should be - shown on the help page and error messages. - Normally, environment variables are not shown. - :param prompt: If set to ``True`` or a non empty string then the - user will be prompted for input. If set to ``True`` the prompt - will be the option name capitalized. A deprecated option cannot be - prompted. - :param confirmation_prompt: Prompt a second time to confirm the - value if it was prompted for. Can be set to a string instead of - ``True`` to customize the message. - :param prompt_required: If set to ``False``, the user will be - prompted for input only when the option was specified as a flag - without a value. - :param hide_input: If this is ``True`` then the input on the prompt - will be hidden from the user. This is useful for password input. - :param is_flag: forces this option to act as a flag. The default is - auto detection. - :param flag_value: which value should be used for this flag if it's - enabled. This is set to a boolean automatically if - the option string contains a slash to mark two options. - :param multiple: if this is set to `True` then the argument is accepted - multiple times and recorded. This is similar to ``nargs`` - in how it works but supports arbitrary number of - arguments. - :param count: this flag makes an option increment an integer. - :param allow_from_autoenv: if this is enabled then the value of this - parameter will be pulled from an environment - variable in case a prefix is defined on the - context. - :param help: the help string. - :param hidden: hide this option from help outputs. - :param attrs: Other command arguments described in :class:`Parameter`. - - .. versionchanged:: 8.2 - ``envvar`` used with ``flag_value`` will always use the ``flag_value``, - previously it would use the value of the environment variable. - - .. versionchanged:: 8.1 - Help text indentation is cleaned here instead of only in the - ``@option`` decorator. - - .. versionchanged:: 8.1 - The ``show_default`` parameter overrides - ``Context.show_default``. - - .. versionchanged:: 8.1 - The default of a single option boolean flag is not shown if the - default value is ``False``. - - .. versionchanged:: 8.0.1 - ``type`` is detected from ``flag_value`` if given. 
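
# A compact sketch of the option behaviours described in the docstring
# above: a boolean flag pair, a counted flag, a prompted value, and a
# default that is shown in --help. All names are illustrative.
import click


@click.command()
@click.option("--debug/--no-debug", default=False, help="Toggle debug mode.")
@click.option("-v", "--verbose", count=True, help="Repeat for more detail.")
@click.option("--username", prompt=True, help="Prompted for when omitted.")
@click.option("--region", default="us-east-1", show_default=True)
def cli(debug, verbose, username, region):
    click.echo(f"debug={debug} verbose={verbose} user={username} region={region}")


if __name__ == "__main__":
    cli()
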
- """ - - param_type_name = "option" - - def __init__( - self, - param_decls: cabc.Sequence[str] | None = None, - show_default: bool | str | None = None, - prompt: bool | str = False, - confirmation_prompt: bool | str = False, - prompt_required: bool = True, - hide_input: bool = False, - is_flag: bool | None = None, - flag_value: t.Any | None = None, - multiple: bool = False, - count: bool = False, - allow_from_autoenv: bool = True, - type: types.ParamType | t.Any | None = None, - help: str | None = None, - hidden: bool = False, - show_choices: bool = True, - show_envvar: bool = False, - deprecated: bool | str = False, - **attrs: t.Any, - ) -> None: - if help: - help = inspect.cleandoc(help) - - default_is_missing = "default" not in attrs - super().__init__( - param_decls, type=type, multiple=multiple, deprecated=deprecated, **attrs - ) - - if prompt is True: - if self.name is None: - raise TypeError("'name' is required with 'prompt=True'.") - - prompt_text: str | None = self.name.replace("_", " ").capitalize() - elif prompt is False: - prompt_text = None - else: - prompt_text = prompt - - if deprecated: - deprecated_message = ( - f"(DEPRECATED: {deprecated})" - if isinstance(deprecated, str) - else "(DEPRECATED)" - ) - help = help + deprecated_message if help is not None else deprecated_message - - self.prompt = prompt_text - self.confirmation_prompt = confirmation_prompt - self.prompt_required = prompt_required - self.hide_input = hide_input - self.hidden = hidden - - # If prompt is enabled but not required, then the option can be - # used as a flag to indicate using prompt or flag_value. - self._flag_needs_value = self.prompt is not None and not self.prompt_required - - if is_flag is None: - if flag_value is not None: - # Implicitly a flag because flag_value was set. - is_flag = True - elif self._flag_needs_value: - # Not a flag, but when used as a flag it shows a prompt. - is_flag = False - else: - # Implicitly a flag because flag options were given. - is_flag = bool(self.secondary_opts) - elif is_flag is False and not self._flag_needs_value: - # Not a flag, and prompt is not enabled, can be used as a - # flag if flag_value is set. - self._flag_needs_value = flag_value is not None - - self.default: t.Any | t.Callable[[], t.Any] - - if is_flag and default_is_missing and not self.required: - if multiple: - self.default = () - else: - self.default = False - - if is_flag and flag_value is None: - flag_value = not self.default - - self.type: types.ParamType - if is_flag and type is None: - # Re-guess the type from the flag value instead of the - # default. 
- self.type = types.convert_type(None, flag_value) - - self.is_flag: bool = is_flag - self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType) - self.flag_value: t.Any = flag_value - - # Counting - self.count = count - if count: - if type is None: - self.type = types.IntRange(min=0) - if default_is_missing: - self.default = 0 - - self.allow_from_autoenv = allow_from_autoenv - self.help = help - self.show_default = show_default - self.show_choices = show_choices - self.show_envvar = show_envvar - - if __debug__: - if deprecated and prompt: - raise ValueError("`deprecated` options cannot use `prompt`.") - - if self.nargs == -1: - raise TypeError("nargs=-1 is not supported for options.") - - if self.prompt and self.is_flag and not self.is_bool_flag: - raise TypeError("'prompt' is not valid for non-boolean flag.") - - if not self.is_bool_flag and self.secondary_opts: - raise TypeError("Secondary flag is not valid for non-boolean flag.") - - if self.is_bool_flag and self.hide_input and self.prompt is not None: - raise TypeError( - "'prompt' with 'hide_input' is not valid for boolean flag." - ) - - if self.count: - if self.multiple: - raise TypeError("'count' is not valid with 'multiple'.") - - if self.is_flag: - raise TypeError("'count' is not valid with 'is_flag'.") - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - help=self.help, - prompt=self.prompt, - is_flag=self.is_flag, - flag_value=self.flag_value, - count=self.count, - hidden=self.hidden, - ) - return info_dict - - def get_error_hint(self, ctx: Context) -> str: - result = super().get_error_hint(ctx) - if self.show_envvar: - result += f" (env var: '{self.envvar}')" - return result - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - opts = [] - secondary_opts = [] - name = None - possible_names = [] - - for decl in decls: - if decl.isidentifier(): - if name is not None: - raise TypeError(f"Name '{name}' defined twice") - name = decl - else: - split_char = ";" if decl[:1] == "/" else "/" - if split_char in decl: - first, second = decl.split(split_char, 1) - first = first.rstrip() - if first: - possible_names.append(_split_opt(first)) - opts.append(first) - second = second.lstrip() - if second: - secondary_opts.append(second.lstrip()) - if first == second: - raise ValueError( - f"Boolean option {decl!r} cannot use the" - " same flag for true/false." - ) - else: - possible_names.append(_split_opt(decl)) - opts.append(decl) - - if name is None and possible_names: - possible_names.sort(key=lambda x: -len(x[0])) # group long options first - name = possible_names[0][1].replace("-", "_").lower() - if not name.isidentifier(): - name = None - - if name is None: - if not expose_value: - return None, opts, secondary_opts - raise TypeError( - f"Could not determine name for option with declarations {decls!r}" - ) - - if not opts and not secondary_opts: - raise TypeError( - f"No options defined but a name was passed ({name})." - " Did you mean to declare an argument instead? Did" - f" you mean to pass '--{name}'?" 
- ) - - return name, opts, secondary_opts - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - if self.multiple: - action = "append" - elif self.count: - action = "count" - else: - action = "store" - - if self.is_flag: - action = f"{action}_const" - - if self.is_bool_flag and self.secondary_opts: - parser.add_option( - obj=self, opts=self.opts, dest=self.name, action=action, const=True - ) - parser.add_option( - obj=self, - opts=self.secondary_opts, - dest=self.name, - action=action, - const=False, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - const=self.flag_value, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - nargs=self.nargs, - ) - - def get_help_record(self, ctx: Context) -> tuple[str, str] | None: - if self.hidden: - return None - - any_prefix_is_slash = False - - def _write_opts(opts: cabc.Sequence[str]) -> str: - nonlocal any_prefix_is_slash - - rv, any_slashes = join_options(opts) - - if any_slashes: - any_prefix_is_slash = True - - if not self.is_flag and not self.count: - rv += f" {self.make_metavar(ctx=ctx)}" - - return rv - - rv = [_write_opts(self.opts)] - - if self.secondary_opts: - rv.append(_write_opts(self.secondary_opts)) - - help = self.help or "" - - extra = self.get_help_extra(ctx) - extra_items = [] - if "envvars" in extra: - extra_items.append( - _("env var: {var}").format(var=", ".join(extra["envvars"])) - ) - if "default" in extra: - extra_items.append(_("default: {default}").format(default=extra["default"])) - if "range" in extra: - extra_items.append(extra["range"]) - if "required" in extra: - extra_items.append(_(extra["required"])) - - if extra_items: - extra_str = "; ".join(extra_items) - help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" - - return ("; " if any_prefix_is_slash else " / ").join(rv), help - - def get_help_extra(self, ctx: Context) -> types.OptionHelpExtra: - extra: types.OptionHelpExtra = {} - - if self.show_envvar: - envvar = self.envvar - - if envvar is None: - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - - if envvar is not None: - if isinstance(envvar, str): - extra["envvars"] = (envvar,) - else: - extra["envvars"] = tuple(str(d) for d in envvar) - - # Temporarily enable resilient parsing to avoid type casting - # failing for the default. Might be possible to extend this to - # help formatting in general. - resilient = ctx.resilient_parsing - ctx.resilient_parsing = True - - try: - default_value = self.get_default(ctx, call=False) - finally: - ctx.resilient_parsing = resilient - - show_default = False - show_default_is_str = False - - if self.show_default is not None: - if isinstance(self.show_default, str): - show_default_is_str = show_default = True - else: - show_default = self.show_default - elif ctx.show_default is not None: - show_default = ctx.show_default - - if show_default_is_str or (show_default and (default_value is not None)): - if show_default_is_str: - default_string = f"({self.show_default})" - elif isinstance(default_value, (list, tuple)): - default_string = ", ".join(str(d) for d in default_value) - elif inspect.isfunction(default_value): - default_string = _("(dynamic)") - elif self.is_bool_flag and self.secondary_opts: - # For boolean flags that have distinct True/False opts, - # use the opt without prefix instead of the value. 
- default_string = _split_opt( - (self.opts if default_value else self.secondary_opts)[0] - )[1] - elif self.is_bool_flag and not self.secondary_opts and not default_value: - default_string = "" - elif default_value == "": - default_string = '""' - else: - default_string = str(default_value) - - if default_string: - extra["default"] = default_string - - if ( - isinstance(self.type, types._NumberRangeBase) - # skip count with default range type - and not (self.count and self.type.min == 0 and self.type.max is None) - ): - range_str = self.type._describe_range() - - if range_str: - extra["range"] = range_str - - if self.required: - extra["required"] = "required" - - return extra - - @t.overload - def get_default( - self, ctx: Context, call: t.Literal[True] = True - ) -> t.Any | None: ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Any | t.Callable[[], t.Any] | None: - # If we're a non boolean flag our default is more complex because - # we need to look at all flags in the same group to figure out - # if we're the default one in which case we return the flag - # value as default. - if self.is_flag and not self.is_bool_flag: - for param in ctx.command.params: - if param.name == self.name and param.default: - return t.cast(Option, param).flag_value - - return None - - return super().get_default(ctx, call=call) - - def prompt_for_value(self, ctx: Context) -> t.Any: - """This is an alternative flow that can be activated in the full - value processing if a value does not exist. It will prompt the - user until a valid value exists and then returns the processed - value as result. - """ - assert self.prompt is not None - - # Calculate the default before prompting anything to be stable. - default = self.get_default(ctx) - - # If this is a prompt for a flag we need to handle this - # differently. - if self.is_bool_flag: - return confirm(self.prompt, default) - - # If show_default is set to True/False, provide this to `prompt` as well. 
For - # non-bool values of `show_default`, we use `prompt`'s default behavior - prompt_kwargs: t.Any = {} - if isinstance(self.show_default, bool): - prompt_kwargs["show_default"] = self.show_default - - return prompt( - self.prompt, - default=default, - type=self.type, - hide_input=self.hide_input, - show_choices=self.show_choices, - confirmation_prompt=self.confirmation_prompt, - value_proc=lambda x: self.process_value(ctx, x), - **prompt_kwargs, - ) - - def resolve_envvar_value(self, ctx: Context) -> str | None: - rv = super().resolve_envvar_value(ctx) - - if rv is not None: - if self.is_flag and self.flag_value: - return str(self.flag_value) - return rv - - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Any | None: - rv: t.Any | None = self.resolve_envvar_value(ctx) - - if rv is None: - return None - - value_depth = (self.nargs != 1) + bool(self.multiple) - - if value_depth > 0: - rv = self.type.split_envvar_value(rv) - - if self.multiple and self.nargs != 1: - rv = batch(rv, self.nargs) - - return rv - - def consume_value( - self, ctx: Context, opts: cabc.Mapping[str, Parameter] - ) -> tuple[t.Any, ParameterSource]: - value, source = super().consume_value(ctx, opts) - - # The parser will emit a sentinel value if the option can be - # given as a flag without a value. This is different from None - # to distinguish from the flag not being given at all. - if value is _flag_needs_value: - if self.prompt is not None and not ctx.resilient_parsing: - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - else: - value = self.flag_value - source = ParameterSource.COMMANDLINE - - elif ( - self.multiple - and value is not None - and any(v is _flag_needs_value for v in value) - ): - value = [self.flag_value if v is _flag_needs_value else v for v in value] - source = ParameterSource.COMMANDLINE - - # The value wasn't set, or used the param's default, prompt if - # prompting is enabled. - elif ( - source in {None, ParameterSource.DEFAULT} - and self.prompt is not None - and (self.required or self.prompt_required) - and not ctx.resilient_parsing - ): - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - - return value, source - - -class Argument(Parameter): - """Arguments are positional parameters to a command. They generally - provide fewer features than options but can have infinite ``nargs`` - and are required by default. - - All parameters are passed onwards to the constructor of :class:`Parameter`. 
- """ - - param_type_name = "argument" - - def __init__( - self, - param_decls: cabc.Sequence[str], - required: bool | None = None, - **attrs: t.Any, - ) -> None: - if required is None: - if attrs.get("default") is not None: - required = False - else: - required = attrs.get("nargs", 1) > 0 - - if "multiple" in attrs: - raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") - - super().__init__(param_decls, required=required, **attrs) - - if __debug__: - if self.default is not None and self.nargs == -1: - raise TypeError("'default' is not supported for nargs=-1.") - - @property - def human_readable_name(self) -> str: - if self.metavar is not None: - return self.metavar - return self.name.upper() # type: ignore - - def make_metavar(self, ctx: Context) -> str: - if self.metavar is not None: - return self.metavar - var = self.type.get_metavar(param=self, ctx=ctx) - if not var: - var = self.name.upper() # type: ignore - if self.deprecated: - var += "!" - if not self.required: - var = f"[{var}]" - if self.nargs != 1: - var += "..." - return var - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - if not decls: - if not expose_value: - return None, [], [] - raise TypeError("Argument is marked as exposed, but does not have a name.") - if len(decls) == 1: - name = arg = decls[0] - name = name.replace("-", "_").lower() - else: - raise TypeError( - "Arguments take exactly one parameter declaration, got" - f" {len(decls)}: {decls}." - ) - return name, [arg], [] - - def get_usage_pieces(self, ctx: Context) -> list[str]: - return [self.make_metavar(ctx)] - - def get_error_hint(self, ctx: Context) -> str: - return f"'{self.make_metavar(ctx)}'" - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) - - -def __getattr__(name: str) -> object: - import warnings - - if name == "BaseCommand": - warnings.warn( - "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Command' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _BaseCommand - - if name == "MultiCommand": - warnings.warn( - "'MultiCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Group' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _MultiCommand - - raise AttributeError(name) diff --git a/venv/lib/python3.10/site-packages/click/decorators.py b/venv/lib/python3.10/site-packages/click/decorators.py deleted file mode 100644 index 21f4c34..0000000 --- a/venv/lib/python3.10/site-packages/click/decorators.py +++ /dev/null @@ -1,551 +0,0 @@ -from __future__ import annotations - -import inspect -import typing as t -from functools import update_wrapper -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .globals import get_current_context -from .utils import echo - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") -T = t.TypeVar("T") -_AnyCallable = t.Callable[..., t.Any] -FC = t.TypeVar("FC", bound="_AnyCallable | Command") - - -def pass_context(f: t.Callable[te.Concatenate[Context, P], R]) -> t.Callable[P, R]: - """Marks a callback as wanting to receive the current context - object as first argument. 
- """ - - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - return f(get_current_context(), *args, **kwargs) - - return update_wrapper(new_func, f) - - -def pass_obj(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - """Similar to :func:`pass_context`, but only pass the object on the - context onwards (:attr:`Context.obj`). This is useful if that object - represents the state of a nested system. - """ - - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - return f(get_current_context().obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - -def make_pass_decorator( - object_type: type[T], ensure: bool = False -) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: - """Given an object type this creates a decorator that will work - similar to :func:`pass_obj` but instead of passing the object of the - current context, it will find the innermost context of type - :func:`object_type`. - - This generates a decorator that works roughly like this:: - - from functools import update_wrapper - - def decorator(f): - @pass_context - def new_func(ctx, *args, **kwargs): - obj = ctx.find_object(object_type) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - :param object_type: the type of the object to pass. - :param ensure: if set to `True`, a new object will be created and - remembered on the context if it's not there yet. - """ - - def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - ctx = get_current_context() - - obj: T | None - if ensure: - obj = ctx.ensure_object(object_type) - else: - obj = ctx.find_object(object_type) - - if obj is None: - raise RuntimeError( - "Managed to invoke callback without a context" - f" object of type {object_type.__name__!r}" - " existing." - ) - - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - return decorator - - -def pass_meta_key( - key: str, *, doc_description: str | None = None -) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: - """Create a decorator that passes a key from - :attr:`click.Context.meta` as the first argument to the decorated - function. - - :param key: Key in ``Context.meta`` to pass. - :param doc_description: Description of the object being passed, - inserted into the decorator's docstring. Defaults to "the 'key' - key from Context.meta". - - .. versionadded:: 8.0 - """ - - def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - ctx = get_current_context() - obj = ctx.meta[key] - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - if doc_description is None: - doc_description = f"the {key!r} key from :attr:`click.Context.meta`" - - decorator.__doc__ = ( - f"Decorator that passes {doc_description} as the first argument" - " to the decorated function." - ) - return decorator - - -CmdType = t.TypeVar("CmdType", bound=Command) - - -# variant: no call, directly as decorator for a function. -@t.overload -def command(name: _AnyCallable) -> Command: ... - - -# variant: with positional name and with positional or keyword cls argument: -# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) -@t.overload -def command( - name: str | None, - cls: type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: ... 
- - -# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) -@t.overload -def command( - name: None = None, - *, - cls: type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def command( - name: str | None = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Command]: ... - - -def command( - name: str | _AnyCallable | None = None, - cls: type[CmdType] | None = None, - **attrs: t.Any, -) -> Command | t.Callable[[_AnyCallable], Command | CmdType]: - r"""Creates a new :class:`Command` and uses the decorated function as - callback. This will also automatically attach all decorated - :func:`option`\s and :func:`argument`\s as parameters to the command. - - The name of the command defaults to the name of the function, converted to - lowercase, with underscores ``_`` replaced by dashes ``-``, and the suffixes - ``_command``, ``_cmd``, ``_group``, and ``_grp`` are removed. For example, - ``init_data_command`` becomes ``init-data``. - - All keyword arguments are forwarded to the underlying command class. - For the ``params`` argument, any decorated params are appended to - the end of the list. - - Once decorated the function turns into a :class:`Command` instance - that can be invoked as a command line utility or be attached to a - command :class:`Group`. - - :param name: The name of the command. Defaults to modifying the function's - name as described above. - :param cls: The command class to create. Defaults to :class:`Command`. - - .. versionchanged:: 8.2 - The suffixes ``_command``, ``_cmd``, ``_group``, and ``_grp`` are - removed when generating the name. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.1 - The ``params`` argument can be used. Decorated params are - appended to the end of the list. - """ - - func: t.Callable[[_AnyCallable], t.Any] | None = None - - if callable(name): - func = name - name = None - assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." - assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." - - if cls is None: - cls = t.cast("type[CmdType]", Command) - - def decorator(f: _AnyCallable) -> CmdType: - if isinstance(f, Command): - raise TypeError("Attempted to convert a callback into a command twice.") - - attr_params = attrs.pop("params", None) - params = attr_params if attr_params is not None else [] - - try: - decorator_params = f.__click_params__ # type: ignore - except AttributeError: - pass - else: - del f.__click_params__ # type: ignore - params.extend(reversed(decorator_params)) - - if attrs.get("help") is None: - attrs["help"] = f.__doc__ - - if t.TYPE_CHECKING: - assert cls is not None - assert not callable(name) - - if name is not None: - cmd_name = name - else: - cmd_name = f.__name__.lower().replace("_", "-") - cmd_left, sep, suffix = cmd_name.rpartition("-") - - if sep and suffix in {"command", "cmd", "group", "grp"}: - cmd_name = cmd_left - - cmd = cls(name=cmd_name, callback=f, params=params, **attrs) - cmd.__doc__ = f.__doc__ - return cmd - - if func is not None: - return decorator(func) - - return decorator - - -GrpType = t.TypeVar("GrpType", bound=Group) - - -# variant: no call, directly as decorator for a function. -@t.overload -def group(name: _AnyCallable) -> Group: ... - - -# variant: with positional name and with positional or keyword cls argument: -# @group(namearg, GroupCls, ...) 
or @group(namearg, cls=GroupCls, ...) -@t.overload -def group( - name: str | None, - cls: type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: ... - - -# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) -@t.overload -def group( - name: None = None, - *, - cls: type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def group( - name: str | None = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Group]: ... - - -def group( - name: str | _AnyCallable | None = None, - cls: type[GrpType] | None = None, - **attrs: t.Any, -) -> Group | t.Callable[[_AnyCallable], Group | GrpType]: - """Creates a new :class:`Group` with a function as callback. This - works otherwise the same as :func:`command` just that the `cls` - parameter is set to :class:`Group`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - """ - if cls is None: - cls = t.cast("type[GrpType]", Group) - - if callable(name): - return command(cls=cls, **attrs)(name) - - return command(name, cls, **attrs) - - -def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: - if isinstance(f, Command): - f.params.append(param) - else: - if not hasattr(f, "__click_params__"): - f.__click_params__ = [] # type: ignore - - f.__click_params__.append(param) # type: ignore - - -def argument( - *param_decls: str, cls: type[Argument] | None = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an argument to the command. All positional arguments are - passed as parameter declarations to :class:`Argument`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Argument` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default argument class, refer to :class:`Argument` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the argument class to instantiate. This defaults to - :class:`Argument`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Argument - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def option( - *param_decls: str, cls: type[Option] | None = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an option to the command. All positional arguments are - passed as parameter declarations to :class:`Option`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Option` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default option class, refer to :class:`Option` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the option class to instantiate. This defaults to - :class:`Option`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Option - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--yes`` option which shows a prompt before continuing if - not passed. 
If the prompt is declined, the program will exit. - - :param param_decls: One or more option names. Defaults to the single - value ``"--yes"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value: - ctx.abort() - - if not param_decls: - param_decls = ("--yes",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("callback", callback) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("prompt", "Do you want to continue?") - kwargs.setdefault("help", "Confirm the action without prompting.") - return option(*param_decls, **kwargs) - - -def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--password`` option which prompts for a password, hiding - input and asking to enter the value again for confirmation. - - :param param_decls: One or more option names. Defaults to the single - value ``"--password"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - if not param_decls: - param_decls = ("--password",) - - kwargs.setdefault("prompt", True) - kwargs.setdefault("confirmation_prompt", True) - kwargs.setdefault("hide_input", True) - return option(*param_decls, **kwargs) - - -def version_option( - version: str | None = None, - *param_decls: str, - package_name: str | None = None, - prog_name: str | None = None, - message: str | None = None, - **kwargs: t.Any, -) -> t.Callable[[FC], FC]: - """Add a ``--version`` option which immediately prints the version - number and exits the program. - - If ``version`` is not provided, Click will try to detect it using - :func:`importlib.metadata.version` to get the version for the - ``package_name``. - - If ``package_name`` is not provided, Click will try to detect it by - inspecting the stack frames. This will be used to detect the - version, so it must match the name of the installed package. - - :param version: The version number to show. If not provided, Click - will try to detect it. - :param param_decls: One or more option names. Defaults to the single - value ``"--version"``. - :param package_name: The package name to detect the version from. If - not provided, Click will try to detect it. - :param prog_name: The name of the CLI to show in the message. If not - provided, it will be detected from the command. - :param message: The message to show. The values ``%(prog)s``, - ``%(package)s``, and ``%(version)s`` are available. Defaults to - ``"%(prog)s, version %(version)s"``. - :param kwargs: Extra arguments are passed to :func:`option`. - :raise RuntimeError: ``version`` could not be detected. - - .. versionchanged:: 8.0 - Add the ``package_name`` parameter, and the ``%(package)s`` - value for messages. - - .. versionchanged:: 8.0 - Use :mod:`importlib.metadata` instead of ``pkg_resources``. The - version is detected based on the package name, not the entry - point name. The Python package name must match the installed - package name, or be passed with ``package_name=``. 
- """ - if message is None: - message = _("%(prog)s, version %(version)s") - - if version is None and package_name is None: - frame = inspect.currentframe() - f_back = frame.f_back if frame is not None else None - f_globals = f_back.f_globals if f_back is not None else None - # break reference cycle - # https://docs.python.org/3/library/inspect.html#the-interpreter-stack - del frame - - if f_globals is not None: - package_name = f_globals.get("__name__") - - if package_name == "__main__": - package_name = f_globals.get("__package__") - - if package_name: - package_name = package_name.partition(".")[0] - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - nonlocal prog_name - nonlocal version - - if prog_name is None: - prog_name = ctx.find_root().info_name - - if version is None and package_name is not None: - import importlib.metadata - - try: - version = importlib.metadata.version(package_name) - except importlib.metadata.PackageNotFoundError: - raise RuntimeError( - f"{package_name!r} is not installed. Try passing" - " 'package_name' instead." - ) from None - - if version is None: - raise RuntimeError( - f"Could not determine the version for {package_name!r} automatically." - ) - - echo( - message % {"prog": prog_name, "package": package_name, "version": version}, - color=ctx.color, - ) - ctx.exit() - - if not param_decls: - param_decls = ("--version",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show the version and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) - - -def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Pre-configured ``--help`` option which immediately prints the help page - and exits the program. - - :param param_decls: One or more option names. Defaults to the single - value ``"--help"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
- """ - - def show_help(ctx: Context, param: Parameter, value: bool) -> None: - """Callback that print the help page on ```` and exits.""" - if value and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - if not param_decls: - param_decls = ("--help",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show this message and exit.")) - kwargs.setdefault("callback", show_help) - - return option(*param_decls, **kwargs) diff --git a/venv/lib/python3.10/site-packages/click/exceptions.py b/venv/lib/python3.10/site-packages/click/exceptions.py deleted file mode 100644 index f141a83..0000000 --- a/venv/lib/python3.10/site-packages/click/exceptions.py +++ /dev/null @@ -1,308 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import typing as t -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import get_text_stderr -from .globals import resolve_color_default -from .utils import echo -from .utils import format_filename - -if t.TYPE_CHECKING: - from .core import Command - from .core import Context - from .core import Parameter - - -def _join_param_hints(param_hint: cabc.Sequence[str] | str | None) -> str | None: - if param_hint is not None and not isinstance(param_hint, str): - return " / ".join(repr(x) for x in param_hint) - - return param_hint - - -class ClickException(Exception): - """An exception that Click can handle and show to the user.""" - - #: The exit code for this exception. - exit_code = 1 - - def __init__(self, message: str) -> None: - super().__init__(message) - # The context will be removed by the time we print the message, so cache - # the color settings here to be used later on (in `show`) - self.show_color: bool | None = resolve_color_default() - self.message = message - - def format_message(self) -> str: - return self.message - - def __str__(self) -> str: - return self.message - - def show(self, file: t.IO[t.Any] | None = None) -> None: - if file is None: - file = get_text_stderr() - - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=self.show_color, - ) - - -class UsageError(ClickException): - """An internal exception that signals a usage error. This typically - aborts any further handling. - - :param message: the error message to display. - :param ctx: optionally the context that caused this error. Click will - fill in the context automatically in some situations. - """ - - exit_code = 2 - - def __init__(self, message: str, ctx: Context | None = None) -> None: - super().__init__(message) - self.ctx = ctx - self.cmd: Command | None = self.ctx.command if self.ctx else None - - def show(self, file: t.IO[t.Any] | None = None) -> None: - if file is None: - file = get_text_stderr() - color = None - hint = "" - if ( - self.ctx is not None - and self.ctx.command.get_help_option(self.ctx) is not None - ): - hint = _("Try '{command} {option}' for help.").format( - command=self.ctx.command_path, option=self.ctx.help_option_names[0] - ) - hint = f"{hint}\n" - if self.ctx is not None: - color = self.ctx.color - echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=color, - ) - - -class BadParameter(UsageError): - """An exception that formats out a standardized error message for a - bad parameter. 
This is useful when thrown from a callback or type as - Click will attach contextual information to it (for instance, which - parameter it is). - - .. versionadded:: 2.0 - - :param param: the parameter object that caused this error. This can - be left out, and Click will attach this info itself - if possible. - :param param_hint: a string that shows up as parameter name. This - can be used as alternative to `param` in cases - where custom validation should happen. If it is - a string it's used as such, if it's a list then - each item is quoted and separated. - """ - - def __init__( - self, - message: str, - ctx: Context | None = None, - param: Parameter | None = None, - param_hint: str | None = None, - ) -> None: - super().__init__(message, ctx) - self.param = param - self.param_hint = param_hint - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - return _("Invalid value: {message}").format(message=self.message) - - return _("Invalid value for {param_hint}: {message}").format( - param_hint=_join_param_hints(param_hint), message=self.message - ) - - -class MissingParameter(BadParameter): - """Raised if click required an option or argument but it was not - provided when invoking the script. - - .. versionadded:: 4.0 - - :param param_type: a string that indicates the type of the parameter. - The default is to inherit the parameter type from - the given `param`. Valid values are ``'parameter'``, - ``'option'`` or ``'argument'``. - """ - - def __init__( - self, - message: str | None = None, - ctx: Context | None = None, - param: Parameter | None = None, - param_hint: str | None = None, - param_type: str | None = None, - ) -> None: - super().__init__(message or "", ctx, param, param_hint) - self.param_type = param_type - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint: str | None = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - param_hint = None - - param_hint = _join_param_hints(param_hint) - param_hint = f" {param_hint}" if param_hint else "" - - param_type = self.param_type - if param_type is None and self.param is not None: - param_type = self.param.param_type_name - - msg = self.message - if self.param is not None: - msg_extra = self.param.type.get_missing_message( - param=self.param, ctx=self.ctx - ) - if msg_extra: - if msg: - msg += f". {msg_extra}" - else: - msg = msg_extra - - msg = f" {msg}" if msg else "" - - # Translate param_type for known types. - if param_type == "argument": - missing = _("Missing argument") - elif param_type == "option": - missing = _("Missing option") - elif param_type == "parameter": - missing = _("Missing parameter") - else: - missing = _("Missing {param_type}").format(param_type=param_type) - - return f"{missing}{param_hint}.{msg}" - - def __str__(self) -> str: - if not self.message: - param_name = self.param.name if self.param else None - return _("Missing parameter: {param_name}").format(param_name=param_name) - else: - return self.message - - -class NoSuchOption(UsageError): - """Raised if click attempted to handle an option that does not - exist. - - .. 
versionadded:: 4.0 - """ - - def __init__( - self, - option_name: str, - message: str | None = None, - possibilities: cabc.Sequence[str] | None = None, - ctx: Context | None = None, - ) -> None: - if message is None: - message = _("No such option: {name}").format(name=option_name) - - super().__init__(message, ctx) - self.option_name = option_name - self.possibilities = possibilities - - def format_message(self) -> str: - if not self.possibilities: - return self.message - - possibility_str = ", ".join(sorted(self.possibilities)) - suggest = ngettext( - "Did you mean {possibility}?", - "(Possible options: {possibilities})", - len(self.possibilities), - ).format(possibility=possibility_str, possibilities=possibility_str) - return f"{self.message} {suggest}" - - -class BadOptionUsage(UsageError): - """Raised if an option is generally supplied but the use of the option - was incorrect. This is for instance raised if the number of arguments - for an option is not correct. - - .. versionadded:: 4.0 - - :param option_name: the name of the option being used incorrectly. - """ - - def __init__( - self, option_name: str, message: str, ctx: Context | None = None - ) -> None: - super().__init__(message, ctx) - self.option_name = option_name - - -class BadArgumentUsage(UsageError): - """Raised if an argument is generally supplied but the use of the argument - was incorrect. This is for instance raised if the number of values - for an argument is not correct. - - .. versionadded:: 6.0 - """ - - -class NoArgsIsHelpError(UsageError): - def __init__(self, ctx: Context) -> None: - self.ctx: Context - super().__init__(ctx.get_help(), ctx=ctx) - - def show(self, file: t.IO[t.Any] | None = None) -> None: - echo(self.format_message(), file=file, err=True, color=self.ctx.color) - - -class FileError(ClickException): - """Raised if a file cannot be opened.""" - - def __init__(self, filename: str, hint: str | None = None) -> None: - if hint is None: - hint = _("unknown error") - - super().__init__(hint) - self.ui_filename: str = format_filename(filename) - self.filename = filename - - def format_message(self) -> str: - return _("Could not open file {filename!r}: {message}").format( - filename=self.ui_filename, message=self.message - ) - - -class Abort(RuntimeError): - """An internal signalling exception that signals Click to abort.""" - - -class Exit(RuntimeError): - """An exception that indicates that the application should exit with some - status code. - - :param code: the status code to exit with. - """ - - __slots__ = ("exit_code",) - - def __init__(self, code: int = 0) -> None: - self.exit_code: int = code diff --git a/venv/lib/python3.10/site-packages/click/formatting.py b/venv/lib/python3.10/site-packages/click/formatting.py deleted file mode 100644 index 9891f88..0000000 --- a/venv/lib/python3.10/site-packages/click/formatting.py +++ /dev/null @@ -1,301 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -from contextlib import contextmanager -from gettext import gettext as _ - -from ._compat import term_len -from .parser import _split_opt - -# Can force a width. 
This is used by the test system -FORCED_WIDTH: int | None = None - - -def measure_table(rows: cabc.Iterable[tuple[str, str]]) -> tuple[int, ...]: - widths: dict[int, int] = {} - - for row in rows: - for idx, col in enumerate(row): - widths[idx] = max(widths.get(idx, 0), term_len(col)) - - return tuple(y for x, y in sorted(widths.items())) - - -def iter_rows( - rows: cabc.Iterable[tuple[str, str]], col_count: int -) -> cabc.Iterator[tuple[str, ...]]: - for row in rows: - yield row + ("",) * (col_count - len(row)) - - -def wrap_text( - text: str, - width: int = 78, - initial_indent: str = "", - subsequent_indent: str = "", - preserve_paragraphs: bool = False, -) -> str: - """A helper function that intelligently wraps text. By default, it - assumes that it operates on a single paragraph of text but if the - `preserve_paragraphs` parameter is provided it will intelligently - handle paragraphs (defined by two empty lines). - - If paragraphs are handled, a paragraph can be prefixed with an empty - line containing the ``\\b`` character (``\\x08``) to indicate that - no rewrapping should happen in that block. - - :param text: the text that should be rewrapped. - :param width: the maximum width for the text. - :param initial_indent: the initial indent that should be placed on the - first line as a string. - :param subsequent_indent: the indent string that should be placed on - each consecutive line. - :param preserve_paragraphs: if this flag is set then the wrapping will - intelligently handle paragraphs. - """ - from ._textwrap import TextWrapper - - text = text.expandtabs() - wrapper = TextWrapper( - width, - initial_indent=initial_indent, - subsequent_indent=subsequent_indent, - replace_whitespace=False, - ) - if not preserve_paragraphs: - return wrapper.fill(text) - - p: list[tuple[int, bool, str]] = [] - buf: list[str] = [] - indent = None - - def _flush_par() -> None: - if not buf: - return - if buf[0].strip() == "\b": - p.append((indent or 0, True, "\n".join(buf[1:]))) - else: - p.append((indent or 0, False, " ".join(buf))) - del buf[:] - - for line in text.splitlines(): - if not line: - _flush_par() - indent = None - else: - if indent is None: - orig_len = term_len(line) - line = line.lstrip() - indent = orig_len - term_len(line) - buf.append(line) - _flush_par() - - rv = [] - for indent, raw, text in p: - with wrapper.extra_indent(" " * indent): - if raw: - rv.append(wrapper.indent_only(text)) - else: - rv.append(wrapper.fill(text)) - - return "\n\n".join(rv) - - -class HelpFormatter: - """This class helps with formatting text-based help pages. It's - usually just needed for very special internal cases, but it's also - exposed so that developers can write their own fancy outputs. - - At present, it always writes into memory. - - :param indent_increment: the additional increment for each level. - :param width: the width for the text. This defaults to the terminal - width clamped to a maximum of 78. 
- """ - - def __init__( - self, - indent_increment: int = 2, - width: int | None = None, - max_width: int | None = None, - ) -> None: - import shutil - - self.indent_increment = indent_increment - if max_width is None: - max_width = 80 - if width is None: - width = FORCED_WIDTH - if width is None: - width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) - self.width = width - self.current_indent: int = 0 - self.buffer: list[str] = [] - - def write(self, string: str) -> None: - """Writes a unicode string into the internal buffer.""" - self.buffer.append(string) - - def indent(self) -> None: - """Increases the indentation.""" - self.current_indent += self.indent_increment - - def dedent(self) -> None: - """Decreases the indentation.""" - self.current_indent -= self.indent_increment - - def write_usage(self, prog: str, args: str = "", prefix: str | None = None) -> None: - """Writes a usage line into the buffer. - - :param prog: the program name. - :param args: whitespace separated list of arguments. - :param prefix: The prefix for the first line. Defaults to - ``"Usage: "``. - """ - if prefix is None: - prefix = f"{_('Usage:')} " - - usage_prefix = f"{prefix:>{self.current_indent}}{prog} " - text_width = self.width - self.current_indent - - if text_width >= (term_len(usage_prefix) + 20): - # The arguments will fit to the right of the prefix. - indent = " " * term_len(usage_prefix) - self.write( - wrap_text( - args, - text_width, - initial_indent=usage_prefix, - subsequent_indent=indent, - ) - ) - else: - # The prefix is too long, put the arguments on the next line. - self.write(usage_prefix) - self.write("\n") - indent = " " * (max(self.current_indent, term_len(prefix)) + 4) - self.write( - wrap_text( - args, text_width, initial_indent=indent, subsequent_indent=indent - ) - ) - - self.write("\n") - - def write_heading(self, heading: str) -> None: - """Writes a heading into the buffer.""" - self.write(f"{'':>{self.current_indent}}{heading}:\n") - - def write_paragraph(self) -> None: - """Writes a paragraph into the buffer.""" - if self.buffer: - self.write("\n") - - def write_text(self, text: str) -> None: - """Writes re-indented text into the buffer. This rewraps and - preserves paragraphs. - """ - indent = " " * self.current_indent - self.write( - wrap_text( - text, - self.width, - initial_indent=indent, - subsequent_indent=indent, - preserve_paragraphs=True, - ) - ) - self.write("\n") - - def write_dl( - self, - rows: cabc.Sequence[tuple[str, str]], - col_max: int = 30, - col_spacing: int = 2, - ) -> None: - """Writes a definition list into the buffer. This is how options - and commands are usually formatted. - - :param rows: a list of two item tuples for the terms and values. - :param col_max: the maximum width of the first column. - :param col_spacing: the number of spaces between the first and - second column. 
- """ - rows = list(rows) - widths = measure_table(rows) - if len(widths) != 2: - raise TypeError("Expected two columns for definition list") - - first_col = min(widths[0], col_max) + col_spacing - - for first, second in iter_rows(rows, len(widths)): - self.write(f"{'':>{self.current_indent}}{first}") - if not second: - self.write("\n") - continue - if term_len(first) <= first_col - col_spacing: - self.write(" " * (first_col - term_len(first))) - else: - self.write("\n") - self.write(" " * (first_col + self.current_indent)) - - text_width = max(self.width - first_col - 2, 10) - wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) - lines = wrapped_text.splitlines() - - if lines: - self.write(f"{lines[0]}\n") - - for line in lines[1:]: - self.write(f"{'':>{first_col + self.current_indent}}{line}\n") - else: - self.write("\n") - - @contextmanager - def section(self, name: str) -> cabc.Iterator[None]: - """Helpful context manager that writes a paragraph, a heading, - and the indents. - - :param name: the section name that is written as heading. - """ - self.write_paragraph() - self.write_heading(name) - self.indent() - try: - yield - finally: - self.dedent() - - @contextmanager - def indentation(self) -> cabc.Iterator[None]: - """A context manager that increases the indentation.""" - self.indent() - try: - yield - finally: - self.dedent() - - def getvalue(self) -> str: - """Returns the buffer contents.""" - return "".join(self.buffer) - - -def join_options(options: cabc.Sequence[str]) -> tuple[str, bool]: - """Given a list of option strings this joins them in the most appropriate - way and returns them in the form ``(formatted_string, - any_prefix_is_slash)`` where the second item in the tuple is a flag that - indicates if any of the option prefixes was a slash. - """ - rv = [] - any_prefix_is_slash = False - - for opt in options: - prefix = _split_opt(opt)[0] - - if prefix == "/": - any_prefix_is_slash = True - - rv.append((len(prefix), opt)) - - rv.sort(key=lambda x: x[0]) - return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/venv/lib/python3.10/site-packages/click/globals.py b/venv/lib/python3.10/site-packages/click/globals.py deleted file mode 100644 index a2f9172..0000000 --- a/venv/lib/python3.10/site-packages/click/globals.py +++ /dev/null @@ -1,67 +0,0 @@ -from __future__ import annotations - -import typing as t -from threading import local - -if t.TYPE_CHECKING: - from .core import Context - -_local = local() - - -@t.overload -def get_current_context(silent: t.Literal[False] = False) -> Context: ... - - -@t.overload -def get_current_context(silent: bool = ...) -> Context | None: ... - - -def get_current_context(silent: bool = False) -> Context | None: - """Returns the current click context. This can be used as a way to - access the current context object from anywhere. This is a more implicit - alternative to the :func:`pass_context` decorator. This function is - primarily useful for helpers such as :func:`echo` which might be - interested in changing its behavior based on the current context. - - To push the current context, :meth:`Context.scope` can be used. - - .. versionadded:: 5.0 - - :param silent: if set to `True` the return value is `None` if no context - is available. The default behavior is to raise a - :exc:`RuntimeError`. 
- """ - try: - return t.cast("Context", _local.stack[-1]) - except (AttributeError, IndexError) as e: - if not silent: - raise RuntimeError("There is no active click context.") from e - - return None - - -def push_context(ctx: Context) -> None: - """Pushes a new context to the current stack.""" - _local.__dict__.setdefault("stack", []).append(ctx) - - -def pop_context() -> None: - """Removes the top level from the stack.""" - _local.stack.pop() - - -def resolve_color_default(color: bool | None = None) -> bool | None: - """Internal helper to get the default value of the color flag. If a - value is passed it's returned unchanged, otherwise it's looked up from - the current context. - """ - if color is not None: - return color - - ctx = get_current_context(silent=True) - - if ctx is not None: - return ctx.color - - return None diff --git a/venv/lib/python3.10/site-packages/click/parser.py b/venv/lib/python3.10/site-packages/click/parser.py deleted file mode 100644 index a8b7d26..0000000 --- a/venv/lib/python3.10/site-packages/click/parser.py +++ /dev/null @@ -1,532 +0,0 @@ -""" -This module started out as largely a copy paste from the stdlib's -optparse module with the features removed that we do not need from -optparse because we implement them in Click on a higher level (for -instance type handling, help formatting and a lot more). - -The plan is to remove more and more from here over time. - -The reason this is a different module and not optparse from the stdlib -is that there are differences in 2.x and 3.x about the error messages -generated and optparse in the stdlib uses gettext for no good reason -and might cause us issues. - -Click uses parts of optparse written by Gregory P. Ward and maintained -by the Python Software Foundation. This is limited to code in parser.py. - -Copyright 2001-2006 Gregory P. Ward. All rights reserved. -Copyright 2002-2006 Python Software Foundation. All rights reserved. -""" - -# This code uses parts of optparse written by Gregory P. Ward and -# maintained by the Python Software Foundation. -# Copyright 2001-2006 Gregory P. Ward -# Copyright 2002-2006 Python Software Foundation -from __future__ import annotations - -import collections.abc as cabc -import typing as t -from collections import deque -from gettext import gettext as _ -from gettext import ngettext - -from .exceptions import BadArgumentUsage -from .exceptions import BadOptionUsage -from .exceptions import NoSuchOption -from .exceptions import UsageError - -if t.TYPE_CHECKING: - from .core import Argument as CoreArgument - from .core import Context - from .core import Option as CoreOption - from .core import Parameter as CoreParameter - -V = t.TypeVar("V") - -# Sentinel value that indicates an option was passed as a flag without a -# value but is not a flag option. Option.consume_value uses this to -# prompt or use the flag_value. -_flag_needs_value = object() - - -def _unpack_args( - args: cabc.Sequence[str], nargs_spec: cabc.Sequence[int] -) -> tuple[cabc.Sequence[str | cabc.Sequence[str | None] | None], list[str]]: - """Given an iterable of arguments and an iterable of nargs specifications, - it returns a tuple with all the unpacked arguments at the first index - and all remaining arguments as the second. - - The nargs specification is the number of arguments that should be consumed - or `-1` to indicate that this position should eat up all the remainders. - - Missing items are filled with `None`. - """ - args = deque(args) - nargs_spec = deque(nargs_spec) - rv: list[str | tuple[str | None, ...] 
| None] = [] - spos: int | None = None - - def _fetch(c: deque[V]) -> V | None: - try: - if spos is None: - return c.popleft() - else: - return c.pop() - except IndexError: - return None - - while nargs_spec: - nargs = _fetch(nargs_spec) - - if nargs is None: - continue - - if nargs == 1: - rv.append(_fetch(args)) - elif nargs > 1: - x = [_fetch(args) for _ in range(nargs)] - - # If we're reversed, we're pulling in the arguments in reverse, - # so we need to turn them around. - if spos is not None: - x.reverse() - - rv.append(tuple(x)) - elif nargs < 0: - if spos is not None: - raise TypeError("Cannot have two nargs < 0") - - spos = len(rv) - rv.append(None) - - # spos is the position of the wildcard (star). If it's not `None`, - # we fill it with the remainder. - if spos is not None: - rv[spos] = tuple(args) - args = [] - rv[spos + 1 :] = reversed(rv[spos + 1 :]) - - return tuple(rv), list(args) - - -def _split_opt(opt: str) -> tuple[str, str]: - first = opt[:1] - if first.isalnum(): - return "", opt - if opt[1:2] == first: - return opt[:2], opt[2:] - return first, opt[1:] - - -def _normalize_opt(opt: str, ctx: Context | None) -> str: - if ctx is None or ctx.token_normalize_func is None: - return opt - prefix, opt = _split_opt(opt) - return f"{prefix}{ctx.token_normalize_func(opt)}" - - -class _Option: - def __init__( - self, - obj: CoreOption, - opts: cabc.Sequence[str], - dest: str | None, - action: str | None = None, - nargs: int = 1, - const: t.Any | None = None, - ): - self._short_opts = [] - self._long_opts = [] - self.prefixes: set[str] = set() - - for opt in opts: - prefix, value = _split_opt(opt) - if not prefix: - raise ValueError(f"Invalid start character for option ({opt})") - self.prefixes.add(prefix[0]) - if len(prefix) == 1 and len(value) == 1: - self._short_opts.append(opt) - else: - self._long_opts.append(opt) - self.prefixes.add(prefix) - - if action is None: - action = "store" - - self.dest = dest - self.action = action - self.nargs = nargs - self.const = const - self.obj = obj - - @property - def takes_value(self) -> bool: - return self.action in ("store", "append") - - def process(self, value: t.Any, state: _ParsingState) -> None: - if self.action == "store": - state.opts[self.dest] = value # type: ignore - elif self.action == "store_const": - state.opts[self.dest] = self.const # type: ignore - elif self.action == "append": - state.opts.setdefault(self.dest, []).append(value) # type: ignore - elif self.action == "append_const": - state.opts.setdefault(self.dest, []).append(self.const) # type: ignore - elif self.action == "count": - state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore - else: - raise ValueError(f"unknown action '{self.action}'") - state.order.append(self.obj) - - -class _Argument: - def __init__(self, obj: CoreArgument, dest: str | None, nargs: int = 1): - self.dest = dest - self.nargs = nargs - self.obj = obj - - def process( - self, - value: str | cabc.Sequence[str | None] | None, - state: _ParsingState, - ) -> None: - if self.nargs > 1: - assert value is not None - holes = sum(1 for x in value if x is None) - if holes == len(value): - value = None - elif holes != 0: - raise BadArgumentUsage( - _("Argument {name!r} takes {nargs} values.").format( - name=self.dest, nargs=self.nargs - ) - ) - - if self.nargs == -1 and self.obj.envvar is not None and value == (): - # Replace empty tuple with None so that a value from the - # environment may be tried. 
- value = None - - state.opts[self.dest] = value # type: ignore - state.order.append(self.obj) - - -class _ParsingState: - def __init__(self, rargs: list[str]) -> None: - self.opts: dict[str, t.Any] = {} - self.largs: list[str] = [] - self.rargs = rargs - self.order: list[CoreParameter] = [] - - -class _OptionParser: - """The option parser is an internal class that is ultimately used to - parse options and arguments. It's modelled after optparse and brings - a similar but vastly simplified API. It should generally not be used - directly as the high level Click classes wrap it for you. - - It's not nearly as extensible as optparse or argparse as it does not - implement features that are implemented on a higher level (such as - types or defaults). - - :param ctx: optionally the :class:`~click.Context` where this parser - should go with. - - .. deprecated:: 8.2 - Will be removed in Click 9.0. - """ - - def __init__(self, ctx: Context | None = None) -> None: - #: The :class:`~click.Context` for this parser. This might be - #: `None` for some advanced use cases. - self.ctx = ctx - #: This controls how the parser deals with interspersed arguments. - #: If this is set to `False`, the parser will stop on the first - #: non-option. Click uses this to implement nested subcommands - #: safely. - self.allow_interspersed_args: bool = True - #: This tells the parser how to deal with unknown options. By - #: default it will error out (which is sensible), but there is a - #: second mode where it will ignore it and continue processing - #: after shifting all the unknown options into the resulting args. - self.ignore_unknown_options: bool = False - - if ctx is not None: - self.allow_interspersed_args = ctx.allow_interspersed_args - self.ignore_unknown_options = ctx.ignore_unknown_options - - self._short_opt: dict[str, _Option] = {} - self._long_opt: dict[str, _Option] = {} - self._opt_prefixes = {"-", "--"} - self._args: list[_Argument] = [] - - def add_option( - self, - obj: CoreOption, - opts: cabc.Sequence[str], - dest: str | None, - action: str | None = None, - nargs: int = 1, - const: t.Any | None = None, - ) -> None: - """Adds a new option named `dest` to the parser. The destination - is not inferred (unlike with optparse) and needs to be explicitly - provided. Action can be any of ``store``, ``store_const``, - ``append``, ``append_const`` or ``count``. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - opts = [_normalize_opt(opt, self.ctx) for opt in opts] - option = _Option(obj, opts, dest, action=action, nargs=nargs, const=const) - self._opt_prefixes.update(option.prefixes) - for opt in option._short_opts: - self._short_opt[opt] = option - for opt in option._long_opts: - self._long_opt[opt] = option - - def add_argument(self, obj: CoreArgument, dest: str | None, nargs: int = 1) -> None: - """Adds a positional argument named `dest` to the parser. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - self._args.append(_Argument(obj, dest=dest, nargs=nargs)) - - def parse_args( - self, args: list[str] - ) -> tuple[dict[str, t.Any], list[str], list[CoreParameter]]: - """Parses positional arguments and returns ``(values, args, order)`` - for the parsed options and arguments as well as the leftover - arguments if there are any. The order is a list of objects as they - appear on the command line. If arguments appear multiple times they - will be memorized multiple times as well. 
- """ - state = _ParsingState(args) - try: - self._process_args_for_options(state) - self._process_args_for_args(state) - except UsageError: - if self.ctx is None or not self.ctx.resilient_parsing: - raise - return state.opts, state.largs, state.order - - def _process_args_for_args(self, state: _ParsingState) -> None: - pargs, args = _unpack_args( - state.largs + state.rargs, [x.nargs for x in self._args] - ) - - for idx, arg in enumerate(self._args): - arg.process(pargs[idx], state) - - state.largs = args - state.rargs = [] - - def _process_args_for_options(self, state: _ParsingState) -> None: - while state.rargs: - arg = state.rargs.pop(0) - arglen = len(arg) - # Double dashes always handled explicitly regardless of what - # prefixes are valid. - if arg == "--": - return - elif arg[:1] in self._opt_prefixes and arglen > 1: - self._process_opts(arg, state) - elif self.allow_interspersed_args: - state.largs.append(arg) - else: - state.rargs.insert(0, arg) - return - - # Say this is the original argument list: - # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] - # ^ - # (we are about to process arg(i)). - # - # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of - # [arg0, ..., arg(i-1)] (any options and their arguments will have - # been removed from largs). - # - # The while loop will usually consume 1 or more arguments per pass. - # If it consumes 1 (eg. arg is an option that takes no arguments), - # then after _process_arg() is done the situation is: - # - # largs = subset of [arg0, ..., arg(i)] - # rargs = [arg(i+1), ..., arg(N-1)] - # - # If allow_interspersed_args is false, largs will always be - # *empty* -- still a subset of [arg0, ..., arg(i-1)], but - # not a very interesting subset! - - def _match_long_opt( - self, opt: str, explicit_value: str | None, state: _ParsingState - ) -> None: - if opt not in self._long_opt: - from difflib import get_close_matches - - possibilities = get_close_matches(opt, self._long_opt) - raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) - - option = self._long_opt[opt] - if option.takes_value: - # At this point it's safe to modify rargs by injecting the - # explicit value, because no exception is raised in this - # branch. This means that the inserted value will be fully - # consumed. - if explicit_value is not None: - state.rargs.insert(0, explicit_value) - - value = self._get_value_from_state(opt, option, state) - - elif explicit_value is not None: - raise BadOptionUsage( - opt, _("Option {name!r} does not take a value.").format(name=opt) - ) - - else: - value = None - - option.process(value, state) - - def _match_short_opt(self, arg: str, state: _ParsingState) -> None: - stop = False - i = 1 - prefix = arg[0] - unknown_options = [] - - for ch in arg[1:]: - opt = _normalize_opt(f"{prefix}{ch}", self.ctx) - option = self._short_opt.get(opt) - i += 1 - - if not option: - if self.ignore_unknown_options: - unknown_options.append(ch) - continue - raise NoSuchOption(opt, ctx=self.ctx) - if option.takes_value: - # Any characters left in arg? Pretend they're the - # next arg, and stop consuming characters of arg. - if i < len(arg): - state.rargs.insert(0, arg[i:]) - stop = True - - value = self._get_value_from_state(opt, option, state) - - else: - value = None - - option.process(value, state) - - if stop: - break - - # If we got any unknown options we recombine the string of the - # remaining options and re-attach the prefix, then report that - # to the state as new larg. 
This way there is basic combinatorics - # that can be achieved while still ignoring unknown arguments. - if self.ignore_unknown_options and unknown_options: - state.largs.append(f"{prefix}{''.join(unknown_options)}") - - def _get_value_from_state( - self, option_name: str, option: _Option, state: _ParsingState - ) -> t.Any: - nargs = option.nargs - - if len(state.rargs) < nargs: - if option.obj._flag_needs_value: - # Option allows omitting the value. - value = _flag_needs_value - else: - raise BadOptionUsage( - option_name, - ngettext( - "Option {name!r} requires an argument.", - "Option {name!r} requires {nargs} arguments.", - nargs, - ).format(name=option_name, nargs=nargs), - ) - elif nargs == 1: - next_rarg = state.rargs[0] - - if ( - option.obj._flag_needs_value - and isinstance(next_rarg, str) - and next_rarg[:1] in self._opt_prefixes - and len(next_rarg) > 1 - ): - # The next arg looks like the start of an option, don't - # use it as the value if omitting the value is allowed. - value = _flag_needs_value - else: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - return value - - def _process_opts(self, arg: str, state: _ParsingState) -> None: - explicit_value = None - # Long option handling happens in two parts. The first part is - # supporting explicitly attached values. In any case, we will try - # to long match the option first. - if "=" in arg: - long_opt, explicit_value = arg.split("=", 1) - else: - long_opt = arg - norm_long_opt = _normalize_opt(long_opt, self.ctx) - - # At this point we will match the (assumed) long option through - # the long option matching code. Note that this allows options - # like "-foo" to be matched as long options. - try: - self._match_long_opt(norm_long_opt, explicit_value, state) - except NoSuchOption: - # At this point the long option matching failed, and we need - # to try with short options. However there is a special rule - # which says, that if we have a two character options prefix - # (applies to "--foo" for instance), we do not dispatch to the - # short option code and will instead raise the no option - # error. - if arg[:2] not in self._opt_prefixes: - self._match_short_opt(arg, state) - return - - if not self.ignore_unknown_options: - raise - - state.largs.append(arg) - - -def __getattr__(name: str) -> object: - import warnings - - if name in { - "OptionParser", - "Argument", - "Option", - "split_opt", - "normalize_opt", - "ParsingState", - }: - warnings.warn( - f"'parser.{name}' is deprecated and will be removed in Click 9.0." 
- " The old parser is available in 'optparse'.", - DeprecationWarning, - stacklevel=2, - ) - return globals()[f"_{name}"] - - if name == "split_arg_string": - from .shell_completion import split_arg_string - - warnings.warn( - "Importing 'parser.split_arg_string' is deprecated, it will only be" - " available in 'shell_completion' in Click 9.0.", - DeprecationWarning, - stacklevel=2, - ) - return split_arg_string - - raise AttributeError(name) diff --git a/venv/lib/python3.10/site-packages/click/py.typed b/venv/lib/python3.10/site-packages/click/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/click/shell_completion.py b/venv/lib/python3.10/site-packages/click/shell_completion.py deleted file mode 100644 index 6c39d5e..0000000 --- a/venv/lib/python3.10/site-packages/click/shell_completion.py +++ /dev/null @@ -1,644 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import re -import typing as t -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .core import ParameterSource -from .utils import echo - - -def shell_complete( - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - instruction: str, -) -> int: - """Perform shell completion for the given CLI program. - - :param cli: Command being called. - :param ctx_args: Extra arguments to pass to - ``cli.make_context``. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - :param instruction: Value of ``complete_var`` with the completion - instruction and shell, in the form ``instruction_shell``. - :return: Status code to exit with. - """ - shell, _, instruction = instruction.partition("_") - comp_cls = get_completion_class(shell) - - if comp_cls is None: - return 1 - - comp = comp_cls(cli, ctx_args, prog_name, complete_var) - - if instruction == "source": - echo(comp.source()) - return 0 - - if instruction == "complete": - echo(comp.complete()) - return 0 - - return 1 - - -class CompletionItem: - """Represents a completion value and metadata about the value. The - default metadata is ``type`` to indicate special shell handling, - and ``help`` if a shell supports showing a help string next to the - value. - - Arbitrary parameters can be passed when creating the object, and - accessed using ``item.attr``. If an attribute wasn't passed, - accessing it returns ``None``. - - :param value: The completion suggestion. - :param type: Tells the shell script to provide special completion - support for the type. Click uses ``"dir"`` and ``"file"``. - :param help: String shown next to the value if supported. - :param kwargs: Arbitrary metadata. The built-in implementations - don't use this, but custom type completions paired with custom - shell support could use it. - """ - - __slots__ = ("value", "type", "help", "_info") - - def __init__( - self, - value: t.Any, - type: str = "plain", - help: str | None = None, - **kwargs: t.Any, - ) -> None: - self.value: t.Any = value - self.type: str = type - self.help: str | None = help - self._info = kwargs - - def __getattr__(self, name: str) -> t.Any: - return self._info.get(name) - - -# Only Bash >= 4.4 has the nosort option. 
-_SOURCE_BASH = """\ -%(complete_func)s() { - local IFS=$'\\n' - local response - - response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ -%(complete_var)s=bash_complete $1) - - for completion in $response; do - IFS=',' read type value <<< "$completion" - - if [[ $type == 'dir' ]]; then - COMPREPLY=() - compopt -o dirnames - elif [[ $type == 'file' ]]; then - COMPREPLY=() - compopt -o default - elif [[ $type == 'plain' ]]; then - COMPREPLY+=($value) - fi - done - - return 0 -} - -%(complete_func)s_setup() { - complete -o nosort -F %(complete_func)s %(prog_name)s -} - -%(complete_func)s_setup; -""" - -_SOURCE_ZSH = """\ -#compdef %(prog_name)s - -%(complete_func)s() { - local -a completions - local -a completions_with_descriptions - local -a response - (( ! $+commands[%(prog_name)s] )) && return 1 - - response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ -%(complete_var)s=zsh_complete %(prog_name)s)}") - - for type key descr in ${response}; do - if [[ "$type" == "plain" ]]; then - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - elif [[ "$type" == "dir" ]]; then - _path_files -/ - elif [[ "$type" == "file" ]]; then - _path_files -f - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -a completions - fi -} - -if [[ $zsh_eval_context[-1] == loadautofunc ]]; then - # autoload from fpath, call function directly - %(complete_func)s "$@" -else - # eval/source/. command, register function for later - compdef %(complete_func)s %(prog_name)s -fi -""" - -_SOURCE_FISH = """\ -function %(complete_func)s; - set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ -COMP_CWORD=(commandline -t) %(prog_name)s); - - for completion in $response; - set -l metadata (string split "," $completion); - - if test $metadata[1] = "dir"; - __fish_complete_directories $metadata[2]; - else if test $metadata[1] = "file"; - __fish_complete_path $metadata[2]; - else if test $metadata[1] = "plain"; - echo $metadata[2]; - end; - end; -end; - -complete --no-files --command %(prog_name)s --arguments \ -"(%(complete_func)s)"; -""" - - -class ShellComplete: - """Base class for providing shell completion support. A subclass for - a given shell will override attributes and methods to implement the - completion instructions (``source`` and ``complete``). - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - - .. versionadded:: 8.0 - """ - - name: t.ClassVar[str] - """Name to register the shell as with :func:`add_completion_class`. - This is used in completion instructions (``{name}_source`` and - ``{name}_complete``). - """ - - source_template: t.ClassVar[str] - """Completion script template formatted by :meth:`source`. This must - be provided by subclasses. - """ - - def __init__( - self, - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - ) -> None: - self.cli = cli - self.ctx_args = ctx_args - self.prog_name = prog_name - self.complete_var = complete_var - - @property - def func_name(self) -> str: - """The name of the shell function defined by the completion - script. 
- """ - safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) - return f"_{safe_name}_completion" - - def source_vars(self) -> dict[str, t.Any]: - """Vars for formatting :attr:`source_template`. - - By default this provides ``complete_func``, ``complete_var``, - and ``prog_name``. - """ - return { - "complete_func": self.func_name, - "complete_var": self.complete_var, - "prog_name": self.prog_name, - } - - def source(self) -> str: - """Produce the shell script that defines the completion - function. By default this ``%``-style formats - :attr:`source_template` with the dict returned by - :meth:`source_vars`. - """ - return self.source_template % self.source_vars() - - def get_completion_args(self) -> tuple[list[str], str]: - """Use the env vars defined by the shell script to return a - tuple of ``args, incomplete``. This must be implemented by - subclasses. - """ - raise NotImplementedError - - def get_completions(self, args: list[str], incomplete: str) -> list[CompletionItem]: - """Determine the context and last complete command or parameter - from the complete args. Call that object's ``shell_complete`` - method to get the completions for the incomplete value. - - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) - obj, incomplete = _resolve_incomplete(ctx, args, incomplete) - return obj.shell_complete(ctx, incomplete) - - def format_completion(self, item: CompletionItem) -> str: - """Format a completion item into the form recognized by the - shell script. This must be implemented by subclasses. - - :param item: Completion item to format. - """ - raise NotImplementedError - - def complete(self) -> str: - """Produce the completion data to send back to the shell. - - By default this calls :meth:`get_completion_args`, gets the - completions, then calls :meth:`format_completion` for each - completion. - """ - args, incomplete = self.get_completion_args() - completions = self.get_completions(args, incomplete) - out = [self.format_completion(item) for item in completions] - return "\n".join(out) - - -class BashComplete(ShellComplete): - """Shell completion for Bash.""" - - name = "bash" - source_template = _SOURCE_BASH - - @staticmethod - def _check_version() -> None: - import shutil - import subprocess - - bash_exe = shutil.which("bash") - - if bash_exe is None: - match = None - else: - output = subprocess.run( - [bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'], - stdout=subprocess.PIPE, - ) - match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) - - if match is not None: - major, minor = match.groups() - - if major < "4" or major == "4" and minor < "4": - echo( - _( - "Shell completion is not supported for Bash" - " versions older than 4.4." 
- ), - err=True, - ) - else: - echo( - _("Couldn't detect Bash version, shell completion is not supported."), - err=True, - ) - - def source(self) -> str: - self._check_version() - return super().source() - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type},{item.value}" - - -class ZshComplete(ShellComplete): - """Shell completion for Zsh.""" - - name = "zsh" - source_template = _SOURCE_ZSH - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" - - -class FishComplete(ShellComplete): - """Shell completion for Fish.""" - - name = "fish" - source_template = _SOURCE_FISH - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - incomplete = os.environ["COMP_CWORD"] - args = cwords[1:] - - # Fish stores the partial word in both COMP_WORDS and - # COMP_CWORD, remove it from complete args. - if incomplete and args and args[-1] == incomplete: - args.pop() - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - if item.help: - return f"{item.type},{item.value}\t{item.help}" - - return f"{item.type},{item.value}" - - -ShellCompleteType = t.TypeVar("ShellCompleteType", bound="type[ShellComplete]") - - -_available_shells: dict[str, type[ShellComplete]] = { - "bash": BashComplete, - "fish": FishComplete, - "zsh": ZshComplete, -} - - -def add_completion_class( - cls: ShellCompleteType, name: str | None = None -) -> ShellCompleteType: - """Register a :class:`ShellComplete` subclass under the given name. - The name will be provided by the completion instruction environment - variable during completion. - - :param cls: The completion class that will handle completion for the - shell. - :param name: Name to register the class under. Defaults to the - class's ``name`` attribute. - """ - if name is None: - name = cls.name - - _available_shells[name] = cls - - return cls - - -def get_completion_class(shell: str) -> type[ShellComplete] | None: - """Look up a registered :class:`ShellComplete` subclass by the name - provided by the completion instruction environment variable. If the - name isn't registered, returns ``None``. - - :param shell: Name the class is registered under. - """ - return _available_shells.get(shell) - - -def split_arg_string(string: str) -> list[str]: - """Split an argument string as with :func:`shlex.split`, but don't - fail if the string is incomplete. Ignores a missing closing quote or - incomplete escape sequence and uses the partial token as-is. - - .. code-block:: python - - split_arg_string("example 'my file") - ["example", "my file"] - - split_arg_string("example my\\") - ["example", "my"] - - :param string: String to split. - - .. versionchanged:: 8.2 - Moved to ``shell_completion`` from ``parser``. 
- """ - import shlex - - lex = shlex.shlex(string, posix=True) - lex.whitespace_split = True - lex.commenters = "" - out = [] - - try: - for token in lex: - out.append(token) - except ValueError: - # Raised when end-of-string is reached in an invalid state. Use - # the partial token as-is. The quote or escape character is in - # lex.state, not lex.token. - out.append(lex.token) - - return out - - -def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: - """Determine if the given parameter is an argument that can still - accept values. - - :param ctx: Invocation context for the command represented by the - parsed complete args. - :param param: Argument object being checked. - """ - if not isinstance(param, Argument): - return False - - assert param.name is not None - # Will be None if expose_value is False. - value = ctx.params.get(param.name) - return ( - param.nargs == -1 - or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE - or ( - param.nargs > 1 - and isinstance(value, (tuple, list)) - and len(value) < param.nargs - ) - ) - - -def _start_of_option(ctx: Context, value: str) -> bool: - """Check if the value looks like the start of an option.""" - if not value: - return False - - c = value[0] - return c in ctx._opt_prefixes - - -def _is_incomplete_option(ctx: Context, args: list[str], param: Parameter) -> bool: - """Determine if the given parameter is an option that needs a value. - - :param args: List of complete args before the incomplete value. - :param param: Option object being checked. - """ - if not isinstance(param, Option): - return False - - if param.is_flag or param.count: - return False - - last_option = None - - for index, arg in enumerate(reversed(args)): - if index + 1 > param.nargs: - break - - if _start_of_option(ctx, arg): - last_option = arg - - return last_option is not None and last_option in param.opts - - -def _resolve_context( - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - args: list[str], -) -> Context: - """Produce the context hierarchy starting with the command and - traversing the complete arguments. This only follows the commands, - it doesn't trigger input prompts or callbacks. - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param args: List of complete args before the incomplete value. - """ - ctx_args["resilient_parsing"] = True - with cli.make_context(prog_name, args.copy(), **ctx_args) as ctx: - args = ctx._protected_args + ctx.args - - while args: - command = ctx.command - - if isinstance(command, Group): - if not command.chain: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - with cmd.make_context( - name, args, parent=ctx, resilient_parsing=True - ) as sub_ctx: - ctx = sub_ctx - args = ctx._protected_args + ctx.args - else: - sub_ctx = ctx - - while args: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - with cmd.make_context( - name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - resilient_parsing=True, - ) as sub_sub_ctx: - sub_ctx = sub_sub_ctx - args = sub_ctx.args - - ctx = sub_ctx - args = [*sub_ctx._protected_args, *sub_ctx.args] - else: - break - - return ctx - - -def _resolve_incomplete( - ctx: Context, args: list[str], incomplete: str -) -> tuple[Command | Parameter, str]: - """Find the Click object that will handle the completion of the - incomplete value. Return the object and the incomplete value. 
- - :param ctx: Invocation context for the command represented by - the parsed complete args. - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - # Different shells treat an "=" between a long option name and - # value differently. Might keep the value joined, return the "=" - # as a separate item, or return the split name and value. Always - # split and discard the "=" to make completion easier. - if incomplete == "=": - incomplete = "" - elif "=" in incomplete and _start_of_option(ctx, incomplete): - name, _, incomplete = incomplete.partition("=") - args.append(name) - - # The "--" marker tells Click to stop treating values as options - # even if they start with the option character. If it hasn't been - # given and the incomplete arg looks like an option, the current - # command will provide option name completions. - if "--" not in args and _start_of_option(ctx, incomplete): - return ctx.command, incomplete - - params = ctx.command.get_params(ctx) - - # If the last complete arg is an option name with an incomplete - # value, the option will provide value completions. - for param in params: - if _is_incomplete_option(ctx, args, param): - return param, incomplete - - # It's not an option name or value. The first argument without a - # parsed value will provide value completions. - for param in params: - if _is_incomplete_argument(ctx, param): - return param, incomplete - - # There were no unparsed arguments, the command may be a group that - # will provide command name completions. - return ctx.command, incomplete diff --git a/venv/lib/python3.10/site-packages/click/termui.py b/venv/lib/python3.10/site-packages/click/termui.py deleted file mode 100644 index dcbb222..0000000 --- a/venv/lib/python3.10/site-packages/click/termui.py +++ /dev/null @@ -1,877 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import inspect -import io -import itertools -import sys -import typing as t -from contextlib import AbstractContextManager -from gettext import gettext as _ - -from ._compat import isatty -from ._compat import strip_ansi -from .exceptions import Abort -from .exceptions import UsageError -from .globals import resolve_color_default -from .types import Choice -from .types import convert_type -from .types import ParamType -from .utils import echo -from .utils import LazyFile - -if t.TYPE_CHECKING: - from ._termui_impl import ProgressBar - -V = t.TypeVar("V") - -# The prompt functions to use. The doc tools currently override these -# functions to customize how they work. 
-visible_prompt_func: t.Callable[[str], str] = input - -_ansi_colors = { - "black": 30, - "red": 31, - "green": 32, - "yellow": 33, - "blue": 34, - "magenta": 35, - "cyan": 36, - "white": 37, - "reset": 39, - "bright_black": 90, - "bright_red": 91, - "bright_green": 92, - "bright_yellow": 93, - "bright_blue": 94, - "bright_magenta": 95, - "bright_cyan": 96, - "bright_white": 97, -} -_ansi_reset_all = "\033[0m" - - -def hidden_prompt_func(prompt: str) -> str: - import getpass - - return getpass.getpass(prompt) - - -def _build_prompt( - text: str, - suffix: str, - show_default: bool = False, - default: t.Any | None = None, - show_choices: bool = True, - type: ParamType | None = None, -) -> str: - prompt = text - if type is not None and show_choices and isinstance(type, Choice): - prompt += f" ({', '.join(map(str, type.choices))})" - if default is not None and show_default: - prompt = f"{prompt} [{_format_default(default)}]" - return f"{prompt}{suffix}" - - -def _format_default(default: t.Any) -> t.Any: - if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): - return default.name - - return default - - -def prompt( - text: str, - default: t.Any | None = None, - hide_input: bool = False, - confirmation_prompt: bool | str = False, - type: ParamType | t.Any | None = None, - value_proc: t.Callable[[str], t.Any] | None = None, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, - show_choices: bool = True, -) -> t.Any: - """Prompts a user for input. This is a convenience function that can - be used to prompt a user for input later. - - If the user aborts the input by sending an interrupt signal, this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the text to show for the prompt. - :param default: the default value to use if no input happens. If this - is not given it will prompt until it's aborted. - :param hide_input: if this is set to true then the input value will - be hidden. - :param confirmation_prompt: Prompt a second time to confirm the - value. Can be set to a string instead of ``True`` to customize - the message. - :param type: the type to use to check the value against. - :param value_proc: if this parameter is provided it's a function that - is invoked instead of the type conversion to - convert a value. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - :param show_choices: Show or hide choices if the passed type is a Choice. - For example if type is a Choice of either day or week, - show_choices is true and text is "Group by" then the - prompt will be "Group by (day, week): ". - - .. versionadded:: 8.0 - ``confirmation_prompt`` can be a custom string. - - .. versionadded:: 7.0 - Added the ``show_choices`` parameter. - - .. versionadded:: 6.0 - Added unicode support for cmd.exe on Windows. - - .. versionadded:: 4.0 - Added the `err` parameter. - - """ - - def prompt_func(text: str) -> str: - f = hidden_prompt_func if hide_input else visible_prompt_func - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(text.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. 
- return f(" ") - except (KeyboardInterrupt, EOFError): - # getpass doesn't print a newline if the user aborts input with ^C. - # Allegedly this behavior is inherited from getpass(3). - # A doc bug has been filed at https://bugs.python.org/issue24711 - if hide_input: - echo(None, err=err) - raise Abort() from None - - if value_proc is None: - value_proc = convert_type(type, default) - - prompt = _build_prompt( - text, prompt_suffix, show_default, default, show_choices, type - ) - - if confirmation_prompt: - if confirmation_prompt is True: - confirmation_prompt = _("Repeat for confirmation") - - confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) - - while True: - while True: - value = prompt_func(prompt) - if value: - break - elif default is not None: - value = default - break - try: - result = value_proc(value) - except UsageError as e: - if hide_input: - echo(_("Error: The value you entered was invalid."), err=err) - else: - echo(_("Error: {e.message}").format(e=e), err=err) - continue - if not confirmation_prompt: - return result - while True: - value2 = prompt_func(confirmation_prompt) - is_empty = not value and not value2 - if value2 or is_empty: - break - if value == value2: - return result - echo(_("Error: The two entered values do not match."), err=err) - - -def confirm( - text: str, - default: bool | None = False, - abort: bool = False, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, -) -> bool: - """Prompts for confirmation (yes/no question). - - If the user aborts the input by sending a interrupt signal this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the question to ask. - :param default: The default value to use when no input is given. If - ``None``, repeat until input is given. - :param abort: if this is set to `True` a negative answer aborts the - exception by raising :exc:`Abort`. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - - .. versionchanged:: 8.0 - Repeat until input is given if ``default`` is ``None``. - - .. versionadded:: 4.0 - Added the ``err`` parameter. - """ - prompt = _build_prompt( - text, - prompt_suffix, - show_default, - "y/n" if default is None else ("Y/n" if default else "y/N"), - ) - - while True: - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(prompt.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - value = visible_prompt_func(" ").lower().strip() - except (KeyboardInterrupt, EOFError): - raise Abort() from None - if value in ("y", "yes"): - rv = True - elif value in ("n", "no"): - rv = False - elif default is not None and value == "": - rv = default - else: - echo(_("Error: invalid input"), err=err) - continue - break - if abort and not rv: - raise Abort() - return rv - - -def echo_via_pager( - text_or_generator: cabc.Iterable[str] | t.Callable[[], cabc.Iterable[str]] | str, - color: bool | None = None, -) -> None: - """This function takes a text and shows it via an environment specific - pager on stdout. - - .. versionchanged:: 3.0 - Added the `color` flag. - - :param text_or_generator: the text to page, or alternatively, a - generator emitting the text to page. 
- :param color: controls if the pager supports ANSI colors or not. The - default is autodetection. - """ - color = resolve_color_default(color) - - if inspect.isgeneratorfunction(text_or_generator): - i = t.cast("t.Callable[[], cabc.Iterable[str]]", text_or_generator)() - elif isinstance(text_or_generator, str): - i = [text_or_generator] - else: - i = iter(t.cast("cabc.Iterable[str]", text_or_generator)) - - # convert every element of i to a text type if necessary - text_generator = (el if isinstance(el, str) else str(el) for el in i) - - from ._termui_impl import pager - - return pager(itertools.chain(text_generator, "\n"), color) - - -@t.overload -def progressbar( - *, - length: int, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[int]: ... - - -@t.overload -def progressbar( - iterable: cabc.Iterable[V] | None = None, - length: int | None = None, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[V]: ... - - -def progressbar( - iterable: cabc.Iterable[V] | None = None, - length: int | None = None, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[V]: - """This function creates an iterable context manager that can be used - to iterate over something while showing a progress bar. It will - either iterate over the `iterable` or `length` items (that are counted - up). While iteration happens, this function will print a rendered - progress bar to the given `file` (defaults to stdout) and will attempt - to calculate remaining time and more. By default, this progress bar - will not be rendered if the file is not a terminal. - - The context manager creates the progress bar. When the context - manager is entered the progress bar is already created. With every - iteration over the progress bar, the iterable passed to the bar is - advanced and the bar is updated. When the context manager exits, - a newline is printed and the progress bar is finalized on screen. - - Note: The progress bar is currently designed for use cases where the - total progress can be expected to take at least several seconds. - Because of this, the ProgressBar class object won't display - progress that is considered too fast, and progress where the time - between steps is less than a second. - - No printing must happen or the progress bar will be unintentionally - destroyed. 
- - Example usage:: - - with progressbar(items) as bar: - for item in bar: - do_something_with(item) - - Alternatively, if no iterable is specified, one can manually update the - progress bar through the `update()` method instead of directly - iterating over the progress bar. The update method accepts the number - of steps to increment the bar with:: - - with progressbar(length=chunks.total_bytes) as bar: - for chunk in chunks: - process_chunk(chunk) - bar.update(chunks.bytes) - - The ``update()`` method also takes an optional value specifying the - ``current_item`` at the new position. This is useful when used - together with ``item_show_func`` to customize the output for each - manual step:: - - with click.progressbar( - length=total_size, - label='Unzipping archive', - item_show_func=lambda a: a.filename - ) as bar: - for archive in zip_file: - archive.extract() - bar.update(archive.size, archive) - - :param iterable: an iterable to iterate over. If not provided the length - is required. - :param length: the number of items to iterate over. By default the - progressbar will attempt to ask the iterator about its - length, which might or might not work. If an iterable is - also provided this parameter can be used to override the - length. If an iterable is not provided the progress bar - will iterate over a range of that length. - :param label: the label to show next to the progress bar. - :param hidden: hide the progressbar. Defaults to ``False``. When no tty is - detected, it will only print the progressbar label. Setting this to - ``False`` also disables that. - :param show_eta: enables or disables the estimated time display. This is - automatically disabled if the length cannot be - determined. - :param show_percent: enables or disables the percentage display. The - default is `True` if the iterable has a length or - `False` if not. - :param show_pos: enables or disables the absolute position display. The - default is `False`. - :param item_show_func: A function called with the current item which - can return a string to show next to the progress bar. If the - function returns ``None`` nothing is shown. The current item can - be ``None``, such as when entering and exiting the bar. - :param fill_char: the character to use to show the filled part of the - progress bar. - :param empty_char: the character to use to show the non-filled part of - the progress bar. - :param bar_template: the format string to use as template for the bar. - The parameters in it are ``label`` for the label, - ``bar`` for the progress bar and ``info`` for the - info section. - :param info_sep: the separator between multiple info items (eta etc.) - :param width: the width of the progress bar in characters, 0 means full - terminal width - :param file: The file to write to. If this is not a terminal then - only the label is printed. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are included anywhere in the progress bar output - which is not the case by default. - :param update_min_steps: Render only when this many updates have - completed. This allows tuning for very fast iterators. - - .. versionadded:: 8.2 - The ``hidden`` argument. - - .. versionchanged:: 8.0 - Output is shown even if execution time is less than 0.5 seconds. - - .. versionchanged:: 8.0 - ``item_show_func`` shows the current item, not the previous one. - - .. versionchanged:: 8.0 - Labels are echoed if the output is not a TTY. 
Reverts a change - in 7.0 that removed all output. - - .. versionadded:: 8.0 - The ``update_min_steps`` parameter. - - .. versionadded:: 4.0 - The ``color`` parameter and ``update`` method. - - .. versionadded:: 2.0 - """ - from ._termui_impl import ProgressBar - - color = resolve_color_default(color) - return ProgressBar( - iterable=iterable, - length=length, - hidden=hidden, - show_eta=show_eta, - show_percent=show_percent, - show_pos=show_pos, - item_show_func=item_show_func, - fill_char=fill_char, - empty_char=empty_char, - bar_template=bar_template, - info_sep=info_sep, - file=file, - label=label, - width=width, - color=color, - update_min_steps=update_min_steps, - ) - - -def clear() -> None: - """Clears the terminal screen. This will have the effect of clearing - the whole visible space of the terminal and moving the cursor to the - top left. This does not do anything if not connected to a terminal. - - .. versionadded:: 2.0 - """ - if not isatty(sys.stdout): - return - - # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor - echo("\033[2J\033[1;1H", nl=False) - - -def _interpret_color(color: int | tuple[int, int, int] | str, offset: int = 0) -> str: - if isinstance(color, int): - return f"{38 + offset};5;{color:d}" - - if isinstance(color, (tuple, list)): - r, g, b = color - return f"{38 + offset};2;{r:d};{g:d};{b:d}" - - return str(_ansi_colors[color] + offset) - - -def style( - text: t.Any, - fg: int | tuple[int, int, int] | str | None = None, - bg: int | tuple[int, int, int] | str | None = None, - bold: bool | None = None, - dim: bool | None = None, - underline: bool | None = None, - overline: bool | None = None, - italic: bool | None = None, - blink: bool | None = None, - reverse: bool | None = None, - strikethrough: bool | None = None, - reset: bool = True, -) -> str: - """Styles a text with ANSI styles and returns the new string. By - default the styling is self contained which means that at the end - of the string a reset code is issued. This can be prevented by - passing ``reset=False``. - - Examples:: - - click.echo(click.style('Hello World!', fg='green')) - click.echo(click.style('ATTENTION!', blink=True)) - click.echo(click.style('Some things', reverse=True, fg='cyan')) - click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) - - Supported color names: - - * ``black`` (might be a gray) - * ``red`` - * ``green`` - * ``yellow`` (might be an orange) - * ``blue`` - * ``magenta`` - * ``cyan`` - * ``white`` (might be light gray) - * ``bright_black`` - * ``bright_red`` - * ``bright_green`` - * ``bright_yellow`` - * ``bright_blue`` - * ``bright_magenta`` - * ``bright_cyan`` - * ``bright_white`` - * ``reset`` (reset the color code only) - - If the terminal supports it, color may also be specified as: - - - An integer in the interval [0, 255]. The terminal must support - 8-bit/256-color mode. - - An RGB tuple of three integers in [0, 255]. The terminal must - support 24-bit/true-color mode. - - See https://en.wikipedia.org/wiki/ANSI_color and - https://gist.github.com/XVilka/8346728 for more information. - - :param text: the string to style with ansi codes. - :param fg: if provided this will become the foreground color. - :param bg: if provided this will become the background color. - :param bold: if provided this will enable or disable bold mode. - :param dim: if provided this will enable or disable dim mode. This is - badly supported. - :param underline: if provided this will enable or disable underline. 
- :param overline: if provided this will enable or disable overline. - :param italic: if provided this will enable or disable italic. - :param blink: if provided this will enable or disable blinking. - :param reverse: if provided this will enable or disable inverse - rendering (foreground becomes background and the - other way round). - :param strikethrough: if provided this will enable or disable - striking through text. - :param reset: by default a reset-all code is added at the end of the - string which means that styles do not carry over. This - can be disabled to compose styles. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. - - .. versionchanged:: 8.0 - Added support for 256 and RGB color codes. - - .. versionchanged:: 8.0 - Added the ``strikethrough``, ``italic``, and ``overline`` - parameters. - - .. versionchanged:: 7.0 - Added support for bright colors. - - .. versionadded:: 2.0 - """ - if not isinstance(text, str): - text = str(text) - - bits = [] - - if fg: - try: - bits.append(f"\033[{_interpret_color(fg)}m") - except KeyError: - raise TypeError(f"Unknown color {fg!r}") from None - - if bg: - try: - bits.append(f"\033[{_interpret_color(bg, 10)}m") - except KeyError: - raise TypeError(f"Unknown color {bg!r}") from None - - if bold is not None: - bits.append(f"\033[{1 if bold else 22}m") - if dim is not None: - bits.append(f"\033[{2 if dim else 22}m") - if underline is not None: - bits.append(f"\033[{4 if underline else 24}m") - if overline is not None: - bits.append(f"\033[{53 if overline else 55}m") - if italic is not None: - bits.append(f"\033[{3 if italic else 23}m") - if blink is not None: - bits.append(f"\033[{5 if blink else 25}m") - if reverse is not None: - bits.append(f"\033[{7 if reverse else 27}m") - if strikethrough is not None: - bits.append(f"\033[{9 if strikethrough else 29}m") - bits.append(text) - if reset: - bits.append(_ansi_reset_all) - return "".join(bits) - - -def unstyle(text: str) -> str: - """Removes ANSI styling information from a string. Usually it's not - necessary to use this function as Click's echo function will - automatically remove styling if necessary. - - .. versionadded:: 2.0 - - :param text: the text to remove style information from. - """ - return strip_ansi(text) - - -def secho( - message: t.Any | None = None, - file: t.IO[t.AnyStr] | None = None, - nl: bool = True, - err: bool = False, - color: bool | None = None, - **styles: t.Any, -) -> None: - """This function combines :func:`echo` and :func:`style` into one - call. As such the following two calls are the same:: - - click.secho('Hello World!', fg='green') - click.echo(click.style('Hello World!', fg='green')) - - All keyword arguments are forwarded to the underlying functions - depending on which one they go with. - - Non-string types will be converted to :class:`str`. However, - :class:`bytes` are passed directly to :meth:`echo` without applying - style. If you want to style bytes that represent text, call - :meth:`bytes.decode` first. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. Bytes are - passed through without style applied. - - .. 
versionadded:: 2.0 - """ - if message is not None and not isinstance(message, (bytes, bytearray)): - message = style(message, **styles) - - return echo(message, file=file, nl=nl, err=err, color=color) - - -@t.overload -def edit( - text: bytes | bytearray, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = False, - extension: str = ".txt", -) -> bytes | None: ... - - -@t.overload -def edit( - text: str, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", -) -> str | None: ... - - -@t.overload -def edit( - text: None = None, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - filename: str | cabc.Iterable[str] | None = None, -) -> None: ... - - -def edit( - text: str | bytes | bytearray | None = None, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - filename: str | cabc.Iterable[str] | None = None, -) -> str | bytes | bytearray | None: - r"""Edits the given text in the defined editor. If an editor is given - (should be the full path to the executable but the regular operating - system search path is used for finding the executable) it overrides - the detected editor. Optionally, some environment variables can be - used. If the editor is closed without changes, `None` is returned. In - case a file is edited directly the return value is always `None` and - `require_save` and `extension` are ignored. - - If the editor cannot be opened a :exc:`UsageError` is raised. - - Note for Windows: to simplify cross-platform usage, the newlines are - automatically converted from POSIX to Windows and vice versa. As such, - the message here will have ``\n`` as newline markers. - - :param text: the text to edit. - :param editor: optionally the editor to use. Defaults to automatic - detection. - :param env: environment variables to forward to the editor. - :param require_save: if this is true, then not saving in the editor - will make the return value become `None`. - :param extension: the extension to tell the editor about. This defaults - to `.txt` but changing this might change syntax - highlighting. - :param filename: if provided it will edit this file instead of the - provided text contents. It will not use a temporary - file as an indirection in that case. If the editor supports - editing multiple files at once, a sequence of files may be - passed as well. Invoke `click.file` once per file instead - if multiple files cannot be managed at once or editing the - files serially is desired. - - .. versionchanged:: 8.2.0 - ``filename`` now accepts any ``Iterable[str]`` in addition to a ``str`` - if the ``editor`` supports editing multiple files at once. - - """ - from ._termui_impl import Editor - - ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) - - if filename is None: - return ed.edit(text) - - if isinstance(filename, str): - filename = (filename,) - - ed.edit_files(filenames=filename) - return None - - -def launch(url: str, wait: bool = False, locate: bool = False) -> int: - """This function launches the given URL (or filename) in the default - viewer application for this file type. If this is an executable, it - might launch the executable in a new session. The return value is - the exit code of the launched application. Usually, ``0`` indicates - success. 
- - Examples:: - - click.launch('https://click.palletsprojects.com/') - click.launch('/my/downloaded/file', locate=True) - - .. versionadded:: 2.0 - - :param url: URL or filename of the thing to launch. - :param wait: Wait for the program to exit before returning. This - only works if the launched program blocks. In particular, - ``xdg-open`` on Linux does not block. - :param locate: if this is set to `True` then instead of launching the - application associated with the URL it will attempt to - launch a file manager with the file located. This - might have weird effects if the URL does not point to - the filesystem. - """ - from ._termui_impl import open_url - - return open_url(url, wait=wait, locate=locate) - - -# If this is provided, getchar() calls into this instead. This is used -# for unittesting purposes. -_getchar: t.Callable[[bool], str] | None = None - - -def getchar(echo: bool = False) -> str: - """Fetches a single character from the terminal and returns it. This - will always return a unicode character and under certain rare - circumstances this might return more than one character. The - situations which more than one character is returned is when for - whatever reason multiple characters end up in the terminal buffer or - standard input was not actually a terminal. - - Note that this will always read from the terminal, even if something - is piped into the standard input. - - Note for Windows: in rare cases when typing non-ASCII characters, this - function might wait for a second character and then return both at once. - This is because certain Unicode characters look like special-key markers. - - .. versionadded:: 2.0 - - :param echo: if set to `True`, the character read will also show up on - the terminal. The default is to not show it. - """ - global _getchar - - if _getchar is None: - from ._termui_impl import getchar as f - - _getchar = f - - return _getchar(echo) - - -def raw_terminal() -> AbstractContextManager[int]: - from ._termui_impl import raw_terminal as f - - return f() - - -def pause(info: str | None = None, err: bool = False) -> None: - """This command stops execution and waits for the user to press any - key to continue. This is similar to the Windows batch "pause" - command. If the program is not run through a terminal, this command - will instead do nothing. - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param info: The message to print before pausing. Defaults to - ``"Press any key to continue..."``. - :param err: if set to message goes to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - if not isatty(sys.stdin) or not isatty(sys.stdout): - return - - if info is None: - info = _("Press any key to continue...") - - try: - if info: - echo(info, nl=False, err=err) - try: - getchar() - except (KeyboardInterrupt, EOFError): - pass - finally: - if info: - echo(err=err) diff --git a/venv/lib/python3.10/site-packages/click/testing.py b/venv/lib/python3.10/site-packages/click/testing.py deleted file mode 100644 index 7c0e874..0000000 --- a/venv/lib/python3.10/site-packages/click/testing.py +++ /dev/null @@ -1,565 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import contextlib -import io -import os -import shlex -import shutil -import sys -import tempfile -import typing as t -from types import TracebackType - -from . import _compat -from . import formatting -from . import termui -from . 
import utils -from ._compat import _find_binary_reader - -if t.TYPE_CHECKING: - from _typeshed import ReadableBuffer - - from .core import Command - - -class EchoingStdin: - def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: - self._input = input - self._output = output - self._paused = False - - def __getattr__(self, x: str) -> t.Any: - return getattr(self._input, x) - - def _echo(self, rv: bytes) -> bytes: - if not self._paused: - self._output.write(rv) - - return rv - - def read(self, n: int = -1) -> bytes: - return self._echo(self._input.read(n)) - - def read1(self, n: int = -1) -> bytes: - return self._echo(self._input.read1(n)) # type: ignore - - def readline(self, n: int = -1) -> bytes: - return self._echo(self._input.readline(n)) - - def readlines(self) -> list[bytes]: - return [self._echo(x) for x in self._input.readlines()] - - def __iter__(self) -> cabc.Iterator[bytes]: - return iter(self._echo(x) for x in self._input) - - def __repr__(self) -> str: - return repr(self._input) - - -@contextlib.contextmanager -def _pause_echo(stream: EchoingStdin | None) -> cabc.Iterator[None]: - if stream is None: - yield - else: - stream._paused = True - yield - stream._paused = False - - -class BytesIOCopy(io.BytesIO): - """Patch ``io.BytesIO`` to let the written stream be copied to another. - - .. versionadded:: 8.2 - """ - - def __init__(self, copy_to: io.BytesIO) -> None: - super().__init__() - self.copy_to = copy_to - - def flush(self) -> None: - super().flush() - self.copy_to.flush() - - def write(self, b: ReadableBuffer) -> int: - self.copy_to.write(b) - return super().write(b) - - -class StreamMixer: - """Mixes `` and `` streams. - - The result is available in the ``output`` attribute. - - .. versionadded:: 8.2 - """ - - def __init__(self) -> None: - self.output: io.BytesIO = io.BytesIO() - self.stdout: io.BytesIO = BytesIOCopy(copy_to=self.output) - self.stderr: io.BytesIO = BytesIOCopy(copy_to=self.output) - - -class _NamedTextIOWrapper(io.TextIOWrapper): - def __init__( - self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any - ) -> None: - super().__init__(buffer, **kwargs) - self._name = name - self._mode = mode - - @property - def name(self) -> str: - return self._name - - @property - def mode(self) -> str: - return self._mode - - def __next__(self) -> str: # type: ignore - try: - line = super().__next__() - except StopIteration as e: - raise EOFError() from e - return line - - -def make_input_stream( - input: str | bytes | t.IO[t.Any] | None, charset: str -) -> t.BinaryIO: - # Is already an input stream. - if hasattr(input, "read"): - rv = _find_binary_reader(t.cast("t.IO[t.Any]", input)) - - if rv is not None: - return rv - - raise TypeError("Could not find binary reader for input stream.") - - if input is None: - input = b"" - elif isinstance(input, str): - input = input.encode(charset) - - return io.BytesIO(input) - - -class Result: - """Holds the captured result of an invoked CLI script. - - :param runner: The runner that created the result - :param stdout_bytes: The standard output as bytes. - :param stderr_bytes: The standard error as bytes. - :param output_bytes: A mix of ``stdout_bytes`` and ``stderr_bytes``, as the - user would see it in its terminal. - :param return_value: The value returned from the invoked command. - :param exit_code: The exit code as integer. - :param exception: The exception that happened if one did. - :param exc_info: Exception information (exception type, exception instance, - traceback type). - - .. 
versionchanged:: 8.2 - ``stderr_bytes`` no longer optional, ``output_bytes`` introduced and - ``mix_stderr`` has been removed. - - .. versionadded:: 8.0 - Added ``return_value``. - """ - - def __init__( - self, - runner: CliRunner, - stdout_bytes: bytes, - stderr_bytes: bytes, - output_bytes: bytes, - return_value: t.Any, - exit_code: int, - exception: BaseException | None, - exc_info: tuple[type[BaseException], BaseException, TracebackType] - | None = None, - ): - self.runner = runner - self.stdout_bytes = stdout_bytes - self.stderr_bytes = stderr_bytes - self.output_bytes = output_bytes - self.return_value = return_value - self.exit_code = exit_code - self.exception = exception - self.exc_info = exc_info - - @property - def output(self) -> str: - """The terminal output as unicode string, as the user would see it. - - .. versionchanged:: 8.2 - No longer a proxy for ``self.stdout``. Now has its own independent stream - that is mixing `` and ``, in the order they were written. - """ - return self.output_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stdout(self) -> str: - """The standard output as unicode string.""" - return self.stdout_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stderr(self) -> str: - """The standard error as unicode string. - - .. versionchanged:: 8.2 - No longer raise an exception, always returns the `` string. - """ - return self.stderr_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - def __repr__(self) -> str: - exc_str = repr(self.exception) if self.exception else "okay" - return f"<{type(self).__name__} {exc_str}>" - - -class CliRunner: - """The CLI runner provides functionality to invoke a Click command line - script for unittesting purposes in a isolated environment. This only - works in single-threaded systems without any concurrency as it changes the - global interpreter state. - - :param charset: the character set for the input and output data. - :param env: a dictionary with environment variables for overriding. - :param echo_stdin: if this is set to `True`, then reading from `` writes - to ``. This is useful for showing examples in - some circumstances. Note that regular prompts - will automatically echo the input. - :param catch_exceptions: Whether to catch any exceptions other than - ``SystemExit`` when running :meth:`~CliRunner.invoke`. - - .. versionchanged:: 8.2 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 8.2 - ``mix_stderr`` parameter has been removed. - """ - - def __init__( - self, - charset: str = "utf-8", - env: cabc.Mapping[str, str | None] | None = None, - echo_stdin: bool = False, - catch_exceptions: bool = True, - ) -> None: - self.charset = charset - self.env: cabc.Mapping[str, str | None] = env or {} - self.echo_stdin = echo_stdin - self.catch_exceptions = catch_exceptions - - def get_default_prog_name(self, cli: Command) -> str: - """Given a command object it will return the default program name - for it. The default is the `name` attribute or ``"root"`` if not - set. 
- """ - return cli.name or "root" - - def make_env( - self, overrides: cabc.Mapping[str, str | None] | None = None - ) -> cabc.Mapping[str, str | None]: - """Returns the environment overrides for invoking a script.""" - rv = dict(self.env) - if overrides: - rv.update(overrides) - return rv - - @contextlib.contextmanager - def isolation( - self, - input: str | bytes | t.IO[t.Any] | None = None, - env: cabc.Mapping[str, str | None] | None = None, - color: bool = False, - ) -> cabc.Iterator[tuple[io.BytesIO, io.BytesIO, io.BytesIO]]: - """A context manager that sets up the isolation for invoking of a - command line tool. This sets up `` with the given input data - and `os.environ` with the overrides from the given dictionary. - This also rebinds some internals in Click to be mocked (like the - prompt functionality). - - This is automatically done in the :meth:`invoke` method. - - :param input: the input stream to put into `sys.stdin`. - :param env: the environment overrides as dictionary. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionadded:: 8.2 - An additional output stream is returned, which is a mix of - `` and `` streams. - - .. versionchanged:: 8.2 - Always returns the `` stream. - - .. versionchanged:: 8.0 - `` is opened with ``errors="backslashreplace"`` - instead of the default ``"strict"``. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - """ - bytes_input = make_input_stream(input, self.charset) - echo_input = None - - old_stdin = sys.stdin - old_stdout = sys.stdout - old_stderr = sys.stderr - old_forced_width = formatting.FORCED_WIDTH - formatting.FORCED_WIDTH = 80 - - env = self.make_env(env) - - stream_mixer = StreamMixer() - - if self.echo_stdin: - bytes_input = echo_input = t.cast( - t.BinaryIO, EchoingStdin(bytes_input, stream_mixer.stdout) - ) - - sys.stdin = text_input = _NamedTextIOWrapper( - bytes_input, encoding=self.charset, name="", mode="r" - ) - - if self.echo_stdin: - # Force unbuffered reads, otherwise TextIOWrapper reads a - # large chunk which is echoed early. 
- text_input._CHUNK_SIZE = 1 # type: ignore - - sys.stdout = _NamedTextIOWrapper( - stream_mixer.stdout, encoding=self.charset, name="", mode="w" - ) - - sys.stderr = _NamedTextIOWrapper( - stream_mixer.stderr, - encoding=self.charset, - name="", - mode="w", - errors="backslashreplace", - ) - - @_pause_echo(echo_input) # type: ignore - def visible_input(prompt: str | None = None) -> str: - sys.stdout.write(prompt or "") - val = next(text_input).rstrip("\r\n") - sys.stdout.write(f"{val}\n") - sys.stdout.flush() - return val - - @_pause_echo(echo_input) # type: ignore - def hidden_input(prompt: str | None = None) -> str: - sys.stdout.write(f"{prompt or ''}\n") - sys.stdout.flush() - return next(text_input).rstrip("\r\n") - - @_pause_echo(echo_input) # type: ignore - def _getchar(echo: bool) -> str: - char = sys.stdin.read(1) - - if echo: - sys.stdout.write(char) - - sys.stdout.flush() - return char - - default_color = color - - def should_strip_ansi( - stream: t.IO[t.Any] | None = None, color: bool | None = None - ) -> bool: - if color is None: - return not default_color - return not color - - old_visible_prompt_func = termui.visible_prompt_func - old_hidden_prompt_func = termui.hidden_prompt_func - old__getchar_func = termui._getchar - old_should_strip_ansi = utils.should_strip_ansi # type: ignore - old__compat_should_strip_ansi = _compat.should_strip_ansi - termui.visible_prompt_func = visible_input - termui.hidden_prompt_func = hidden_input - termui._getchar = _getchar - utils.should_strip_ansi = should_strip_ansi # type: ignore - _compat.should_strip_ansi = should_strip_ansi - - old_env = {} - try: - for key, value in env.items(): - old_env[key] = os.environ.get(key) - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - yield (stream_mixer.stdout, stream_mixer.stderr, stream_mixer.output) - finally: - for key, value in old_env.items(): - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - sys.stdout = old_stdout - sys.stderr = old_stderr - sys.stdin = old_stdin - termui.visible_prompt_func = old_visible_prompt_func - termui.hidden_prompt_func = old_hidden_prompt_func - termui._getchar = old__getchar_func - utils.should_strip_ansi = old_should_strip_ansi # type: ignore - _compat.should_strip_ansi = old__compat_should_strip_ansi - formatting.FORCED_WIDTH = old_forced_width - - def invoke( - self, - cli: Command, - args: str | cabc.Sequence[str] | None = None, - input: str | bytes | t.IO[t.Any] | None = None, - env: cabc.Mapping[str, str | None] | None = None, - catch_exceptions: bool | None = None, - color: bool = False, - **extra: t.Any, - ) -> Result: - """Invokes a command in an isolated environment. The arguments are - forwarded directly to the command line script, the `extra` keyword - arguments are passed to the :meth:`~clickpkg.Command.main` function of - the command. - - This returns a :class:`Result` object. - - :param cli: the command to invoke - :param args: the arguments to invoke. It may be given as an iterable - or a string. When given as string it will be interpreted - as a Unix shell command. More details at - :func:`shlex.split`. - :param input: the input data for `sys.stdin`. - :param env: the environment overrides. - :param catch_exceptions: Whether to catch any other exceptions than - ``SystemExit``. If :data:`None`, the value - from :class:`CliRunner` is used. - :param extra: the keyword arguments to pass to :meth:`main`. 
- :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionadded:: 8.2 - The result object has the ``output_bytes`` attribute with - the mix of ``stdout_bytes`` and ``stderr_bytes``, as the user would - see it in its terminal. - - .. versionchanged:: 8.2 - The result object always returns the ``stderr_bytes`` stream. - - .. versionchanged:: 8.0 - The result object has the ``return_value`` attribute with - the value returned from the invoked command. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionchanged:: 3.0 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 3.0 - The result object has the ``exc_info`` attribute with the - traceback if available. - """ - exc_info = None - if catch_exceptions is None: - catch_exceptions = self.catch_exceptions - - with self.isolation(input=input, env=env, color=color) as outstreams: - return_value = None - exception: BaseException | None = None - exit_code = 0 - - if isinstance(args, str): - args = shlex.split(args) - - try: - prog_name = extra.pop("prog_name") - except KeyError: - prog_name = self.get_default_prog_name(cli) - - try: - return_value = cli.main(args=args or (), prog_name=prog_name, **extra) - except SystemExit as e: - exc_info = sys.exc_info() - e_code = t.cast("int | t.Any | None", e.code) - - if e_code is None: - e_code = 0 - - if e_code != 0: - exception = e - - if not isinstance(e_code, int): - sys.stdout.write(str(e_code)) - sys.stdout.write("\n") - e_code = 1 - - exit_code = e_code - - except Exception as e: - if not catch_exceptions: - raise - exception = e - exit_code = 1 - exc_info = sys.exc_info() - finally: - sys.stdout.flush() - sys.stderr.flush() - stdout = outstreams[0].getvalue() - stderr = outstreams[1].getvalue() - output = outstreams[2].getvalue() - - return Result( - runner=self, - stdout_bytes=stdout, - stderr_bytes=stderr, - output_bytes=output, - return_value=return_value, - exit_code=exit_code, - exception=exception, - exc_info=exc_info, # type: ignore - ) - - @contextlib.contextmanager - def isolated_filesystem( - self, temp_dir: str | os.PathLike[str] | None = None - ) -> cabc.Iterator[str]: - """A context manager that creates a temporary directory and - changes the current working directory to it. This isolates tests - that affect the contents of the CWD to prevent them from - interfering with each other. - - :param temp_dir: Create the temporary directory under this - directory. If given, the created directory is not removed - when exiting. - - .. versionchanged:: 8.0 - Added the ``temp_dir`` parameter. 
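# --- Illustrative sketch, not from the click codebase: how the CliRunner,
# --- invoke(), and isolated_filesystem() helpers described above fit together.
# --- The `touch` command and the file name are hypothetical.
import click
from click.testing import CliRunner

@click.command()
@click.argument("path")
def touch(path: str) -> None:
    open(path, "w").close()
    click.echo(f"created {path}")

def test_touch() -> None:
    runner = CliRunner()
    # isolated_filesystem() chdirs into a fresh temp dir so the test does not
    # touch the real working directory.
    with runner.isolated_filesystem():
        result = runner.invoke(touch, ["note.txt"])
        assert result.exit_code == 0
        assert result.output == "created note.txt\n"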
- """ - cwd = os.getcwd() - dt = tempfile.mkdtemp(dir=temp_dir) - os.chdir(dt) - - try: - yield dt - finally: - os.chdir(cwd) - - if temp_dir is None: - try: - shutil.rmtree(dt) - except OSError: - pass diff --git a/venv/lib/python3.10/site-packages/click/types.py b/venv/lib/python3.10/site-packages/click/types.py deleted file mode 100644 index 684cb3b..0000000 --- a/venv/lib/python3.10/site-packages/click/types.py +++ /dev/null @@ -1,1165 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import enum -import os -import stat -import sys -import typing as t -from datetime import datetime -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import _get_argv_encoding -from ._compat import open_stream -from .exceptions import BadParameter -from .utils import format_filename -from .utils import LazyFile -from .utils import safecall - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .core import Context - from .core import Parameter - from .shell_completion import CompletionItem - -ParamTypeValue = t.TypeVar("ParamTypeValue") - - -class ParamType: - """Represents the type of a parameter. Validates and converts values - from the command line or Python into the correct type. - - To implement a custom type, subclass and implement at least the - following: - - - The :attr:`name` class attribute must be set. - - Calling an instance of the type with ``None`` must return - ``None``. This is already implemented by default. - - :meth:`convert` must convert string values to the correct type. - - :meth:`convert` must accept values that are already the correct - type. - - It must be able to convert a value if the ``ctx`` and ``param`` - arguments are ``None``. This can occur when converting prompt - input. - """ - - is_composite: t.ClassVar[bool] = False - arity: t.ClassVar[int] = 1 - - #: the descriptive name of this type - name: str - - #: if a list of this type is expected and the value is pulled from a - #: string environment variable, this is what splits it up. `None` - #: means any whitespace. For all parameters the general rule is that - #: whitespace splits them up. The exception are paths and files which - #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on - #: Windows). - envvar_list_splitter: t.ClassVar[str | None] = None - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - # The class name without the "ParamType" suffix. - param_type = type(self).__name__.partition("ParamType")[0] - param_type = param_type.partition("ParameterType")[0] - - # Custom subclasses might not remember to set a name. - if hasattr(self, "name"): - name = self.name - else: - name = param_type - - return {"param_type": param_type, "name": name} - - def __call__( - self, - value: t.Any, - param: Parameter | None = None, - ctx: Context | None = None, - ) -> t.Any: - if value is not None: - return self.convert(value, param, ctx) - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - """Returns the metavar default for this param if it provides one.""" - - def get_missing_message(self, param: Parameter, ctx: Context | None) -> str | None: - """Optionally might return extra information about a missing - parameter. - - .. 
versionadded:: 2.0 - """ - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - """Convert the value to the correct type. This is not called if - the value is ``None`` (the missing value). - - This must accept string values from the command line, as well as - values that are already the correct type. It may also convert - other compatible types. - - The ``param`` and ``ctx`` arguments may be ``None`` in certain - situations, such as when converting prompt input. - - If the value cannot be converted, call :meth:`fail` with a - descriptive message. - - :param value: The value to convert. - :param param: The parameter that is using this type to convert - its value. May be ``None``. - :param ctx: The current context that arrived at this value. May - be ``None``. - """ - return value - - def split_envvar_value(self, rv: str) -> cabc.Sequence[str]: - """Given a value from an environment variable this splits it up - into small chunks depending on the defined envvar list splitter. - - If the splitter is set to `None`, which means that whitespace splits, - then leading and trailing whitespace is ignored. Otherwise, leading - and trailing splitters usually lead to empty items being included. - """ - return (rv or "").split(self.envvar_list_splitter) - - def fail( - self, - message: str, - param: Parameter | None = None, - ctx: Context | None = None, - ) -> t.NoReturn: - """Helper method to fail with an invalid value message.""" - raise BadParameter(message, ctx=ctx, param=param) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a list of - :class:`~click.shell_completion.CompletionItem` objects for the - incomplete value. Most types do not provide completions, but - some do, and this allows custom types to provide custom - completions as well. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - return [] - - -class CompositeParamType(ParamType): - is_composite = True - - @property - def arity(self) -> int: # type: ignore - raise NotImplementedError() - - -class FuncParamType(ParamType): - def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: - self.name: str = func.__name__ - self.func = func - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["func"] = self.func - return info_dict - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - try: - return self.func(value) - except ValueError: - try: - value = str(value) - except UnicodeError: - value = value.decode("utf-8", "replace") - - self.fail(value, param, ctx) - - -class UnprocessedParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - return value - - def __repr__(self) -> str: - return "UNPROCESSED" - - -class StringParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if isinstance(value, bytes): - enc = _get_argv_encoding() - try: - value = value.decode(enc) - except UnicodeError: - fs_enc = sys.getfilesystemencoding() - if fs_enc != enc: - try: - value = value.decode(fs_enc) - except UnicodeError: - value = value.decode("utf-8", "replace") - else: - value = value.decode("utf-8", "replace") - return value - return str(value) - - def __repr__(self) -> str: - return "STRING" - - -class Choice(ParamType, t.Generic[ParamTypeValue]): - """The choice type allows a value to be checked against a fixed set - of supported values. - - You may pass any iterable value which will be converted to a tuple - and thus will only be iterated once. - - The resulting value will always be one of the originally passed choices. - See :meth:`normalize_choice` for more info on the mapping of strings - to choices. See :ref:`choice-opts` for an example. - - :param case_sensitive: Set to false to make choices case - insensitive. Defaults to true. - - .. versionchanged:: 8.2.0 - Non-``str`` ``choices`` are now supported. It can additionally be any - iterable. Before you were not recommended to pass anything but a list or - tuple. - - .. versionadded:: 8.2.0 - Choice normalization can be overridden via :meth:`normalize_choice`. - """ - - name = "choice" - - def __init__( - self, choices: cabc.Iterable[ParamTypeValue], case_sensitive: bool = True - ) -> None: - self.choices: cabc.Sequence[ParamTypeValue] = tuple(choices) - self.case_sensitive = case_sensitive - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["choices"] = self.choices - info_dict["case_sensitive"] = self.case_sensitive - return info_dict - - def _normalized_mapping( - self, ctx: Context | None = None - ) -> cabc.Mapping[ParamTypeValue, str]: - """ - Returns mapping where keys are the original choices and the values are - the normalized values that are accepted via the command line. - - This is a simple wrapper around :meth:`normalize_choice`, use that - instead which is supported. - """ - return { - choice: self.normalize_choice( - choice=choice, - ctx=ctx, - ) - for choice in self.choices - } - - def normalize_choice(self, choice: ParamTypeValue, ctx: Context | None) -> str: - """ - Normalize a choice value, used to map a passed string to a choice. - Each choice must have a unique normalized value. 
- - By default uses :meth:`Context.token_normalize_func` and if not case - sensitive, convert it to a casefolded value. - - .. versionadded:: 8.2.0 - """ - normed_value = choice.name if isinstance(choice, enum.Enum) else str(choice) - - if ctx is not None and ctx.token_normalize_func is not None: - normed_value = ctx.token_normalize_func(normed_value) - - if not self.case_sensitive: - normed_value = normed_value.casefold() - - return normed_value - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - if param.param_type_name == "option" and not param.show_choices: # type: ignore - choice_metavars = [ - convert_type(type(choice)).name.upper() for choice in self.choices - ] - choices_str = "|".join([*dict.fromkeys(choice_metavars)]) - else: - choices_str = "|".join( - [str(i) for i in self._normalized_mapping(ctx=ctx).values()] - ) - - # Use curly braces to indicate a required argument. - if param.required and param.param_type_name == "argument": - return f"{{{choices_str}}}" - - # Use square braces to indicate an option or optional argument. - return f"[{choices_str}]" - - def get_missing_message(self, param: Parameter, ctx: Context | None) -> str: - """ - Message shown when no choice is passed. - - .. versionchanged:: 8.2.0 Added ``ctx`` argument. - """ - return _("Choose from:\n\t{choices}").format( - choices=",\n\t".join(self._normalized_mapping(ctx=ctx).values()) - ) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> ParamTypeValue: - """ - For a given value from the parser, normalize it and find its - matching normalized value in the list of choices. Then return the - matched "original" choice. - """ - normed_value = self.normalize_choice(choice=value, ctx=ctx) - normalized_mapping = self._normalized_mapping(ctx=ctx) - - try: - return next( - original - for original, normalized in normalized_mapping.items() - if normalized == normed_value - ) - except StopIteration: - self.fail( - self.get_invalid_choice_message(value=value, ctx=ctx), - param=param, - ctx=ctx, - ) - - def get_invalid_choice_message(self, value: t.Any, ctx: Context | None) -> str: - """Get the error message when the given choice is invalid. - - :param value: The invalid value. - - .. versionadded:: 8.2 - """ - choices_str = ", ".join(map(repr, self._normalized_mapping(ctx=ctx).values())) - return ngettext( - "{value!r} is not {choice}.", - "{value!r} is not one of {choices}.", - len(self.choices), - ).format(value=value, choice=choices_str, choices=choices_str) - - def __repr__(self) -> str: - return f"Choice({list(self.choices)})" - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Complete choices that start with the incomplete value. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - str_choices = map(str, self.choices) - - if self.case_sensitive: - matched = (c for c in str_choices if c.startswith(incomplete)) - else: - incomplete = incomplete.lower() - matched = (c for c in str_choices if c.lower().startswith(incomplete)) - - return [CompletionItem(c) for c in matched] - - -class DateTime(ParamType): - """The DateTime type converts date strings into `datetime` objects. 
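# --- Illustrative sketch, not from the click codebase: using the Choice and
# --- DateTime types described above. Assumes click 8.2+, where non-str choices
# --- such as enum members are supported; the `deploy` command is hypothetical.
import enum
import click

class Env(enum.Enum):
    DEV = "dev"
    PROD = "prod"

@click.command()
@click.option("--env", type=click.Choice(Env, case_sensitive=False), default=Env.DEV)
@click.option("--at", type=click.DateTime(formats=["%Y-%m-%d %H:%M:%S"]))
def deploy(env: Env, at) -> None:
    # Choice hands back the original enum member; DateTime returns a
    # datetime parsed with the first format that matches.
    click.echo(f"deploying to {env.name} at {at}")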
- - The format strings which are checked are configurable, but default to some - common (non-timezone aware) ISO 8601 formats. - - When specifying *DateTime* formats, you should only pass a list or a tuple. - Other iterables, like generators, may lead to surprising results. - - The format strings are processed using ``datetime.strptime``, and this - consequently defines the format strings which are allowed. - - Parsing is tried using each format, in order, and the first format which - parses successfully is used. - - :param formats: A list or tuple of date format strings, in the order in - which they should be tried. Defaults to - ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, - ``'%Y-%m-%d %H:%M:%S'``. - """ - - name = "datetime" - - def __init__(self, formats: cabc.Sequence[str] | None = None): - self.formats: cabc.Sequence[str] = formats or [ - "%Y-%m-%d", - "%Y-%m-%dT%H:%M:%S", - "%Y-%m-%d %H:%M:%S", - ] - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["formats"] = self.formats - return info_dict - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - return f"[{'|'.join(self.formats)}]" - - def _try_to_convert_date(self, value: t.Any, format: str) -> datetime | None: - try: - return datetime.strptime(value, format) - except ValueError: - return None - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if isinstance(value, datetime): - return value - - for format in self.formats: - converted = self._try_to_convert_date(value, format) - - if converted is not None: - return converted - - formats_str = ", ".join(map(repr, self.formats)) - self.fail( - ngettext( - "{value!r} does not match the format {format}.", - "{value!r} does not match the formats {formats}.", - len(self.formats), - ).format(value=value, format=formats_str, formats=formats_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return "DateTime" - - -class _NumberParamTypeBase(ParamType): - _number_class: t.ClassVar[type[t.Any]] - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - try: - return self._number_class(value) - except ValueError: - self.fail( - _("{value!r} is not a valid {number_type}.").format( - value=value, number_type=self.name - ), - param, - ctx, - ) - - -class _NumberRangeBase(_NumberParamTypeBase): - def __init__( - self, - min: float | None = None, - max: float | None = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - self.min = min - self.max = max - self.min_open = min_open - self.max_open = max_open - self.clamp = clamp - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - min=self.min, - max=self.max, - min_open=self.min_open, - max_open=self.max_open, - clamp=self.clamp, - ) - return info_dict - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - import operator - - rv = super().convert(value, param, ctx) - lt_min: bool = self.min is not None and ( - operator.le if self.min_open else operator.lt - )(rv, self.min) - gt_max: bool = self.max is not None and ( - operator.ge if self.max_open else operator.gt - )(rv, self.max) - - if self.clamp: - if lt_min: - return self._clamp(self.min, 1, self.min_open) # type: ignore - - if gt_max: - return self._clamp(self.max, -1, self.max_open) # type: ignore - - if lt_min or gt_max: - self.fail( - _("{value} is not in the range {range}.").format( - value=rv, 
range=self._describe_range() - ), - param, - ctx, - ) - - return rv - - def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: - """Find the valid value to clamp to bound in the given - direction. - - :param bound: The boundary value. - :param dir: 1 or -1 indicating the direction to move. - :param open: If true, the range does not include the bound. - """ - raise NotImplementedError - - def _describe_range(self) -> str: - """Describe the range for use in help text.""" - if self.min is None: - op = "<" if self.max_open else "<=" - return f"x{op}{self.max}" - - if self.max is None: - op = ">" if self.min_open else ">=" - return f"x{op}{self.min}" - - lop = "<" if self.min_open else "<=" - rop = "<" if self.max_open else "<=" - return f"{self.min}{lop}x{rop}{self.max}" - - def __repr__(self) -> str: - clamp = " clamped" if self.clamp else "" - return f"<{type(self).__name__} {self._describe_range()}{clamp}>" - - -class IntParamType(_NumberParamTypeBase): - name = "integer" - _number_class = int - - def __repr__(self) -> str: - return "INT" - - -class IntRange(_NumberRangeBase, IntParamType): - """Restrict an :data:`click.INT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "integer range" - - def _clamp( # type: ignore - self, bound: int, dir: t.Literal[1, -1], open: bool - ) -> int: - if not open: - return bound - - return bound + dir - - -class FloatParamType(_NumberParamTypeBase): - name = "float" - _number_class = float - - def __repr__(self) -> str: - return "FLOAT" - - -class FloatRange(_NumberRangeBase, FloatParamType): - """Restrict a :data:`click.FLOAT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. This is not supported if either - boundary is marked ``open``. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "float range" - - def __init__( - self, - min: float | None = None, - max: float | None = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - super().__init__( - min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp - ) - - if (min_open or max_open) and clamp: - raise TypeError("Clamping is not supported for open bounds.") - - def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: - if not open: - return bound - - # Could use math.nextafter here, but clamping an - # open float range doesn't seem to be particularly useful. It's - # left up to the user to write a callback to do it if needed. 
- raise RuntimeError("Clamping is not supported for open bounds.") - - -class BoolParamType(ParamType): - name = "boolean" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if value in {False, True}: - return bool(value) - - norm = value.strip().lower() - - if norm in {"1", "true", "t", "yes", "y", "on"}: - return True - - if norm in {"0", "false", "f", "no", "n", "off"}: - return False - - self.fail( - _("{value!r} is not a valid boolean.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "BOOL" - - -class UUIDParameterType(ParamType): - name = "uuid" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - import uuid - - if isinstance(value, uuid.UUID): - return value - - value = value.strip() - - try: - return uuid.UUID(value) - except ValueError: - self.fail( - _("{value!r} is not a valid UUID.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "UUID" - - -class File(ParamType): - """Declares a parameter to be a file for reading or writing. The file - is automatically closed once the context tears down (after the command - finished working). - - Files can be opened for reading or writing. The special value ``-`` - indicates stdin or stdout depending on the mode. - - By default, the file is opened for reading text data, but it can also be - opened in binary mode or for writing. The encoding parameter can be used - to force a specific encoding. - - The `lazy` flag controls if the file should be opened immediately or upon - first IO. The default is to be non-lazy for standard input and output - streams as well as files opened for reading, `lazy` otherwise. When opening a - file lazily for reading, it is still opened temporarily for validation, but - will not be held open until first IO. lazy is mainly useful when opening - for writing to avoid creating the file until it is needed. - - Files can also be opened atomically in which case all writes go into a - separate file in the same folder and upon completion the file will - be moved over to the original location. This is useful if a file - regularly read by other users is modified. - - See :ref:`file-args` for more information. - - .. versionchanged:: 2.0 - Added the ``atomic`` parameter. 
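# --- Illustrative sketch, not from the click codebase: the IntRange and File
# --- types described above, on a hypothetical `head` command.
import click

@click.command()
@click.option("-n", "--lines", type=click.IntRange(1, 100, clamp=True), default=10)
@click.argument("src", type=click.File("r"))
@click.argument("dst", type=click.File("w", lazy=True, atomic=True))
def head(lines: int, src, dst) -> None:
    # --lines is clamped into [1, 100] instead of failing; "-" for SRC/DST means
    # stdin/stdout; the atomic DST is written to a temporary file and moved into
    # place when the context tears down.
    for line in src.readlines()[:lines]:
        dst.write(line)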
- """ - - name = "filename" - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - lazy: bool | None = None, - atomic: bool = False, - ) -> None: - self.mode = mode - self.encoding = encoding - self.errors = errors - self.lazy = lazy - self.atomic = atomic - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update(mode=self.mode, encoding=self.encoding) - return info_dict - - def resolve_lazy_flag(self, value: str | os.PathLike[str]) -> bool: - if self.lazy is not None: - return self.lazy - if os.fspath(value) == "-": - return False - elif "w" in self.mode: - return True - return False - - def convert( - self, - value: str | os.PathLike[str] | t.IO[t.Any], - param: Parameter | None, - ctx: Context | None, - ) -> t.IO[t.Any]: - if _is_file_like(value): - return value - - value = t.cast("str | os.PathLike[str]", value) - - try: - lazy = self.resolve_lazy_flag(value) - - if lazy: - lf = LazyFile( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - if ctx is not None: - ctx.call_on_close(lf.close_intelligently) - - return t.cast("t.IO[t.Any]", lf) - - f, should_close = open_stream( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - # If a context is provided, we automatically close the file - # at the end of the context execution (or flush out). If a - # context does not exist, it's the caller's responsibility to - # properly close the file. This for instance happens when the - # type is used with prompts. - if ctx is not None: - if should_close: - ctx.call_on_close(safecall(f.close)) - else: - ctx.call_on_close(safecall(f.flush)) - - return f - except OSError as e: - self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a special completion marker that tells the completion - system to use the shell to provide file path completions. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - return [CompletionItem(incomplete, type="file")] - - -def _is_file_like(value: t.Any) -> te.TypeGuard[t.IO[t.Any]]: - return hasattr(value, "read") or hasattr(value, "write") - - -class Path(ParamType): - """The ``Path`` type is similar to the :class:`File` type, but - returns the filename instead of an open file. Various checks can be - enabled to validate the type of file and permissions. - - :param exists: The file or directory needs to exist for the value to - be valid. If this is not set to ``True``, and the file does not - exist, then all further checks are silently skipped. - :param file_okay: Allow a file as a value. - :param dir_okay: Allow a directory as a value. - :param readable: if true, a readable check is performed. - :param writable: if true, a writable check is performed. - :param executable: if true, an executable check is performed. - :param resolve_path: Make the value absolute and resolve any - symlinks. A ``~`` is not expanded, as this is supposed to be - done by the shell only. - :param allow_dash: Allow a single dash as a value, which indicates - a standard stream (but does not open it). Use - :func:`~click.open_file` to handle opening this value. 
- :param path_type: Convert the incoming path value to this type. If - ``None``, keep Python's default, which is ``str``. Useful to - convert to :class:`pathlib.Path`. - - .. versionchanged:: 8.1 - Added the ``executable`` parameter. - - .. versionchanged:: 8.0 - Allow passing ``path_type=pathlib.Path``. - - .. versionchanged:: 6.0 - Added the ``allow_dash`` parameter. - """ - - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - exists: bool = False, - file_okay: bool = True, - dir_okay: bool = True, - writable: bool = False, - readable: bool = True, - resolve_path: bool = False, - allow_dash: bool = False, - path_type: type[t.Any] | None = None, - executable: bool = False, - ): - self.exists = exists - self.file_okay = file_okay - self.dir_okay = dir_okay - self.readable = readable - self.writable = writable - self.executable = executable - self.resolve_path = resolve_path - self.allow_dash = allow_dash - self.type = path_type - - if self.file_okay and not self.dir_okay: - self.name: str = _("file") - elif self.dir_okay and not self.file_okay: - self.name = _("directory") - else: - self.name = _("path") - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - exists=self.exists, - file_okay=self.file_okay, - dir_okay=self.dir_okay, - writable=self.writable, - readable=self.readable, - allow_dash=self.allow_dash, - ) - return info_dict - - def coerce_path_result( - self, value: str | os.PathLike[str] - ) -> str | bytes | os.PathLike[str]: - if self.type is not None and not isinstance(value, self.type): - if self.type is str: - return os.fsdecode(value) - elif self.type is bytes: - return os.fsencode(value) - else: - return t.cast("os.PathLike[str]", self.type(value)) - - return value - - def convert( - self, - value: str | os.PathLike[str], - param: Parameter | None, - ctx: Context | None, - ) -> str | bytes | os.PathLike[str]: - rv = value - - is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") - - if not is_dash: - if self.resolve_path: - rv = os.path.realpath(rv) - - try: - st = os.stat(rv) - except OSError: - if not self.exists: - return self.coerce_path_result(rv) - self.fail( - _("{name} {filename!r} does not exist.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if not self.file_okay and stat.S_ISREG(st.st_mode): - self.fail( - _("{name} {filename!r} is a file.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - if not self.dir_okay and stat.S_ISDIR(st.st_mode): - self.fail( - _("{name} {filename!r} is a directory.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.readable and not os.access(rv, os.R_OK): - self.fail( - _("{name} {filename!r} is not readable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.writable and not os.access(rv, os.W_OK): - self.fail( - _("{name} {filename!r} is not writable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.executable and not os.access(value, os.X_OK): - self.fail( - _("{name} {filename!r} is not executable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - return self.coerce_path_result(rv) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a special completion marker that 
tells the completion - system to use the shell to provide path completions for only - directories or any paths. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - type = "dir" if self.dir_okay and not self.file_okay else "file" - return [CompletionItem(incomplete, type=type)] - - -class Tuple(CompositeParamType): - """The default behavior of Click is to apply a type on a value directly. - This works well in most cases, except for when `nargs` is set to a fixed - count and different types should be used for different items. In this - case the :class:`Tuple` type can be used. This type can only be used - if `nargs` is set to a fixed number. - - For more information see :ref:`tuple-type`. - - This can be selected by using a Python tuple literal as a type. - - :param types: a list of types that should be used for the tuple items. - """ - - def __init__(self, types: cabc.Sequence[type[t.Any] | ParamType]) -> None: - self.types: cabc.Sequence[ParamType] = [convert_type(ty) for ty in types] - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["types"] = [t.to_info_dict() for t in self.types] - return info_dict - - @property - def name(self) -> str: # type: ignore - return f"<{' '.join(ty.name for ty in self.types)}>" - - @property - def arity(self) -> int: # type: ignore - return len(self.types) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - len_type = len(self.types) - len_value = len(value) - - if len_value != len_type: - self.fail( - ngettext( - "{len_type} values are required, but {len_value} was given.", - "{len_type} values are required, but {len_value} were given.", - len_value, - ).format(len_type=len_type, len_value=len_value), - param=param, - ctx=ctx, - ) - - return tuple( - ty(x, param, ctx) for ty, x in zip(self.types, value, strict=False) - ) - - -def convert_type(ty: t.Any | None, default: t.Any | None = None) -> ParamType: - """Find the most appropriate :class:`ParamType` for the given Python - type. If the type isn't provided, it can be inferred from a default - value. - """ - guessed_type = False - - if ty is None and default is not None: - if isinstance(default, (tuple, list)): - # If the default is empty, ty will remain None and will - # return STRING. - if default: - item = default[0] - - # A tuple of tuples needs to detect the inner types. - # Can't call convert recursively because that would - # incorrectly unwind the tuple to a single type. - if isinstance(item, (tuple, list)): - ty = tuple(map(type, item)) - else: - ty = type(item) - else: - ty = type(default) - - guessed_type = True - - if isinstance(ty, tuple): - return Tuple(ty) - - if isinstance(ty, ParamType): - return ty - - if ty is str or ty is None: - return STRING - - if ty is int: - return INT - - if ty is float: - return FLOAT - - if ty is bool: - return BOOL - - if guessed_type: - return STRING - - if __debug__: - try: - if issubclass(ty, ParamType): - raise AssertionError( - f"Attempted to use an uninstantiated parameter type ({ty})." - ) - except TypeError: - # ty is an instance (correct), so issubclass fails. - pass - - return FuncParamType(ty) - - -#: A dummy parameter type that just does nothing. 
From a user's -#: perspective this appears to just be the same as `STRING` but -#: internally no string conversion takes place if the input was bytes. -#: This is usually useful when working with file paths as they can -#: appear in bytes and unicode. -#: -#: For path related uses the :class:`Path` type is a better choice but -#: there are situations where an unprocessed type is useful which is why -#: it is is provided. -#: -#: .. versionadded:: 4.0 -UNPROCESSED = UnprocessedParamType() - -#: A unicode string parameter type which is the implicit default. This -#: can also be selected by using ``str`` as type. -STRING = StringParamType() - -#: An integer parameter. This can also be selected by using ``int`` as -#: type. -INT = IntParamType() - -#: A floating point value parameter. This can also be selected by using -#: ``float`` as type. -FLOAT = FloatParamType() - -#: A boolean parameter. This is the default for boolean flags. This can -#: also be selected by using ``bool`` as a type. -BOOL = BoolParamType() - -#: A UUID parameter. -UUID = UUIDParameterType() - - -class OptionHelpExtra(t.TypedDict, total=False): - envvars: tuple[str, ...] - default: str - range: str - required: str diff --git a/venv/lib/python3.10/site-packages/click/utils.py b/venv/lib/python3.10/site-packages/click/utils.py deleted file mode 100644 index ab2fe58..0000000 --- a/venv/lib/python3.10/site-packages/click/utils.py +++ /dev/null @@ -1,627 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import re -import sys -import typing as t -from functools import update_wrapper -from types import ModuleType -from types import TracebackType - -from ._compat import _default_text_stderr -from ._compat import _default_text_stdout -from ._compat import _find_binary_writer -from ._compat import auto_wrap_for_ansi -from ._compat import binary_streams -from ._compat import open_stream -from ._compat import should_strip_ansi -from ._compat import strip_ansi -from ._compat import text_streams -from ._compat import WIN -from .globals import resolve_color_default - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") - - -def _posixify(name: str) -> str: - return "-".join(name.split()).lower() - - -def safecall(func: t.Callable[P, R]) -> t.Callable[P, R | None]: - """Wraps a function so that it swallows exceptions.""" - - def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None: - try: - return func(*args, **kwargs) - except Exception: - pass - return None - - return update_wrapper(wrapper, func) - - -def make_str(value: t.Any) -> str: - """Converts a value into a valid string.""" - if isinstance(value, bytes): - try: - return value.decode(sys.getfilesystemencoding()) - except UnicodeError: - return value.decode("utf-8", "replace") - return str(value) - - -def make_default_short_help(help: str, max_length: int = 45) -> str: - """Returns a condensed version of help string.""" - # Consider only the first paragraph. - paragraph_end = help.find("\n\n") - - if paragraph_end != -1: - help = help[:paragraph_end] - - # Collapse newlines, tabs, and spaces. - words = help.split() - - if not words: - return "" - - # The first paragraph started with a "no rewrap" marker, ignore it. 
- if words[0] == "\b": - words = words[1:] - - total_length = 0 - last_index = len(words) - 1 - - for i, word in enumerate(words): - total_length += len(word) + (i > 0) - - if total_length > max_length: # too long, truncate - break - - if word[-1] == ".": # sentence end, truncate without "..." - return " ".join(words[: i + 1]) - - if total_length == max_length and i != last_index: - break # not at sentence end, truncate with "..." - else: - return " ".join(words) # no truncation needed - - # Account for the length of the suffix. - total_length += len("...") - - # remove words until the length is short enough - while i > 0: - total_length -= len(words[i]) + (i > 0) - - if total_length <= max_length: - break - - i -= 1 - - return " ".join(words[:i]) + "..." - - -class LazyFile: - """A lazy file works like a regular file but it does not fully open - the file but it does perform some basic checks early to see if the - filename parameter does make sense. This is useful for safely opening - files for writing. - """ - - def __init__( - self, - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - atomic: bool = False, - ): - self.name: str = os.fspath(filename) - self.mode = mode - self.encoding = encoding - self.errors = errors - self.atomic = atomic - self._f: t.IO[t.Any] | None - self.should_close: bool - - if self.name == "-": - self._f, self.should_close = open_stream(filename, mode, encoding, errors) - else: - if "r" in mode: - # Open and close the file in case we're opening it for - # reading so that we can catch at least some errors in - # some cases early. - open(filename, mode).close() - self._f = None - self.should_close = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self.open(), name) - - def __repr__(self) -> str: - if self._f is not None: - return repr(self._f) - return f"" - - def open(self) -> t.IO[t.Any]: - """Opens the file if it's not yet open. This call might fail with - a :exc:`FileError`. Not handling this error will produce an error - that Click shows. - """ - if self._f is not None: - return self._f - try: - rv, self.should_close = open_stream( - self.name, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - except OSError as e: - from .exceptions import FileError - - raise FileError(self.name, hint=e.strerror) from e - self._f = rv - return rv - - def close(self) -> None: - """Closes the underlying file, no matter what.""" - if self._f is not None: - self._f.close() - - def close_intelligently(self) -> None: - """This function only closes the file if it was opened by the lazy - file wrapper. For instance this will never close stdin. 
- """ - if self.should_close: - self.close() - - def __enter__(self) -> LazyFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.close_intelligently() - - def __iter__(self) -> cabc.Iterator[t.AnyStr]: - self.open() - return iter(self._f) # type: ignore - - -class KeepOpenFile: - def __init__(self, file: t.IO[t.Any]) -> None: - self._file: t.IO[t.Any] = file - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._file, name) - - def __enter__(self) -> KeepOpenFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - pass - - def __repr__(self) -> str: - return repr(self._file) - - def __iter__(self) -> cabc.Iterator[t.AnyStr]: - return iter(self._file) - - -def echo( - message: t.Any | None = None, - file: t.IO[t.Any] | None = None, - nl: bool = True, - err: bool = False, - color: bool | None = None, -) -> None: - """Print a message and newline to stdout or a file. This should be - used instead of :func:`print` because it provides better support - for different data, files, and environments. - - Compared to :func:`print`, this does the following: - - - Ensures that the output encoding is not misconfigured on Linux. - - Supports Unicode in the Windows console. - - Supports writing to binary outputs, and supports writing bytes - to text outputs. - - Supports colors and styles on Windows. - - Removes ANSI color and style codes if the output does not look - like an interactive terminal. - - Always flushes the output. - - :param message: The string or bytes to output. Other objects are - converted to strings. - :param file: The file to write to. Defaults to ``stdout``. - :param err: Write to ``stderr`` instead of ``stdout``. - :param nl: Print a newline after the message. Enabled by default. - :param color: Force showing or hiding colors and other styles. By - default Click will remove color if the output does not look like - an interactive terminal. - - .. versionchanged:: 6.0 - Support Unicode output on the Windows console. Click does not - modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` - will still not support Unicode. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionadded:: 3.0 - Added the ``err`` parameter. - - .. versionchanged:: 2.0 - Support colors on Windows if colorama is installed. - """ - if file is None: - if err: - file = _default_text_stderr() - else: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - return - - # Convert non bytes/text into the native string type. - if message is not None and not isinstance(message, (str, bytes, bytearray)): - out: str | bytes | None = str(message) - else: - out = message - - if nl: - out = out or "" - if isinstance(out, str): - out += "\n" - else: - out += b"\n" - - if not out: - file.flush() - return - - # If there is a message and the value looks like bytes, we manually - # need to find the binary stream and write the message in there. - # This is done separately so that most stream types will work as you - # would expect. Eg: you can write to StringIO for other cases. 
- if isinstance(out, (bytes, bytearray)): - binary_file = _find_binary_writer(file) - - if binary_file is not None: - file.flush() - binary_file.write(out) - binary_file.flush() - return - - # ANSI style code support. For no message or bytes, nothing happens. - # When outputting to a file instead of a terminal, strip codes. - else: - color = resolve_color_default(color) - - if should_strip_ansi(file, color): - out = strip_ansi(out) - elif WIN: - if auto_wrap_for_ansi is not None: - file = auto_wrap_for_ansi(file, color) # type: ignore - elif not color: - out = strip_ansi(out) - - file.write(out) # type: ignore - file.flush() - - -def get_binary_stream(name: t.Literal["stdin", "stdout", "stderr"]) -> t.BinaryIO: - """Returns a system stream for byte processing. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - """ - opener = binary_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener() - - -def get_text_stream( - name: t.Literal["stdin", "stdout", "stderr"], - encoding: str | None = None, - errors: str | None = "strict", -) -> t.TextIO: - """Returns a system stream for text processing. This usually returns - a wrapped stream around a binary stream returned from - :func:`get_binary_stream` but it also can take shortcuts for already - correctly configured streams. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - :param encoding: overrides the detected default encoding. - :param errors: overrides the default error mode. - """ - opener = text_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener(encoding, errors) - - -def open_file( - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - lazy: bool = False, - atomic: bool = False, -) -> t.IO[t.Any]: - """Open a file, with extra behavior to handle ``'-'`` to indicate - a standard stream, lazy open on write, and atomic write. Similar to - the behavior of the :class:`~click.File` param type. - - If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is - wrapped so that using it in a context manager will not close it. - This makes it possible to use the function without accidentally - closing a standard stream: - - .. code-block:: python - - with open_file(filename) as f: - ... - - :param filename: The name or Path of the file to open, or ``'-'`` for - ``stdin``/``stdout``. - :param mode: The mode in which to open the file. - :param encoding: The encoding to decode or encode a file opened in - text mode. - :param errors: The error handling mode. - :param lazy: Wait to open the file until it is accessed. For read - mode, the file is temporarily opened to raise access errors - early, then closed until it is read again. - :param atomic: Write to a temporary file and replace the given file - on close. - - .. versionadded:: 3.0 - """ - if lazy: - return t.cast( - "t.IO[t.Any]", LazyFile(filename, mode, encoding, errors, atomic=atomic) - ) - - f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) - - if not should_close: - f = t.cast("t.IO[t.Any]", KeepOpenFile(f)) - - return f - - -def format_filename( - filename: str | bytes | os.PathLike[str] | os.PathLike[bytes], - shorten: bool = False, -) -> str: - """Format a filename as a string for display. 
Ensures the filename can be - displayed by replacing any invalid bytes or surrogate escapes in the name - with the replacement character ``�``. - - Invalid bytes or surrogate escapes will raise an error when written to a - stream with ``errors="strict"``. This will typically happen with ``stdout`` - when the locale is something like ``en_GB.UTF-8``. - - Many scenarios *are* safe to write surrogates though, due to PEP 538 and - PEP 540, including: - - - Writing to ``stderr``, which uses ``errors="backslashreplace"``. - - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens - stdout and stderr with ``errors="surrogateescape"``. - - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. - - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. - Python opens stdout and stderr with ``errors="surrogateescape"``. - - :param filename: formats a filename for UI display. This will also convert - the filename into unicode without failing. - :param shorten: this optionally shortens the filename to strip of the - path that leads up to it. - """ - if shorten: - filename = os.path.basename(filename) - else: - filename = os.fspath(filename) - - if isinstance(filename, bytes): - filename = filename.decode(sys.getfilesystemencoding(), "replace") - else: - filename = filename.encode("utf-8", "surrogateescape").decode( - "utf-8", "replace" - ) - - return filename - - -def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: - r"""Returns the config folder for the application. The default behavior - is to return whatever is most appropriate for the operating system. - - To give you an idea, for an app called ``"Foo Bar"``, something like - the following folders could be returned: - - Mac OS X: - ``~/Library/Application Support/Foo Bar`` - Mac OS X (POSIX): - ``~/.foo-bar`` - Unix: - ``~/.config/foo-bar`` - Unix (POSIX): - ``~/.foo-bar`` - Windows (roaming): - ``C:\Users\\AppData\Roaming\Foo Bar`` - Windows (not roaming): - ``C:\Users\\AppData\Local\Foo Bar`` - - .. versionadded:: 2.0 - - :param app_name: the application name. This should be properly capitalized - and can contain whitespace. - :param roaming: controls if the folder should be roaming or not on Windows. - Has no effect otherwise. - :param force_posix: if this is set to `True` then on any POSIX system the - folder will be stored in the home folder with a leading - dot instead of the XDG config home or darwin's - application support folder. - """ - if WIN: - key = "APPDATA" if roaming else "LOCALAPPDATA" - folder = os.environ.get(key) - if folder is None: - folder = os.path.expanduser("~") - return os.path.join(folder, app_name) - if force_posix: - return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) - if sys.platform == "darwin": - return os.path.join( - os.path.expanduser("~/Library/Application Support"), app_name - ) - return os.path.join( - os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), - _posixify(app_name), - ) - - -class PacifyFlushWrapper: - """This wrapper is used to catch and suppress BrokenPipeErrors resulting - from ``.flush()`` being called on broken pipe during the shutdown/final-GC - of the Python interpreter. Notably ``.flush()`` is always called on - ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any - other cleanup code, and the case where the underlying file is not a broken - pipe, all calls and attributes are proxied. 
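# --- Illustrative sketch, not from the click codebase: combining get_app_dir()
# --- and open_file() as described above. The "Foo Bar" app name and the
# --- settings file name are made up.
import os
import click

config_dir = click.get_app_dir("Foo Bar")      # e.g. ~/.config/foo-bar on Unix
os.makedirs(config_dir, exist_ok=True)
config_path = os.path.join(config_dir, "settings.ini")

# atomic=True writes to a temp file and replaces settings.ini on close;
# passing "-" instead of a path would target stdout without closing it.
with click.open_file(config_path, "w", atomic=True) as f:
    f.write("[ui]\ntheme = dark\n")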
- """ - - def __init__(self, wrapped: t.IO[t.Any]) -> None: - self.wrapped = wrapped - - def flush(self) -> None: - try: - self.wrapped.flush() - except OSError as e: - import errno - - if e.errno != errno.EPIPE: - raise - - def __getattr__(self, attr: str) -> t.Any: - return getattr(self.wrapped, attr) - - -def _detect_program_name( - path: str | None = None, _main: ModuleType | None = None -) -> str: - """Determine the command used to run the program, for use in help - text. If a file or entry point was executed, the file name is - returned. If ``python -m`` was used to execute a module or package, - ``python -m name`` is returned. - - This doesn't try to be too precise, the goal is to give a concise - name for help text. Files are only shown as their name without the - path. ``python`` is only shown for modules, and the full path to - ``sys.executable`` is not shown. - - :param path: The Python file being executed. Python puts this in - ``sys.argv[0]``, which is used by default. - :param _main: The ``__main__`` module. This should only be passed - during internal testing. - - .. versionadded:: 8.0 - Based on command args detection in the Werkzeug reloader. - - :meta private: - """ - if _main is None: - _main = sys.modules["__main__"] - - if not path: - path = sys.argv[0] - - # The value of __package__ indicates how Python was called. It may - # not exist if a setuptools script is installed as an egg. It may be - # set incorrectly for entry points created with pip on Windows. - # It is set to "" inside a Shiv or PEX zipapp. - if getattr(_main, "__package__", None) in {None, ""} or ( - os.name == "nt" - and _main.__package__ == "" - and not os.path.exists(path) - and os.path.exists(f"{path}.exe") - ): - # Executed a file, like "python app.py". - return os.path.basename(path) - - # Executed a module, like "python -m example". - # Rewritten by Python from "-m script" to "/path/to/script.py". - # Need to look at main module to determine how it was executed. - py_module = t.cast(str, _main.__package__) - name = os.path.splitext(os.path.basename(path))[0] - - # A submodule like "example.cli". - if name != "__main__": - py_module = f"{py_module}.{name}" - - return f"python -m {py_module.lstrip('.')}" - - -def _expand_args( - args: cabc.Iterable[str], - *, - user: bool = True, - env: bool = True, - glob_recursive: bool = True, -) -> list[str]: - """Simulate Unix shell expansion with Python functions. - - See :func:`glob.glob`, :func:`os.path.expanduser`, and - :func:`os.path.expandvars`. - - This is intended for use on Windows, where the shell does not do any - expansion. It may not exactly match what a Unix shell would do. - - :param args: List of command line arguments to expand. - :param user: Expand user home directory. - :param env: Expand environment variables. - :param glob_recursive: ``**`` matches directories recursively. - - .. versionchanged:: 8.1 - Invalid glob patterns are treated as empty expansions rather - than raising an error. - - .. 
versionadded:: 8.0 - - :meta private: - """ - from glob import glob - - out = [] - - for arg in args: - if user: - arg = os.path.expanduser(arg) - - if env: - arg = os.path.expandvars(arg) - - try: - matches = glob(arg, recursive=glob_recursive) - except re.error: - matches = [] - - if not matches: - out.append(arg) - else: - out.extend(matches) - - return out diff --git a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/METADATA b/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/METADATA deleted file mode 100644 index 8f69ac5..0000000 --- a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/METADATA +++ /dev/null @@ -1,139 +0,0 @@ -Metadata-Version: 2.4 -Name: cryptography -Version: 45.0.5 -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: POSIX -Classifier: Operating System :: POSIX :: BSD -Classifier: Operating System :: POSIX :: Linux -Classifier: Operating System :: Microsoft :: Windows -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Security :: Cryptography -Requires-Dist: cffi>=1.14 ; platform_python_implementation != 'PyPy' -Requires-Dist: bcrypt>=3.1.5 ; extra == 'ssh' -Requires-Dist: nox>=2024.4.15 ; extra == 'nox' -Requires-Dist: nox[uv]>=2024.3.2 ; python_full_version >= '3.8' and extra == 'nox' -Requires-Dist: cryptography-vectors==45.0.5 ; extra == 'test' -Requires-Dist: pytest>=7.4.0 ; extra == 'test' -Requires-Dist: pytest-benchmark>=4.0 ; extra == 'test' -Requires-Dist: pytest-cov>=2.10.1 ; extra == 'test' -Requires-Dist: pytest-xdist>=3.5.0 ; extra == 'test' -Requires-Dist: pretend>=0.7 ; extra == 'test' -Requires-Dist: certifi>=2024 ; extra == 'test' -Requires-Dist: pytest-randomly ; extra == 'test-randomorder' -Requires-Dist: sphinx>=5.3.0 ; extra == 'docs' -Requires-Dist: sphinx-rtd-theme>=3.0.0 ; python_full_version >= '3.8' and extra == 'docs' -Requires-Dist: sphinx-inline-tabs ; python_full_version >= '3.8' and extra == 'docs' -Requires-Dist: pyenchant>=3 ; extra == 'docstest' -Requires-Dist: readme-renderer>=30.0 ; extra == 'docstest' -Requires-Dist: sphinxcontrib-spelling>=7.3.1 ; extra == 'docstest' -Requires-Dist: build>=1.0.0 ; extra == 'sdist' -Requires-Dist: ruff>=0.3.6 ; extra == 'pep8test' -Requires-Dist: mypy>=1.4 ; extra == 'pep8test' -Requires-Dist: check-sdist ; python_full_version >= '3.8' and extra == 'pep8test' -Requires-Dist: click>=8.0.1 ; extra == 'pep8test' -Provides-Extra: ssh -Provides-Extra: nox 
-Provides-Extra: test -Provides-Extra: test-randomorder -Provides-Extra: docs -Provides-Extra: docstest -Provides-Extra: sdist -Provides-Extra: pep8test -License-File: LICENSE -License-File: LICENSE.APACHE -License-File: LICENSE.BSD -Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. -Author: The cryptography developers -Author-email: The Python Cryptographic Authority and individual contributors -License: Apache-2.0 OR BSD-3-Clause -Requires-Python: >=3.7, !=3.9.0, !=3.9.1 -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: homepage, https://github.com/pyca/cryptography -Project-URL: documentation, https://cryptography.io/ -Project-URL: source, https://github.com/pyca/cryptography/ -Project-URL: issues, https://github.com/pyca/cryptography/issues -Project-URL: changelog, https://cryptography.io/en/latest/changelog/ - -pyca/cryptography -================= - -.. image:: https://img.shields.io/pypi/v/cryptography.svg - :target: https://pypi.org/project/cryptography/ - :alt: Latest Version - -.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest - :target: https://cryptography.io - :alt: Latest Docs - -.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main - :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain - - -``cryptography`` is a package which provides cryptographic recipes and -primitives to Python developers. Our goal is for it to be your "cryptographic -standard library". It supports Python 3.7+ and PyPy3 7.3.11+. - -``cryptography`` includes both high level recipes and low level interfaces to -common cryptographic algorithms such as symmetric ciphers, message digests, and -key derivation functions. For example, to encrypt something with -``cryptography``'s high level symmetric encryption recipe: - -.. code-block:: pycon - - >>> from cryptography.fernet import Fernet - >>> # Put this somewhere safe! - >>> key = Fernet.generate_key() - >>> f = Fernet(key) - >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") - >>> token - b'...' - >>> f.decrypt(token) - b'A really secret message. Not for prying eyes.' - -You can find more information in the `documentation`_. - -You can install ``cryptography`` with: - -.. code-block:: console - - $ pip install cryptography - -For full details see `the installation documentation`_. - -Discussion -~~~~~~~~~~ - -If you run into bugs, you can file them in our `issue tracker`_. - -We maintain a `cryptography-dev`_ mailing list for development discussion. - -You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get -involved. - -Security -~~~~~~~~ - -Need to report a security issue? Please consult our `security reporting`_ -documentation. - - -.. _`documentation`: https://cryptography.io/ -.. _`the installation documentation`: https://cryptography.io/en/latest/installation/ -.. _`issue tracker`: https://github.com/pyca/cryptography/issues -.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev -.. 
_`security reporting`: https://cryptography.io/en/latest/security/ - diff --git a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/RECORD b/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/RECORD deleted file mode 100644 index 45e51fc..0000000 --- a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/RECORD +++ /dev/null @@ -1,175 +0,0 @@ -cryptography-45.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -cryptography-45.0.5.dist-info/METADATA,sha256=Dn2Ls7PhdM88VKgRTBFDskPTSNeGHm7SezAcdIPYdKc,5690 -cryptography-45.0.5.dist-info/RECORD,, -cryptography-45.0.5.dist-info/WHEEL,sha256=VqNc6WOTGHZndw6FKXcyBqGvpOjDTrsbnnH79sUpcvY,107 -cryptography-45.0.5.dist-info/licenses/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197 -cryptography-45.0.5.dist-info/licenses/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 -cryptography-45.0.5.dist-info/licenses/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 -cryptography/__about__.py,sha256=X8cr9wNd4PDeKHYUt6ppLOYb-V56WYSgLBuYKlivuBY,445 -cryptography/__init__.py,sha256=QDuQ3te_14R9iGF-2Q5yEwD1FTJrCWqpnuhlMIJBsno,762 -cryptography/__pycache__/__about__.cpython-310.pyc,, -cryptography/__pycache__/__init__.cpython-310.pyc,, -cryptography/__pycache__/exceptions.cpython-310.pyc,, -cryptography/__pycache__/fernet.cpython-310.pyc,, -cryptography/__pycache__/utils.cpython-310.pyc,, -cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087 -cryptography/fernet.py,sha256=3Cvxkh0KJSbX8HbnCHu4wfCW7U0GgfUA3v_qQ8a8iWc,6963 -cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455 -cryptography/hazmat/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/__pycache__/_oid.cpython-310.pyc,, -cryptography/hazmat/_oid.py,sha256=7-aiQLWCYT7FDrQ0sUWBNYkN5ADEGTZwLQE_ZCksPxs,16795 -cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361 -cryptography/hazmat/backends/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305 -cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-310.pyc,, -cryptography/hazmat/backends/openssl/backend.py,sha256=ev-Prba_u1bCMk3ZWspkdOuLBbDlf_7wJnHK6xdYs7c,10329 -cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/bindings/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/bindings/_rust.abi3.so,sha256=BtscyLV0h1_Wy8zPS9g0OAG4KOJsnqLVG1w4ZqZnr5s,12446840 -cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=KhqLhXFPArPzzJ7DYO9Fl8FoXB_BagAd_r4Dm_Ze9Xo,1257 -cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230 -cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354 -cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640 -cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=VPVWuKHI9EMs09ZLRYAGvR0Iz0mCMmEzXAkgJHovpoM,4020 -cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=iOAMDyHoNwwCSZfZzuXDr64g4GpGUeDgEN-LjXqdrBM,1522 -cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=4Nddw6-ynzIB3w2W86WvkGKTLlTDk_6F5l54RHCuy3E,2688 
-cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=LhPzHWSXJq4grAJXn6zSvSSdV-aYIIscHDwIPlJGGPs,1315 -cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564 -cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564 -cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299 -cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691 -cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=VXfXd5G6hUivg399R1DYdmW3eTb0EebzDTqjRC2gaRw,532 -cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=Yx49lqdnjsD7bxiDV1kcaMrDktug5evi5a6zerMiy2s,514 -cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=OWZvBx7xfo_HJl41Nc--DugVyCVPIprZ3HlOPTSWH9g,984 -cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=BXZn7NDjL3JAbYW0SQ8pg1iyC5DbQXVhUAiwsi8DFR8,702 -cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=4FGpmCR2H8SVGoe4SUX7h5wzhyvuRi8hnzKKfvnJJrw,1379 -cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=teIt8M6ZEMJrn4s3W0UnW0DZ-30Jd68WnSsKKG124l0,912 -cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=_SW9NtQ5FDlAbdclFtWpT4lGmxKIKHpN-4j8J2BzYfQ,585 -cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364 -cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=ewn4GpQyb7zPwE-ni7GtyQgMC0A1mLuqYsSyqv6nI_s,523 -cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=juTZTmli8jO_5Vcufg-vHvx_tCyezmSLIh_9PU3TczI,505 -cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=vEEd5wDiZvb8ZGFaziLCaWLzAwoG_tvPUxLQw5_uOl8,1605 -cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=txGBJijqZshEcqra6byPNbnisIdlxzOSIHP2hl9arPs,1601 -cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=PPhld-WkO743iXFPebeG0LtgK0aTzGdjcIsay1Gm5GE,757 -cryptography/hazmat/bindings/_rust/x509.pyi,sha256=WWFr7RV3n1eFYbLpmM_xwXGKYZalyYi_V07p6KLOHX8,10189 -cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-310.pyc,, -cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-310.pyc,, -cryptography/hazmat/bindings/openssl/_conditional.py,sha256=eKiDLdRSXTGA6mQ8jj5S2OXEB_sod6iL7cwed-yOuUw,5340 -cryptography/hazmat/bindings/openssl/binding.py,sha256=hAmRXYzQlpIAP_jj2tl-SIBJBxwtclLlr_3aIL-RkR0,4072 -cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 -cryptography/hazmat/decrepit/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 -cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-310.pyc,, -cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=YrKgHS4MfwWaMmPBYRymRRlC0phwWp9ycICFezeJPGk,2595 -cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/primitives/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/_serialization.cpython-310.pyc,, 
-cryptography/hazmat/primitives/__pycache__/cmac.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/constant_time.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/hashes.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/hmac.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/keywrap.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/padding.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/poly1305.cpython-310.pyc,, -cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532 -cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=Eh3i7lwedHfi0eLSsH93PZxQKzY9I6lkK67vL4V5tOc,1522 -cryptography/hazmat/primitives/_serialization.py,sha256=chgPCSF2jxI2Cr5gB-qbWXOvOfupBh4CARS0KAhv9AM,5123 -cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/dh.py,sha256=0v_vEFFz5pQ1QG-FkWDyvgv7IfuVZSH5Q6LyFI5A8rg,3645 -cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=Ld_bbbqQFz12dObHxIkzEQzX0SWWP41RLSWkYSaKhqE,4213 -cryptography/hazmat/primitives/asymmetric/ec.py,sha256=Vf5ig2PcS3PVnsb5N49Kx1uIkFBJyhg4BWXThDz5cug,12999 -cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=jZW5cs472wXXV3eB0sE1b8w64gdazwwU0_MT5UOTiXs,3700 -cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=yAetgn2f2JYf0BO8MapGzXeThsvSMG5LmUCrxVOidAA,3729 -cryptography/hazmat/primitives/asymmetric/padding.py,sha256=eZcvUqVLbe3u48SunLdeniaPlV4-k6pwBl67OW4jSy8,2885 -cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=kegiZAGeb6yJISjpxpaAEpM3wGdfhJSU9RgZpVwKYwk,7967 -cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996 -cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790 -cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=_4nQeZ3yJ3Lg0RpXnaqA-1yt6vbx1F-wzLcaZHwSpeE,3613 -cryptography/hazmat/primitives/asymmetric/x448.py,sha256=WKBLtuVfJqiBRro654fGaQAlvsKbqbNkK7c4A_ZCdV0,3642 -cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680 -cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-310.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-310.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-310.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-310.pyc,, 
-cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634 -cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=S04j4NdFCBDt5KLIvbYohohRO--MCWhTia_lkVt8xD8,4542 -cryptography/hazmat/primitives/ciphers/base.py,sha256=aBC7HHBBoixebmparVr0UlODs3VD0A7B6oz_AaRjDv8,4253 -cryptography/hazmat/primitives/ciphers/modes.py,sha256=20stpwhDtbAvpH0SMf9EDHIciwmTF-JMBUOZ9bU8WiQ,8318 -cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338 -cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422 -cryptography/hazmat/primitives/hashes.py,sha256=M8BrlKB3U6DEtHvWTV5VRjpteHv1kS3Zxm_Bsk04cr8,5184 -cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423 -cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750 -cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/argon2.py,sha256=UFDNXG0v-rw3DqAQTB1UQAsQC2M5Ejg0k_6OCyhLKus,460 -cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=Ua8KoLXXnzgsrAUmHpyKymaPt8aPRP0EHEaBz7QCQ9I,3737 -cryptography/hazmat/primitives/kdf/hkdf.py,sha256=2HlP_huUzGAy_FZWOn8L9qP9B5ke7XKmbiER7_2l__Q,3043 -cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=tGH5dQGEVaKYMEbgeqlaSPpqhlDMoF0xwCxk2vuhcTc,9211 -cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1WIwhELR0w8ztTpTu8BrFiYWmK3hUfJq08I79TxwieE,1957 -cryptography/hazmat/primitives/kdf/scrypt.py,sha256=XyWUdUUmhuI9V6TqAPOvujCSMGv1XQdg0a21IWCmO-U,590 -cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=zLTcF665QFvXX2f8TS7fmBZTteXpFjKahzfjjQcCJyw,1999 -cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650 -cryptography/hazmat/primitives/padding.py,sha256=QT-U-NvV2eQGO1wVPbDiNGNSc9keRDS-ig5cQOrLz0E,1865 -cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355 -cryptography/hazmat/primitives/serialization/__init__.py,sha256=Q7uTgDlt7n3WfsMT6jYwutC6DIg_7SEeoAm1GHZ5B5E,1705 -cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615 -cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=mS9cFNG4afzvseoc5e1MWoY2VskfL8N8Y_OFjl67luY,5104 -cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=5OR_Tkysxaprn4FegvJIfbep9rJ9wok6FLWvWwQ5-Mg,13943 -cryptography/hazmat/primitives/serialization/ssh.py,sha256=CeDSD_KmtcpQDSUg8QY3jruZDOKjjWBKcUByDOGQXnY,53693 
-cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258 -cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-310.pyc,, -cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-310.pyc,, -cryptography/hazmat/primitives/twofactor/hotp.py,sha256=ivZo5BrcCGWLsqql4nZV0XXCjyGPi_iHfDFltGlOJwk,3256 -cryptography/hazmat/primitives/twofactor/totp.py,sha256=m5LPpRL00kp4zY8gTjr55Hfz9aMlPS53kHmVkSQCmdY,1652 -cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -cryptography/utils.py,sha256=r90VtamCjiba37omFqp2aa--eEQVXiQIMrvWeLT-BY4,4397 -cryptography/x509/__init__.py,sha256=xloN0swseNx-m2WFZmCA17gOoxQWqeU82UVjEdJBePQ,8257 -cryptography/x509/__pycache__/__init__.cpython-310.pyc,, -cryptography/x509/__pycache__/base.cpython-310.pyc,, -cryptography/x509/__pycache__/certificate_transparency.cpython-310.pyc,, -cryptography/x509/__pycache__/extensions.cpython-310.pyc,, -cryptography/x509/__pycache__/general_name.cpython-310.pyc,, -cryptography/x509/__pycache__/name.cpython-310.pyc,, -cryptography/x509/__pycache__/ocsp.cpython-310.pyc,, -cryptography/x509/__pycache__/oid.cpython-310.pyc,, -cryptography/x509/__pycache__/verification.cpython-310.pyc,, -cryptography/x509/base.py,sha256=OrmTw3y8B6AE_nGXQPN8x9kq-d7rDWeH13gCq6T6D6U,27997 -cryptography/x509/certificate_transparency.py,sha256=JqoOIDhlwInrYMFW6IFn77WJ0viF-PB_rlZV3vs9MYc,797 -cryptography/x509/extensions.py,sha256=QxYrqR6SF1qzR9ZraP8wDiIczlEVlAFuwDRVcltB6Tk,77724 -cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836 -cryptography/x509/name.py,sha256=oi0CqY_B72CEJgNXy9XkJya26QWXfwOeGJBYfXjRYvI,15121 -cryptography/x509/ocsp.py,sha256=Yey6NdFV1MPjop24Mj_VenjEpg3kUaMopSWOK0AbeBs,12699 -cryptography/x509/oid.py,sha256=BUzgXXGVWilkBkdKPTm9R4qElE9gAGHgdYPMZAp7PJo,931 -cryptography/x509/verification.py,sha256=gR2C2c-XZQtblZhT5T5vjSKOtCb74ef2alPVmEcwFlM,958 diff --git a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/WHEEL b/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/WHEEL deleted file mode 100644 index 81333fb..0000000 --- a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: maturin (1.8.6) -Root-Is-Purelib: false -Tag: cp37-abi3-manylinux_2_34_x86_64 - diff --git a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/licenses/LICENSE deleted file mode 100644 index b11f379..0000000 --- a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/licenses/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made -under the terms of *both* these licenses. diff --git a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/licenses/LICENSE.APACHE b/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/licenses/LICENSE.APACHE deleted file mode 100644 index 62589ed..0000000 --- a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/licenses/LICENSE.APACHE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/licenses/LICENSE.BSD b/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/licenses/LICENSE.BSD deleted file mode 100644 index ec1a29d..0000000 --- a/venv/lib/python3.10/site-packages/cryptography-45.0.5.dist-info/licenses/LICENSE.BSD +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) Individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - 3. Neither the name of PyCA Cryptography nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.10/site-packages/cryptography/__about__.py b/venv/lib/python3.10/site-packages/cryptography/__about__.py deleted file mode 100644 index 97168d2..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/__about__.py +++ /dev/null @@ -1,17 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -__all__ = [ - "__author__", - "__copyright__", - "__version__", -] - -__version__ = "45.0.5" - - -__author__ = "The Python Cryptographic Authority and individual contributors" -__copyright__ = f"Copyright 2013-2025 {__author__}" diff --git a/venv/lib/python3.10/site-packages/cryptography/__init__.py b/venv/lib/python3.10/site-packages/cryptography/__init__.py deleted file mode 100644 index e8febfb..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import sys -import warnings - -from cryptography import utils -from cryptography.__about__ import __author__, __copyright__, __version__ - -__all__ = [ - "__author__", - "__copyright__", - "__version__", -] - -if sys.version_info[:2] == (3, 7): - warnings.warn( - "Python 3.7 is no longer supported by the Python core team " - "and support for it is deprecated in cryptography. 
The next release " - "of cryptography will remove support for Python 3.7.", - utils.CryptographyDeprecationWarning, - stacklevel=2, - ) diff --git a/venv/lib/python3.10/site-packages/cryptography/__pycache__/__about__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/__pycache__/__about__.cpython-310.pyc deleted file mode 100644 index ef23664..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/__pycache__/__about__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 9cba563..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/__pycache__/exceptions.cpython-310.pyc deleted file mode 100644 index 709c8a2..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/__pycache__/exceptions.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/__pycache__/fernet.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/__pycache__/fernet.cpython-310.pyc deleted file mode 100644 index b8d1ef1..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/__pycache__/fernet.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc deleted file mode 100644 index 5287539..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/exceptions.py b/venv/lib/python3.10/site-packages/cryptography/exceptions.py deleted file mode 100644 index fe125ea..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/exceptions.py +++ /dev/null @@ -1,52 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography.hazmat.bindings._rust import exceptions as rust_exceptions - -if typing.TYPE_CHECKING: - from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -_Reasons = rust_exceptions._Reasons - - -class UnsupportedAlgorithm(Exception): - def __init__(self, message: str, reason: _Reasons | None = None) -> None: - super().__init__(message) - self._reason = reason - - -class AlreadyFinalized(Exception): - pass - - -class AlreadyUpdated(Exception): - pass - - -class NotYetFinalized(Exception): - pass - - -class InvalidTag(Exception): - pass - - -class InvalidSignature(Exception): - pass - - -class InternalError(Exception): - def __init__( - self, msg: str, err_code: list[rust_openssl.OpenSSLError] - ) -> None: - super().__init__(msg) - self.err_code = err_code - - -class InvalidKey(Exception): - pass diff --git a/venv/lib/python3.10/site-packages/cryptography/fernet.py b/venv/lib/python3.10/site-packages/cryptography/fernet.py deleted file mode 100644 index c6744ae..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/fernet.py +++ /dev/null @@ -1,224 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import base64 -import binascii -import os -import time -import typing -from collections.abc import Iterable - -from cryptography import utils -from cryptography.exceptions import InvalidSignature -from cryptography.hazmat.primitives import hashes, padding -from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes -from cryptography.hazmat.primitives.hmac import HMAC - - -class InvalidToken(Exception): - pass - - -_MAX_CLOCK_SKEW = 60 - - -class Fernet: - def __init__( - self, - key: bytes | str, - backend: typing.Any = None, - ) -> None: - try: - key = base64.urlsafe_b64decode(key) - except binascii.Error as exc: - raise ValueError( - "Fernet key must be 32 url-safe base64-encoded bytes." - ) from exc - if len(key) != 32: - raise ValueError( - "Fernet key must be 32 url-safe base64-encoded bytes." 
- ) - - self._signing_key = key[:16] - self._encryption_key = key[16:] - - @classmethod - def generate_key(cls) -> bytes: - return base64.urlsafe_b64encode(os.urandom(32)) - - def encrypt(self, data: bytes) -> bytes: - return self.encrypt_at_time(data, int(time.time())) - - def encrypt_at_time(self, data: bytes, current_time: int) -> bytes: - iv = os.urandom(16) - return self._encrypt_from_parts(data, current_time, iv) - - def _encrypt_from_parts( - self, data: bytes, current_time: int, iv: bytes - ) -> bytes: - utils._check_bytes("data", data) - - padder = padding.PKCS7(algorithms.AES.block_size).padder() - padded_data = padder.update(data) + padder.finalize() - encryptor = Cipher( - algorithms.AES(self._encryption_key), - modes.CBC(iv), - ).encryptor() - ciphertext = encryptor.update(padded_data) + encryptor.finalize() - - basic_parts = ( - b"\x80" - + current_time.to_bytes(length=8, byteorder="big") - + iv - + ciphertext - ) - - h = HMAC(self._signing_key, hashes.SHA256()) - h.update(basic_parts) - hmac = h.finalize() - return base64.urlsafe_b64encode(basic_parts + hmac) - - def decrypt(self, token: bytes | str, ttl: int | None = None) -> bytes: - timestamp, data = Fernet._get_unverified_token_data(token) - if ttl is None: - time_info = None - else: - time_info = (ttl, int(time.time())) - return self._decrypt_data(data, timestamp, time_info) - - def decrypt_at_time( - self, token: bytes | str, ttl: int, current_time: int - ) -> bytes: - if ttl is None: - raise ValueError( - "decrypt_at_time() can only be used with a non-None ttl" - ) - timestamp, data = Fernet._get_unverified_token_data(token) - return self._decrypt_data(data, timestamp, (ttl, current_time)) - - def extract_timestamp(self, token: bytes | str) -> int: - timestamp, data = Fernet._get_unverified_token_data(token) - # Verify the token was not tampered with. 
- self._verify_signature(data) - return timestamp - - @staticmethod - def _get_unverified_token_data(token: bytes | str) -> tuple[int, bytes]: - if not isinstance(token, (str, bytes)): - raise TypeError("token must be bytes or str") - - try: - data = base64.urlsafe_b64decode(token) - except (TypeError, binascii.Error): - raise InvalidToken - - if not data or data[0] != 0x80: - raise InvalidToken - - if len(data) < 9: - raise InvalidToken - - timestamp = int.from_bytes(data[1:9], byteorder="big") - return timestamp, data - - def _verify_signature(self, data: bytes) -> None: - h = HMAC(self._signing_key, hashes.SHA256()) - h.update(data[:-32]) - try: - h.verify(data[-32:]) - except InvalidSignature: - raise InvalidToken - - def _decrypt_data( - self, - data: bytes, - timestamp: int, - time_info: tuple[int, int] | None, - ) -> bytes: - if time_info is not None: - ttl, current_time = time_info - if timestamp + ttl < current_time: - raise InvalidToken - - if current_time + _MAX_CLOCK_SKEW < timestamp: - raise InvalidToken - - self._verify_signature(data) - - iv = data[9:25] - ciphertext = data[25:-32] - decryptor = Cipher( - algorithms.AES(self._encryption_key), modes.CBC(iv) - ).decryptor() - plaintext_padded = decryptor.update(ciphertext) - try: - plaintext_padded += decryptor.finalize() - except ValueError: - raise InvalidToken - unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() - - unpadded = unpadder.update(plaintext_padded) - try: - unpadded += unpadder.finalize() - except ValueError: - raise InvalidToken - return unpadded - - -class MultiFernet: - def __init__(self, fernets: Iterable[Fernet]): - fernets = list(fernets) - if not fernets: - raise ValueError( - "MultiFernet requires at least one Fernet instance" - ) - self._fernets = fernets - - def encrypt(self, msg: bytes) -> bytes: - return self.encrypt_at_time(msg, int(time.time())) - - def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes: - return self._fernets[0].encrypt_at_time(msg, current_time) - - def rotate(self, msg: bytes | str) -> bytes: - timestamp, data = Fernet._get_unverified_token_data(msg) - for f in self._fernets: - try: - p = f._decrypt_data(data, timestamp, None) - break - except InvalidToken: - pass - else: - raise InvalidToken - - iv = os.urandom(16) - return self._fernets[0]._encrypt_from_parts(p, timestamp, iv) - - def decrypt(self, msg: bytes | str, ttl: int | None = None) -> bytes: - for f in self._fernets: - try: - return f.decrypt(msg, ttl) - except InvalidToken: - pass - raise InvalidToken - - def decrypt_at_time( - self, msg: bytes | str, ttl: int, current_time: int - ) -> bytes: - for f in self._fernets: - try: - return f.decrypt_at_time(msg, ttl, current_time) - except InvalidToken: - pass - raise InvalidToken - - def extract_timestamp(self, msg: bytes | str) -> int: - for f in self._fernets: - try: - return f.extract_timestamp(msg) - except InvalidToken: - pass - raise InvalidToken diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/__init__.py deleted file mode 100644 index b9f1187..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -""" -Hazardous Materials - -This is a "Hazardous Materials" module. 
You should ONLY use it if you're -100% absolutely sure that you know what you're doing because this module -is full of land mines, dragons, and dinosaurs with laser guns. -""" diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index f445105..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-310.pyc deleted file mode 100644 index 74cfde1..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/_oid.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/_oid.py deleted file mode 100644 index 249b4dc..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/_oid.py +++ /dev/null @@ -1,348 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import ( - ObjectIdentifier as ObjectIdentifier, -) -from cryptography.hazmat.primitives import hashes - - -class ExtensionOID: - SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") - SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") - KEY_USAGE = ObjectIdentifier("2.5.29.15") - PRIVATE_KEY_USAGE_PERIOD = ObjectIdentifier("2.5.29.16") - SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") - ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") - BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") - NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") - CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") - CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") - POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") - AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") - POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") - EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") - FRESHEST_CRL = ObjectIdentifier("2.5.29.46") - INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") - ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28") - AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") - SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") - OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") - TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24") - CRL_NUMBER = ObjectIdentifier("2.5.29.20") - DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27") - PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier( - "1.3.6.1.4.1.11129.2.4.2" - ) - PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3") - SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5") - MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7") - ADMISSIONS = ObjectIdentifier("1.3.36.8.3.3") - - -class OCSPExtensionOID: - NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2") - ACCEPTABLE_RESPONSES = ObjectIdentifier("1.3.6.1.5.5.7.48.1.4") - - -class CRLEntryExtensionOID: - CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") - CRL_REASON = ObjectIdentifier("2.5.29.21") - INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") - - 
-class NameOID: - COMMON_NAME = ObjectIdentifier("2.5.4.3") - COUNTRY_NAME = ObjectIdentifier("2.5.4.6") - LOCALITY_NAME = ObjectIdentifier("2.5.4.7") - STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") - STREET_ADDRESS = ObjectIdentifier("2.5.4.9") - ORGANIZATION_IDENTIFIER = ObjectIdentifier("2.5.4.97") - ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") - ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") - SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") - SURNAME = ObjectIdentifier("2.5.4.4") - GIVEN_NAME = ObjectIdentifier("2.5.4.42") - TITLE = ObjectIdentifier("2.5.4.12") - INITIALS = ObjectIdentifier("2.5.4.43") - GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") - X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45") - DN_QUALIFIER = ObjectIdentifier("2.5.4.46") - PSEUDONYM = ObjectIdentifier("2.5.4.65") - USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1") - DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") - EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") - JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") - JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") - JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( - "1.3.6.1.4.1.311.60.2.1.2" - ) - BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") - POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16") - POSTAL_CODE = ObjectIdentifier("2.5.4.17") - INN = ObjectIdentifier("1.2.643.3.131.1.1") - OGRN = ObjectIdentifier("1.2.643.100.1") - SNILS = ObjectIdentifier("1.2.643.100.3") - UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") - - -class SignatureAlgorithmOID: - RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") - RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") - # This is an alternate OID for RSA with SHA1 that is occasionally seen - _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29") - RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") - RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") - RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") - RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") - RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13") - RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14") - RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15") - RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16") - RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") - ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") - ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") - ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") - ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") - ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") - ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9") - ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10") - ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11") - ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12") - DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") - DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") - DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") - DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3") - DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4") - ED25519 = ObjectIdentifier("1.3.101.112") - ED448 = ObjectIdentifier("1.3.101.113") - GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3") - GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2") 
- GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3") - - -_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, hashes.HashAlgorithm | None] = { - SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), - SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(), - SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(), - SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(), - SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(), - SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(), - SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(), - SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(), - SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(), - SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(), - SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(), - SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), - SignatureAlgorithmOID.ED25519: None, - SignatureAlgorithmOID.ED448: None, - SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None, - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None, - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None, -} - - -class HashAlgorithmOID: - SHA1 = ObjectIdentifier("1.3.14.3.2.26") - SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.2.4") - SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.2.1") - SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.2.2") - SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.2.3") - SHA3_224 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.224") - SHA3_256 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.256") - SHA3_384 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.384") - SHA3_512 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.512") - - -class PublicKeyAlgorithmOID: - DSA = ObjectIdentifier("1.2.840.10040.4.1") - EC_PUBLIC_KEY = ObjectIdentifier("1.2.840.10045.2.1") - RSAES_PKCS1_v1_5 = ObjectIdentifier("1.2.840.113549.1.1.1") - RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") - X25519 = ObjectIdentifier("1.3.101.110") - X448 = ObjectIdentifier("1.3.101.111") - ED25519 = ObjectIdentifier("1.3.101.112") - ED448 = ObjectIdentifier("1.3.101.113") - - -class ExtendedKeyUsageOID: - SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") - CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") - CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") - EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") - TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") - OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") - ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0") - SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2") - KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5") - IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17") - BUNDLE_SECURITY = ObjectIdentifier("1.3.6.1.5.5.7.3.35") - CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4") - - -class OtherNameFormOID: - PERMANENT_IDENTIFIER = 
ObjectIdentifier("1.3.6.1.5.5.7.8.3") - HW_MODULE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.4") - DNS_SRV = ObjectIdentifier("1.3.6.1.5.5.7.8.7") - NAI_REALM = ObjectIdentifier("1.3.6.1.5.5.7.8.8") - SMTP_UTF8_MAILBOX = ObjectIdentifier("1.3.6.1.5.5.7.8.9") - ACP_NODE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.10") - BUNDLE_EID = ObjectIdentifier("1.3.6.1.5.5.7.8.11") - - -class AuthorityInformationAccessOID: - CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") - OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") - - -class SubjectInformationAccessOID: - CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5") - - -class CertificatePoliciesOID: - CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") - CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") - ANY_POLICY = ObjectIdentifier("2.5.29.32.0") - - -class AttributeOID: - CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7") - UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") - - -_OID_NAMES = { - NameOID.COMMON_NAME: "commonName", - NameOID.COUNTRY_NAME: "countryName", - NameOID.LOCALITY_NAME: "localityName", - NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", - NameOID.STREET_ADDRESS: "streetAddress", - NameOID.ORGANIZATION_NAME: "organizationName", - NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", - NameOID.SERIAL_NUMBER: "serialNumber", - NameOID.SURNAME: "surname", - NameOID.GIVEN_NAME: "givenName", - NameOID.TITLE: "title", - NameOID.GENERATION_QUALIFIER: "generationQualifier", - NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier", - NameOID.DN_QUALIFIER: "dnQualifier", - NameOID.PSEUDONYM: "pseudonym", - NameOID.USER_ID: "userID", - NameOID.DOMAIN_COMPONENT: "domainComponent", - NameOID.EMAIL_ADDRESS: "emailAddress", - NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", - NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", - NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( - "jurisdictionStateOrProvinceName" - ), - NameOID.BUSINESS_CATEGORY: "businessCategory", - NameOID.POSTAL_ADDRESS: "postalAddress", - NameOID.POSTAL_CODE: "postalCode", - NameOID.INN: "INN", - NameOID.OGRN: "OGRN", - NameOID.SNILS: "SNILS", - NameOID.UNSTRUCTURED_NAME: "unstructuredName", - SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", - SignatureAlgorithmOID.RSASSA_PSS: "rsassaPss", - SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", - SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", - SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", - SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", - SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", - SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", - SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", - SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", - SignatureAlgorithmOID.ED25519: "ed25519", - SignatureAlgorithmOID.ED448: "ed448", - SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: ( - "GOST R 34.11-94 with GOST R 34.10-2001" - ), - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: ( - "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)" - ), - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: ( - "GOST R 34.10-2012 with GOST R 
34.11-2012 (512 bit)" - ), - HashAlgorithmOID.SHA1: "sha1", - HashAlgorithmOID.SHA224: "sha224", - HashAlgorithmOID.SHA256: "sha256", - HashAlgorithmOID.SHA384: "sha384", - HashAlgorithmOID.SHA512: "sha512", - HashAlgorithmOID.SHA3_224: "sha3_224", - HashAlgorithmOID.SHA3_256: "sha3_256", - HashAlgorithmOID.SHA3_384: "sha3_384", - HashAlgorithmOID.SHA3_512: "sha3_512", - PublicKeyAlgorithmOID.DSA: "dsaEncryption", - PublicKeyAlgorithmOID.EC_PUBLIC_KEY: "id-ecPublicKey", - PublicKeyAlgorithmOID.RSAES_PKCS1_v1_5: "rsaEncryption", - PublicKeyAlgorithmOID.X25519: "X25519", - PublicKeyAlgorithmOID.X448: "X448", - ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", - ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", - ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", - ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", - ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", - ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", - ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin", - ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC", - ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", - ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", - ExtensionOID.KEY_USAGE: "keyUsage", - ExtensionOID.PRIVATE_KEY_USAGE_PERIOD: "privateKeyUsagePeriod", - ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", - ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", - ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", - ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( - "signedCertificateTimestampList" - ), - ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: ( - "signedCertificateTimestampList" - ), - ExtensionOID.PRECERT_POISON: "ctPoison", - ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate", - ExtensionOID.ADMISSIONS: "Admissions", - CRLEntryExtensionOID.CRL_REASON: "cRLReason", - CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", - CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", - ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", - ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", - ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", - ExtensionOID.POLICY_MAPPINGS: "policyMappings", - ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", - ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", - ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", - ExtensionOID.FRESHEST_CRL: "freshestCRL", - ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", - ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint", - ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", - ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", - ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", - ExtensionOID.CRL_NUMBER: "cRLNumber", - ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator", - ExtensionOID.TLS_FEATURE: "TLSFeature", - AuthorityInformationAccessOID.OCSP: "OCSP", - AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", - SubjectInformationAccessOID.CA_REPOSITORY: "caRepository", - CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", - CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", - OCSPExtensionOID.NONCE: "OCSPNonce", - AttributeOID.CHALLENGE_PASSWORD: "challengePassword", -} diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/__init__.py deleted file mode 100644 index b4400aa..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# This 
file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from typing import Any - - -def default_backend() -> Any: - from cryptography.hazmat.backends.openssl.backend import backend - - return backend diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index c22081c..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__init__.py deleted file mode 100644 index 51b0447..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.backends.openssl.backend import backend - -__all__ = ["backend"] diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index e661f9c..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-310.pyc deleted file mode 100644 index 1d5b45c..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/backend.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/backend.py deleted file mode 100644 index 361011a..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/backend.py +++ /dev/null @@ -1,308 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
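For reference, the deleted backends/__init__.py above reduces default_backend() to a thin shim that returns the Rust-backed OpenSSL backend object, so current callers can simply omit the backend argument. A minimal sketch (illustrative only, assuming cryptography remains installed from PyPI into a fresh venv rather than committed to the repo):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

# Legacy style: pass the shim explicitly.
digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"hello")
print(digest.finalize().hex())

# Modern style: the backend parameter defaults to None and is ignored.
digest = hashes.Hash(hashes.SHA256())
digest.update(b"hello")
print(digest.finalize().hex())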
- -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.bindings.openssl import binding -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding -from cryptography.hazmat.primitives.asymmetric import ec -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils -from cryptography.hazmat.primitives.asymmetric.padding import ( - MGF1, - OAEP, - PSS, - PKCS1v15, -) -from cryptography.hazmat.primitives.ciphers import ( - CipherAlgorithm, -) -from cryptography.hazmat.primitives.ciphers.algorithms import ( - AES, -) -from cryptography.hazmat.primitives.ciphers.modes import ( - CBC, - Mode, -) - - -class Backend: - """ - OpenSSL API binding interfaces. - """ - - name = "openssl" - - # TripleDES encryption is disallowed/deprecated throughout 2023 in - # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA). - _fips_ciphers = (AES,) - # Sometimes SHA1 is still permissible. That logic is contained - # within the various *_supported methods. - _fips_hashes = ( - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - hashes.SHA512_224, - hashes.SHA512_256, - hashes.SHA3_224, - hashes.SHA3_256, - hashes.SHA3_384, - hashes.SHA3_512, - hashes.SHAKE128, - hashes.SHAKE256, - ) - _fips_ecdh_curves = ( - ec.SECP224R1, - ec.SECP256R1, - ec.SECP384R1, - ec.SECP521R1, - ) - _fips_rsa_min_key_size = 2048 - _fips_rsa_min_public_exponent = 65537 - _fips_dsa_min_modulus = 1 << 2048 - _fips_dh_min_key_size = 2048 - _fips_dh_min_modulus = 1 << _fips_dh_min_key_size - - def __init__(self) -> None: - self._binding = binding.Binding() - self._ffi = self._binding.ffi - self._lib = self._binding.lib - self._fips_enabled = rust_openssl.is_fips_enabled() - - def __repr__(self) -> str: - return ( - f"" - ) - - def openssl_assert(self, ok: bool) -> None: - return binding._openssl_assert(ok) - - def _enable_fips(self) -> None: - # This function enables FIPS mode for OpenSSL 3.0.0 on installs that - # have the FIPS provider installed properly. - rust_openssl.enable_fips(rust_openssl._providers) - assert rust_openssl.is_fips_enabled() - self._fips_enabled = rust_openssl.is_fips_enabled() - - def openssl_version_text(self) -> str: - """ - Friendly string name of the loaded OpenSSL library. This is not - necessarily the same version as it was compiled against. - - Example: OpenSSL 3.2.1 30 Jan 2024 - """ - return rust_openssl.openssl_version_text() - - def openssl_version_number(self) -> int: - return rust_openssl.openssl_version() - - def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - if self._fips_enabled and not isinstance(algorithm, self._fips_hashes): - return False - - return rust_openssl.hashes.hash_supported(algorithm) - - def signature_hash_supported( - self, algorithm: hashes.HashAlgorithm - ) -> bool: - # Dedicated check for hashing algorithm use in message digest for - # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption). 
- if self._fips_enabled and isinstance(algorithm, hashes.SHA1): - return False - return self.hash_supported(algorithm) - - def scrypt_supported(self) -> bool: - if self._fips_enabled: - return False - else: - return hasattr(rust_openssl.kdf.Scrypt, "derive") - - def argon2_supported(self) -> bool: - if self._fips_enabled: - return False - else: - return hasattr(rust_openssl.kdf.Argon2id, "derive") - - def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - # FIPS mode still allows SHA1 for HMAC - if self._fips_enabled and isinstance(algorithm, hashes.SHA1): - return True - if rust_openssl.CRYPTOGRAPHY_IS_AWSLC: - return isinstance( - algorithm, - ( - hashes.SHA1, - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - hashes.SHA512_224, - hashes.SHA512_256, - ), - ) - return self.hash_supported(algorithm) - - def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool: - if self._fips_enabled: - # FIPS mode requires AES. TripleDES is disallowed/deprecated in - # FIPS 140-3. - if not isinstance(cipher, self._fips_ciphers): - return False - - return rust_openssl.ciphers.cipher_supported(cipher, mode) - - def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - return self.hmac_supported(algorithm) - - def _consume_errors(self) -> list[rust_openssl.OpenSSLError]: - return rust_openssl.capture_error_stack() - - def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - if self._fips_enabled and isinstance(algorithm, hashes.SHA1): - return False - - return isinstance( - algorithm, - ( - hashes.SHA1, - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - ), - ) - - def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool: - if isinstance(padding, PKCS1v15): - return True - elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): - # FIPS 186-4 only allows salt length == digest length for PSS - # It is technically acceptable to set an explicit salt length - # equal to the digest length and this will incorrectly fail, but - # since we don't do that in the tests and this method is - # private, we'll ignore that until we need to do otherwise. 
- if ( - self._fips_enabled - and padding._salt_length != PSS.DIGEST_LENGTH - ): - return False - return self.hash_supported(padding._mgf._algorithm) - elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): - return self._oaep_hash_supported( - padding._mgf._algorithm - ) and self._oaep_hash_supported(padding._algorithm) - else: - return False - - def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool: - if self._fips_enabled and isinstance(padding, PKCS1v15): - return False - else: - return self.rsa_padding_supported(padding) - - def dsa_supported(self) -> bool: - return ( - not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not self._fips_enabled - ) - - def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - if not self.dsa_supported(): - return False - return self.signature_hash_supported(algorithm) - - def cmac_algorithm_supported(self, algorithm) -> bool: - return self.cipher_supported( - algorithm, CBC(b"\x00" * algorithm.block_size) - ) - - def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool: - if self._fips_enabled and not isinstance( - curve, self._fips_ecdh_curves - ): - return False - - return rust_openssl.ec.curve_supported(curve) - - def elliptic_curve_signature_algorithm_supported( - self, - signature_algorithm: ec.EllipticCurveSignatureAlgorithm, - curve: ec.EllipticCurve, - ) -> bool: - # We only support ECDSA right now. - if not isinstance(signature_algorithm, ec.ECDSA): - return False - - return self.elliptic_curve_supported(curve) and ( - isinstance(signature_algorithm.algorithm, asym_utils.Prehashed) - or self.hash_supported(signature_algorithm.algorithm) - ) - - def elliptic_curve_exchange_algorithm_supported( - self, algorithm: ec.ECDH, curve: ec.EllipticCurve - ) -> bool: - return self.elliptic_curve_supported(curve) and isinstance( - algorithm, ec.ECDH - ) - - def dh_supported(self) -> bool: - return ( - not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC - ) - - def dh_x942_serialization_supported(self) -> bool: - return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1 - - def x25519_supported(self) -> bool: - if self._fips_enabled: - return False - return True - - def x448_supported(self) -> bool: - if self._fips_enabled: - return False - return ( - not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL - and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC - ) - - def ed25519_supported(self) -> bool: - if self._fips_enabled: - return False - return True - - def ed448_supported(self) -> bool: - if self._fips_enabled: - return False - return ( - not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL - and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC - ) - - def ecdsa_deterministic_supported(self) -> bool: - return ( - rust_openssl.CRYPTOGRAPHY_OPENSSL_320_OR_GREATER - and not self._fips_enabled - ) - - def poly1305_supported(self) -> bool: - if self._fips_enabled: - return False - return True - - def pkcs7_supported(self) -> bool: - return ( - not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC - ) - - -backend = Backend() diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/__init__.py deleted file mode 100644 index b509336..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the 
terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index fc8f59c..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust.abi3.so deleted file mode 100755 index efeab9c..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust.abi3.so and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi deleted file mode 100644 index 2f4eef4..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi +++ /dev/null @@ -1,37 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import padding -from cryptography.utils import Buffer - -class PKCS7PaddingContext(padding.PaddingContext): - def __init__(self, block_size: int) -> None: ... - def update(self, data: Buffer) -> bytes: ... - def finalize(self) -> bytes: ... - -class ANSIX923PaddingContext(padding.PaddingContext): - def __init__(self, block_size: int) -> None: ... - def update(self, data: Buffer) -> bytes: ... - def finalize(self) -> bytes: ... - -class PKCS7UnpaddingContext(padding.PaddingContext): - def __init__(self, block_size: int) -> None: ... - def update(self, data: Buffer) -> bytes: ... - def finalize(self) -> bytes: ... - -class ANSIX923UnpaddingContext(padding.PaddingContext): - def __init__(self, block_size: int) -> None: ... - def update(self, data: Buffer) -> bytes: ... - def finalize(self) -> bytes: ... - -class ObjectIdentifier: - def __init__(self, value: str) -> None: ... - @property - def dotted_string(self) -> str: ... - @property - def _name(self) -> str: ... - -T = typing.TypeVar("T") diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi deleted file mode 100644 index 8010008..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi +++ /dev/null @@ -1,8 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -lib = typing.Any -ffi = typing.Any diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi deleted file mode 100644 index 3b5f208..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi +++ /dev/null @@ -1,7 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository -# for complete details. - -def decode_dss_signature(signature: bytes) -> tuple[int, int]: ... -def encode_dss_signature(r: int, s: int) -> bytes: ... -def parse_spki_for_data(data: bytes) -> bytes: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi deleted file mode 100644 index 09f46b1..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi +++ /dev/null @@ -1,17 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -class _Reasons: - BACKEND_MISSING_INTERFACE: _Reasons - UNSUPPORTED_HASH: _Reasons - UNSUPPORTED_CIPHER: _Reasons - UNSUPPORTED_PADDING: _Reasons - UNSUPPORTED_MGF: _Reasons - UNSUPPORTED_PUBLIC_KEY_ALGORITHM: _Reasons - UNSUPPORTED_ELLIPTIC_CURVE: _Reasons - UNSUPPORTED_SERIALIZATION: _Reasons - UNSUPPORTED_X509: _Reasons - UNSUPPORTED_EXCHANGE_ALGORITHM: _Reasons - UNSUPPORTED_DIFFIE_HELLMAN: _Reasons - UNSUPPORTED_MAC: _Reasons diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi deleted file mode 100644 index 103e96c..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi +++ /dev/null @@ -1,117 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import datetime -from collections.abc import Iterator - -from cryptography import x509 -from cryptography.hazmat.primitives import hashes, serialization -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes -from cryptography.x509 import ocsp - -class OCSPRequest: - @property - def issuer_key_hash(self) -> bytes: ... - @property - def issuer_name_hash(self) -> bytes: ... - @property - def hash_algorithm(self) -> hashes.HashAlgorithm: ... - @property - def serial_number(self) -> int: ... - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - @property - def extensions(self) -> x509.Extensions: ... - -class OCSPResponse: - @property - def responses(self) -> Iterator[OCSPSingleResponse]: ... - @property - def response_status(self) -> ocsp.OCSPResponseStatus: ... - @property - def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... - @property - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: ... - @property - def signature(self) -> bytes: ... - @property - def tbs_response_bytes(self) -> bytes: ... - @property - def certificates(self) -> list[x509.Certificate]: ... - @property - def responder_key_hash(self) -> bytes | None: ... - @property - def responder_name(self) -> x509.Name | None: ... - @property - def produced_at(self) -> datetime.datetime: ... - @property - def produced_at_utc(self) -> datetime.datetime: ... - @property - def certificate_status(self) -> ocsp.OCSPCertStatus: ... - @property - def revocation_time(self) -> datetime.datetime | None: ... - @property - def revocation_time_utc(self) -> datetime.datetime | None: ... - @property - def revocation_reason(self) -> x509.ReasonFlags | None: ... - @property - def this_update(self) -> datetime.datetime: ... 
- @property - def this_update_utc(self) -> datetime.datetime: ... - @property - def next_update(self) -> datetime.datetime | None: ... - @property - def next_update_utc(self) -> datetime.datetime | None: ... - @property - def issuer_key_hash(self) -> bytes: ... - @property - def issuer_name_hash(self) -> bytes: ... - @property - def hash_algorithm(self) -> hashes.HashAlgorithm: ... - @property - def serial_number(self) -> int: ... - @property - def extensions(self) -> x509.Extensions: ... - @property - def single_extensions(self) -> x509.Extensions: ... - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - -class OCSPSingleResponse: - @property - def certificate_status(self) -> ocsp.OCSPCertStatus: ... - @property - def revocation_time(self) -> datetime.datetime | None: ... - @property - def revocation_time_utc(self) -> datetime.datetime | None: ... - @property - def revocation_reason(self) -> x509.ReasonFlags | None: ... - @property - def this_update(self) -> datetime.datetime: ... - @property - def this_update_utc(self) -> datetime.datetime: ... - @property - def next_update(self) -> datetime.datetime | None: ... - @property - def next_update_utc(self) -> datetime.datetime | None: ... - @property - def issuer_key_hash(self) -> bytes: ... - @property - def issuer_name_hash(self) -> bytes: ... - @property - def hash_algorithm(self) -> hashes.HashAlgorithm: ... - @property - def serial_number(self) -> int: ... - -def load_der_ocsp_request(data: bytes) -> ocsp.OCSPRequest: ... -def load_der_ocsp_response(data: bytes) -> ocsp.OCSPResponse: ... -def create_ocsp_request( - builder: ocsp.OCSPRequestBuilder, -) -> ocsp.OCSPRequest: ... -def create_ocsp_response( - status: ocsp.OCSPResponseStatus, - builder: ocsp.OCSPResponseBuilder | None, - private_key: PrivateKeyTypes | None, - hash_algorithm: hashes.HashAlgorithm | None, -) -> ocsp.OCSPResponse: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi deleted file mode 100644 index 5fb3cb2..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi +++ /dev/null @@ -1,75 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.bindings._rust.openssl import ( - aead, - ciphers, - cmac, - dh, - dsa, - ec, - ed448, - ed25519, - hashes, - hmac, - kdf, - keys, - poly1305, - rsa, - x448, - x25519, -) - -__all__ = [ - "aead", - "ciphers", - "cmac", - "dh", - "dsa", - "ec", - "ed448", - "ed25519", - "hashes", - "hmac", - "kdf", - "keys", - "openssl_version", - "openssl_version_text", - "poly1305", - "raise_openssl_error", - "rsa", - "x448", - "x25519", -] - -CRYPTOGRAPHY_IS_LIBRESSL: bool -CRYPTOGRAPHY_IS_BORINGSSL: bool -CRYPTOGRAPHY_IS_AWSLC: bool -CRYPTOGRAPHY_OPENSSL_300_OR_GREATER: bool -CRYPTOGRAPHY_OPENSSL_309_OR_GREATER: bool -CRYPTOGRAPHY_OPENSSL_320_OR_GREATER: bool -CRYPTOGRAPHY_OPENSSL_330_OR_GREATER: bool -CRYPTOGRAPHY_OPENSSL_350_OR_GREATER: bool - -class Providers: ... - -_legacy_provider_loaded: bool -_providers: Providers - -def openssl_version() -> int: ... -def openssl_version_text() -> str: ... -def raise_openssl_error() -> typing.NoReturn: ... -def capture_error_stack() -> list[OpenSSLError]: ... -def is_fips_enabled() -> bool: ... 
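The ocsp.pyi stub removed above describes the objects behind cryptography's public x509.ocsp module. A short sketch of how an OCSP request is typically built and serialized (cert.pem and issuer.pem are hypothetical input files):

from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.x509 import ocsp

with open("cert.pem", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read())
with open("issuer.pem", "rb") as f:
    issuer = x509.load_pem_x509_certificate(f.read())

# Build a request asking the responder for this certificate's revocation status.
builder = ocsp.OCSPRequestBuilder().add_certificate(cert, issuer, hashes.SHA256())
request = builder.build()
der = request.public_bytes(serialization.Encoding.DER)  # bytes to POST to the responder
print(request.serial_number, len(der))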
-def enable_fips(providers: Providers) -> None: ... - -class OpenSSLError: - @property - def lib(self) -> int: ... - @property - def reason(self) -> int: ... - @property - def reason_text(self) -> bytes: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi deleted file mode 100644 index 831fcd1..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi +++ /dev/null @@ -1,107 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from collections.abc import Sequence - -from cryptography.utils import Buffer - -class AESGCM: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - -class ChaCha20Poly1305: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key() -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - -class AESCCM: - def __init__(self, key: Buffer, tag_length: int = 16) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - -class AESSIV: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - data: Buffer, - associated_data: Sequence[Buffer] | None, - ) -> bytes: ... - def decrypt( - self, - data: Buffer, - associated_data: Sequence[Buffer] | None, - ) -> bytes: ... - -class AESOCB3: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - -class AESGCMSIV: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_key(bit_length: int) -> bytes: ... - def encrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... - def decrypt( - self, - nonce: Buffer, - data: Buffer, - associated_data: Buffer | None, - ) -> bytes: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi deleted file mode 100644 index a48fb01..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi +++ /dev/null @@ -1,38 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
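The aead.pyi stub deleted above covers the one-shot AEAD constructions (AES-GCM, ChaCha20-Poly1305, AES-CCM, AES-SIV, AES-OCB3, AES-GCM-SIV). A minimal AES-GCM sketch against that interface (key, nonce, and messages are illustrative):

import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
aesgcm = AESGCM(key)
nonce = os.urandom(12)  # 96-bit nonce; must never repeat for the same key
ciphertext = aesgcm.encrypt(nonce, b"secret data", b"associated data")
assert aesgcm.decrypt(nonce, ciphertext, b"associated data") == b"secret data"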
- -import typing - -from cryptography.hazmat.primitives import ciphers -from cryptography.hazmat.primitives.ciphers import modes - -@typing.overload -def create_encryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag -) -> ciphers.AEADEncryptionContext: ... -@typing.overload -def create_encryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None -) -> ciphers.CipherContext: ... -@typing.overload -def create_decryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag -) -> ciphers.AEADDecryptionContext: ... -@typing.overload -def create_decryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None -) -> ciphers.CipherContext: ... -def cipher_supported( - algorithm: ciphers.CipherAlgorithm, mode: modes.Mode -) -> bool: ... -def _advance( - ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int -) -> None: ... -def _advance_aad( - ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int -) -> None: ... - -class CipherContext: ... -class AEADEncryptionContext: ... -class AEADDecryptionContext: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi deleted file mode 100644 index 9c03508..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi +++ /dev/null @@ -1,18 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import ciphers - -class CMAC: - def __init__( - self, - algorithm: ciphers.BlockCipherAlgorithm, - backend: typing.Any = None, - ) -> None: ... - def update(self, data: bytes) -> None: ... - def finalize(self) -> bytes: ... - def verify(self, signature: bytes) -> None: ... - def copy(self) -> CMAC: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi deleted file mode 100644 index 08733d7..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi +++ /dev/null @@ -1,51 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import dh - -MIN_MODULUS_SIZE: int - -class DHPrivateKey: ... -class DHPublicKey: ... -class DHParameters: ... - -class DHPrivateNumbers: - def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: ... - def private_key(self, backend: typing.Any = None) -> dh.DHPrivateKey: ... - @property - def x(self) -> int: ... - @property - def public_numbers(self) -> DHPublicNumbers: ... - -class DHPublicNumbers: - def __init__( - self, y: int, parameter_numbers: DHParameterNumbers - ) -> None: ... - def public_key(self, backend: typing.Any = None) -> dh.DHPublicKey: ... - @property - def y(self) -> int: ... - @property - def parameter_numbers(self) -> DHParameterNumbers: ... - -class DHParameterNumbers: - def __init__(self, p: int, g: int, q: int | None = None) -> None: ... - def parameters(self, backend: typing.Any = None) -> dh.DHParameters: ... - @property - def p(self) -> int: ... 
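The cmac.pyi stub deleted above backs the public cryptography.hazmat.primitives.cmac module. A brief sketch of the update/finalize/verify flow (the key is random and purely illustrative):

import os
from cryptography.hazmat.primitives import cmac
from cryptography.hazmat.primitives.ciphers import algorithms

key = os.urandom(32)
tagger = cmac.CMAC(algorithms.AES(key))
tagger.update(b"message to authenticate")
tag = tagger.finalize()

verifier = cmac.CMAC(algorithms.AES(key))
verifier.update(b"message to authenticate")
verifier.verify(tag)  # raises cryptography.exceptions.InvalidSignature on mismatch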
- @property - def g(self) -> int: ... - @property - def q(self) -> int | None: ... - -def generate_parameters( - generator: int, key_size: int, backend: typing.Any = None -) -> dh.DHParameters: ... -def from_pem_parameters( - data: bytes, backend: typing.Any = None -) -> dh.DHParameters: ... -def from_der_parameters( - data: bytes, backend: typing.Any = None -) -> dh.DHParameters: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi deleted file mode 100644 index 0922a4c..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi +++ /dev/null @@ -1,41 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import dsa - -class DSAPrivateKey: ... -class DSAPublicKey: ... -class DSAParameters: ... - -class DSAPrivateNumbers: - def __init__(self, x: int, public_numbers: DSAPublicNumbers) -> None: ... - @property - def x(self) -> int: ... - @property - def public_numbers(self) -> DSAPublicNumbers: ... - def private_key(self, backend: typing.Any = None) -> dsa.DSAPrivateKey: ... - -class DSAPublicNumbers: - def __init__( - self, y: int, parameter_numbers: DSAParameterNumbers - ) -> None: ... - @property - def y(self) -> int: ... - @property - def parameter_numbers(self) -> DSAParameterNumbers: ... - def public_key(self, backend: typing.Any = None) -> dsa.DSAPublicKey: ... - -class DSAParameterNumbers: - def __init__(self, p: int, q: int, g: int) -> None: ... - @property - def p(self) -> int: ... - @property - def q(self) -> int: ... - @property - def g(self) -> int: ... - def parameters(self, backend: typing.Any = None) -> dsa.DSAParameters: ... - -def generate_parameters(key_size: int) -> dsa.DSAParameters: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi deleted file mode 100644 index 5c3b7bf..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi +++ /dev/null @@ -1,52 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import ec - -class ECPrivateKey: ... -class ECPublicKey: ... - -class EllipticCurvePrivateNumbers: - def __init__( - self, private_value: int, public_numbers: EllipticCurvePublicNumbers - ) -> None: ... - def private_key( - self, backend: typing.Any = None - ) -> ec.EllipticCurvePrivateKey: ... - @property - def private_value(self) -> int: ... - @property - def public_numbers(self) -> EllipticCurvePublicNumbers: ... - -class EllipticCurvePublicNumbers: - def __init__(self, x: int, y: int, curve: ec.EllipticCurve) -> None: ... - def public_key( - self, backend: typing.Any = None - ) -> ec.EllipticCurvePublicKey: ... - @property - def x(self) -> int: ... - @property - def y(self) -> int: ... - @property - def curve(self) -> ec.EllipticCurve: ... - def __eq__(self, other: object) -> bool: ... - -def curve_supported(curve: ec.EllipticCurve) -> bool: ... 
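The dsa.pyi and ec.pyi stubs above sit behind the public asymmetric.ec API. A short ECDSA/ECDH sketch that also round-trips the DER signature through the (r, s) helpers stubbed in asn1.pyi earlier in this diff (all values illustrative):

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.utils import (
    decode_dss_signature,
    encode_dss_signature,
)

private_key = ec.generate_private_key(ec.SECP256R1())
signature = private_key.sign(b"payload", ec.ECDSA(hashes.SHA256()))
private_key.public_key().verify(signature, b"payload", ec.ECDSA(hashes.SHA256()))

r, s = decode_dss_signature(signature)           # DER -> integer pair
assert encode_dss_signature(r, s) == signature   # integer pair -> DER

# ECDH key agreement with a second, freshly generated key.
peer = ec.generate_private_key(ec.SECP256R1())
shared_secret = private_key.exchange(ec.ECDH(), peer.public_key())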
-def generate_private_key( - curve: ec.EllipticCurve, backend: typing.Any = None -) -> ec.EllipticCurvePrivateKey: ... -def from_private_numbers( - numbers: ec.EllipticCurvePrivateNumbers, -) -> ec.EllipticCurvePrivateKey: ... -def from_public_numbers( - numbers: ec.EllipticCurvePublicNumbers, -) -> ec.EllipticCurvePublicKey: ... -def from_public_bytes( - curve: ec.EllipticCurve, data: bytes -) -> ec.EllipticCurvePublicKey: ... -def derive_private_key( - private_value: int, curve: ec.EllipticCurve -) -> ec.EllipticCurvePrivateKey: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi deleted file mode 100644 index f85b3d1..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import ed25519 -from cryptography.utils import Buffer - -class Ed25519PrivateKey: ... -class Ed25519PublicKey: ... - -def generate_key() -> ed25519.Ed25519PrivateKey: ... -def from_private_bytes(data: Buffer) -> ed25519.Ed25519PrivateKey: ... -def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi deleted file mode 100644 index c8ca0ec..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import ed448 -from cryptography.utils import Buffer - -class Ed448PrivateKey: ... -class Ed448PublicKey: ... - -def generate_key() -> ed448.Ed448PrivateKey: ... -def from_private_bytes(data: Buffer) -> ed448.Ed448PrivateKey: ... -def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi deleted file mode 100644 index 6bfd295..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi +++ /dev/null @@ -1,28 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import hashes -from cryptography.utils import Buffer - -class Hash(hashes.HashContext): - def __init__( - self, algorithm: hashes.HashAlgorithm, backend: typing.Any = None - ) -> None: ... - @property - def algorithm(self) -> hashes.HashAlgorithm: ... - def update(self, data: Buffer) -> None: ... - def finalize(self) -> bytes: ... - def copy(self) -> Hash: ... - -def hash_supported(algorithm: hashes.HashAlgorithm) -> bool: ... - -class XOFHash: - def __init__(self, algorithm: hashes.ExtendableOutputFunction) -> None: ... - @property - def algorithm(self) -> hashes.ExtendableOutputFunction: ... 
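The ed25519.pyi and ed448.pyi stubs deleted above expose only generate/load helpers; signing and verification live on the returned key objects. A minimal Ed25519 sketch:

from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

key = Ed25519PrivateKey.generate()
signature = key.sign(b"payload")
key.public_key().verify(signature, b"payload")  # raises InvalidSignature on failure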
- def update(self, data: Buffer) -> None: ... - def squeeze(self, length: int) -> bytes: ... - def copy(self) -> XOFHash: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi deleted file mode 100644 index 3883d1b..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi +++ /dev/null @@ -1,22 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import hashes -from cryptography.utils import Buffer - -class HMAC(hashes.HashContext): - def __init__( - self, - key: Buffer, - algorithm: hashes.HashAlgorithm, - backend: typing.Any = None, - ) -> None: ... - @property - def algorithm(self) -> hashes.HashAlgorithm: ... - def update(self, data: Buffer) -> None: ... - def finalize(self) -> bytes: ... - def verify(self, signature: bytes) -> None: ... - def copy(self) -> HMAC: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi deleted file mode 100644 index 9979e42..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi +++ /dev/null @@ -1,49 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.hashes import HashAlgorithm -from cryptography.utils import Buffer - -def derive_pbkdf2_hmac( - key_material: Buffer, - algorithm: HashAlgorithm, - salt: bytes, - iterations: int, - length: int, -) -> bytes: ... - -class Scrypt: - def __init__( - self, - salt: bytes, - length: int, - n: int, - r: int, - p: int, - backend: typing.Any = None, - ) -> None: ... - def derive(self, key_material: Buffer) -> bytes: ... - def verify(self, key_material: bytes, expected_key: bytes) -> None: ... - -class Argon2id: - def __init__( - self, - *, - salt: bytes, - length: int, - iterations: int, - lanes: int, - memory_cost: int, - ad: bytes | None = None, - secret: bytes | None = None, - ) -> None: ... - def derive(self, key_material: bytes) -> bytes: ... - def verify(self, key_material: bytes, expected_key: bytes) -> None: ... - def derive_phc_encoded(self, key_material: bytes) -> str: ... - @classmethod - def verify_phc_encoded( - cls, key_material: bytes, phc_encoded: str, secret: bytes | None = None - ) -> None: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi deleted file mode 100644 index 404057e..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi +++ /dev/null @@ -1,34 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
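The kdf.pyi stub above backs PBKDF2, Scrypt, and Argon2id. A PBKDF2 sketch using the public wrapper (salt, passphrase, and iteration count are illustrative; pick parameters to match your own requirements):

import os
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=600_000)
key = kdf.derive(b"correct horse battery staple")

# KDF instances are one-shot: build a fresh one to verify.
PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=600_000).verify(
    b"correct horse battery staple", key
)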
- -import typing - -from cryptography.hazmat.primitives.asymmetric.types import ( - PrivateKeyTypes, - PublicKeyTypes, -) -from cryptography.utils import Buffer - -def load_der_private_key( - data: Buffer, - password: bytes | None, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, -) -> PrivateKeyTypes: ... -def load_pem_private_key( - data: Buffer, - password: bytes | None, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, -) -> PrivateKeyTypes: ... -def load_der_public_key( - data: bytes, - backend: typing.Any = None, -) -> PublicKeyTypes: ... -def load_pem_public_key( - data: bytes, - backend: typing.Any = None, -) -> PublicKeyTypes: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi deleted file mode 100644 index 45a2a39..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi +++ /dev/null @@ -1,15 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.utils import Buffer - -class Poly1305: - def __init__(self, key: Buffer) -> None: ... - @staticmethod - def generate_tag(key: Buffer, data: Buffer) -> bytes: ... - @staticmethod - def verify_tag(key: Buffer, data: Buffer, tag: bytes) -> None: ... - def update(self, data: Buffer) -> None: ... - def finalize(self) -> bytes: ... - def verify(self, tag: bytes) -> None: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi deleted file mode 100644 index ef7752d..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi +++ /dev/null @@ -1,55 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import rsa - -class RSAPrivateKey: ... -class RSAPublicKey: ... - -class RSAPrivateNumbers: - def __init__( - self, - p: int, - q: int, - d: int, - dmp1: int, - dmq1: int, - iqmp: int, - public_numbers: RSAPublicNumbers, - ) -> None: ... - @property - def p(self) -> int: ... - @property - def q(self) -> int: ... - @property - def d(self) -> int: ... - @property - def dmp1(self) -> int: ... - @property - def dmq1(self) -> int: ... - @property - def iqmp(self) -> int: ... - @property - def public_numbers(self) -> RSAPublicNumbers: ... - def private_key( - self, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, - ) -> rsa.RSAPrivateKey: ... - -class RSAPublicNumbers: - def __init__(self, e: int, n: int) -> None: ... - @property - def n(self) -> int: ... - @property - def e(self) -> int: ... - def public_key(self, backend: typing.Any = None) -> rsa.RSAPublicKey: ... - -def generate_private_key( - public_exponent: int, - key_size: int, -) -> rsa.RSAPrivateKey: ... 
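The keys.pyi and rsa.pyi stubs above back key generation, PEM/DER loading, and serialization. A self-contained round-trip sketch (key size and format choices are illustrative):

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pem = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)
reloaded = serialization.load_pem_private_key(pem, password=None)
assert reloaded.public_key().public_numbers() == key.public_key().public_numbers()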
diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi deleted file mode 100644 index 38d2add..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import x25519 -from cryptography.utils import Buffer - -class X25519PrivateKey: ... -class X25519PublicKey: ... - -def generate_key() -> x25519.X25519PrivateKey: ... -def from_private_bytes(data: Buffer) -> x25519.X25519PrivateKey: ... -def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi deleted file mode 100644 index 3ac0980..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import x448 -from cryptography.utils import Buffer - -class X448PrivateKey: ... -class X448PublicKey: ... - -def generate_key() -> x448.X448PrivateKey: ... -def from_private_bytes(data: Buffer) -> x448.X448PrivateKey: ... -def from_public_bytes(data: bytes) -> x448.X448PublicKey: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi deleted file mode 100644 index b25becb..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi +++ /dev/null @@ -1,52 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing -from collections.abc import Iterable - -from cryptography import x509 -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes -from cryptography.hazmat.primitives.serialization import ( - KeySerializationEncryption, -) -from cryptography.hazmat.primitives.serialization.pkcs12 import ( - PKCS12KeyAndCertificates, - PKCS12PrivateKeyTypes, -) -from cryptography.utils import Buffer - -class PKCS12Certificate: - def __init__( - self, cert: x509.Certificate, friendly_name: bytes | None - ) -> None: ... - @property - def friendly_name(self) -> bytes | None: ... - @property - def certificate(self) -> x509.Certificate: ... - -def load_key_and_certificates( - data: Buffer, - password: Buffer | None, - backend: typing.Any = None, -) -> tuple[ - PrivateKeyTypes | None, - x509.Certificate | None, - list[x509.Certificate], -]: ... -def load_pkcs12( - data: bytes, - password: bytes | None, - backend: typing.Any = None, -) -> PKCS12KeyAndCertificates: ... -def serialize_java_truststore( - certs: Iterable[PKCS12Certificate], - encryption_algorithm: KeySerializationEncryption, -) -> bytes: ... 
-def serialize_key_and_certificates( - name: bytes | None, - key: PKCS12PrivateKeyTypes | None, - cert: x509.Certificate | None, - cas: Iterable[x509.Certificate | PKCS12Certificate] | None, - encryption_algorithm: KeySerializationEncryption, -) -> bytes: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi deleted file mode 100644 index 358b135..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi +++ /dev/null @@ -1,50 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from collections.abc import Iterable - -from cryptography import x509 -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives.serialization import pkcs7 - -def serialize_certificates( - certs: list[x509.Certificate], - encoding: serialization.Encoding, -) -> bytes: ... -def encrypt_and_serialize( - builder: pkcs7.PKCS7EnvelopeBuilder, - content_encryption_algorithm: pkcs7.ContentEncryptionAlgorithm, - encoding: serialization.Encoding, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def sign_and_serialize( - builder: pkcs7.PKCS7SignatureBuilder, - encoding: serialization.Encoding, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def decrypt_der( - data: bytes, - certificate: x509.Certificate, - private_key: rsa.RSAPrivateKey, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def decrypt_pem( - data: bytes, - certificate: x509.Certificate, - private_key: rsa.RSAPrivateKey, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def decrypt_smime( - data: bytes, - certificate: x509.Certificate, - private_key: rsa.RSAPrivateKey, - options: Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def load_pem_pkcs7_certificates( - data: bytes, -) -> list[x509.Certificate]: ... -def load_der_pkcs7_certificates( - data: bytes, -) -> list[x509.Certificate]: ... diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi deleted file mode 100644 index c6c6d0b..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography import x509 -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.serialization import pkcs7 -from cryptography.utils import Buffer - -class TestCertificate: - not_after_tag: int - not_before_tag: int - issuer_value_tags: list[int] - subject_value_tags: list[int] - -def test_parse_certificate(data: bytes) -> TestCertificate: ... -def pkcs7_verify( - encoding: serialization.Encoding, - sig: bytes, - msg: Buffer | None, - certs: list[x509.Certificate], - options: list[pkcs7.PKCS7Options], -) -> None: ... 
diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi deleted file mode 100644 index 6ba9afb..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi +++ /dev/null @@ -1,313 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import datetime -import typing -from collections.abc import Iterator - -from cryptography import x509 -from cryptography.hazmat.primitives import hashes, serialization -from cryptography.hazmat.primitives.asymmetric.ec import ECDSA -from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15 -from cryptography.hazmat.primitives.asymmetric.types import ( - CertificateIssuerPublicKeyTypes, - CertificatePublicKeyTypes, - PrivateKeyTypes, -) -from cryptography.x509 import certificate_transparency - -def load_pem_x509_certificate( - data: bytes, backend: typing.Any = None -) -> x509.Certificate: ... -def load_der_x509_certificate( - data: bytes, backend: typing.Any = None -) -> x509.Certificate: ... -def load_pem_x509_certificates( - data: bytes, -) -> list[x509.Certificate]: ... -def load_pem_x509_crl( - data: bytes, backend: typing.Any = None -) -> x509.CertificateRevocationList: ... -def load_der_x509_crl( - data: bytes, backend: typing.Any = None -) -> x509.CertificateRevocationList: ... -def load_pem_x509_csr( - data: bytes, backend: typing.Any = None -) -> x509.CertificateSigningRequest: ... -def load_der_x509_csr( - data: bytes, backend: typing.Any = None -) -> x509.CertificateSigningRequest: ... -def encode_name_bytes(name: x509.Name) -> bytes: ... -def encode_extension_value(extension: x509.ExtensionType) -> bytes: ... -def create_x509_certificate( - builder: x509.CertificateBuilder, - private_key: PrivateKeyTypes, - hash_algorithm: hashes.HashAlgorithm | None, - rsa_padding: PKCS1v15 | PSS | None, - ecdsa_deterministic: bool | None, -) -> x509.Certificate: ... -def create_x509_csr( - builder: x509.CertificateSigningRequestBuilder, - private_key: PrivateKeyTypes, - hash_algorithm: hashes.HashAlgorithm | None, - rsa_padding: PKCS1v15 | PSS | None, - ecdsa_deterministic: bool | None, -) -> x509.CertificateSigningRequest: ... -def create_x509_crl( - builder: x509.CertificateRevocationListBuilder, - private_key: PrivateKeyTypes, - hash_algorithm: hashes.HashAlgorithm | None, - rsa_padding: PKCS1v15 | PSS | None, - ecdsa_deterministic: bool | None, -) -> x509.CertificateRevocationList: ... - -class Sct: - @property - def version(self) -> certificate_transparency.Version: ... - @property - def log_id(self) -> bytes: ... - @property - def timestamp(self) -> datetime.datetime: ... - @property - def entry_type(self) -> certificate_transparency.LogEntryType: ... - @property - def signature_hash_algorithm(self) -> hashes.HashAlgorithm: ... - @property - def signature_algorithm( - self, - ) -> certificate_transparency.SignatureAlgorithm: ... - @property - def signature(self) -> bytes: ... - @property - def extension_bytes(self) -> bytes: ... - -class Certificate: - def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... - @property - def serial_number(self) -> int: ... - @property - def version(self) -> x509.Version: ... - def public_key(self) -> CertificatePublicKeyTypes: ... - @property - def public_key_algorithm_oid(self) -> x509.ObjectIdentifier: ... 
- @property - def not_valid_before(self) -> datetime.datetime: ... - @property - def not_valid_before_utc(self) -> datetime.datetime: ... - @property - def not_valid_after(self) -> datetime.datetime: ... - @property - def not_valid_after_utc(self) -> datetime.datetime: ... - @property - def issuer(self) -> x509.Name: ... - @property - def subject(self) -> x509.Name: ... - @property - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: ... - @property - def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... - @property - def signature_algorithm_parameters( - self, - ) -> PSS | PKCS1v15 | ECDSA | None: ... - @property - def extensions(self) -> x509.Extensions: ... - @property - def signature(self) -> bytes: ... - @property - def tbs_certificate_bytes(self) -> bytes: ... - @property - def tbs_precertificate_bytes(self) -> bytes: ... - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - def verify_directly_issued_by(self, issuer: Certificate) -> None: ... - -class RevokedCertificate: ... - -class CertificateRevocationList: - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... - def get_revoked_certificate_by_serial_number( - self, serial_number: int - ) -> x509.RevokedCertificate | None: ... - @property - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: ... - @property - def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... - @property - def signature_algorithm_parameters( - self, - ) -> PSS | PKCS1v15 | ECDSA | None: ... - @property - def issuer(self) -> x509.Name: ... - @property - def next_update(self) -> datetime.datetime | None: ... - @property - def next_update_utc(self) -> datetime.datetime | None: ... - @property - def last_update(self) -> datetime.datetime: ... - @property - def last_update_utc(self) -> datetime.datetime: ... - @property - def extensions(self) -> x509.Extensions: ... - @property - def signature(self) -> bytes: ... - @property - def tbs_certlist_bytes(self) -> bytes: ... - def __eq__(self, other: object) -> bool: ... - def __len__(self) -> int: ... - @typing.overload - def __getitem__(self, idx: int) -> x509.RevokedCertificate: ... - @typing.overload - def __getitem__(self, idx: slice) -> list[x509.RevokedCertificate]: ... - def __iter__(self) -> Iterator[x509.RevokedCertificate]: ... - def is_signature_valid( - self, public_key: CertificateIssuerPublicKeyTypes - ) -> bool: ... - -class CertificateSigningRequest: - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - def public_key(self) -> CertificatePublicKeyTypes: ... - @property - def subject(self) -> x509.Name: ... - @property - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: ... - @property - def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... - @property - def signature_algorithm_parameters( - self, - ) -> PSS | PKCS1v15 | ECDSA | None: ... - @property - def extensions(self) -> x509.Extensions: ... - @property - def attributes(self) -> x509.Attributes: ... - def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... - @property - def signature(self) -> bytes: ... - @property - def tbs_certrequest_bytes(self) -> bytes: ... - @property - def is_signature_valid(self) -> bool: ... - def get_attribute_for_oid(self, oid: x509.ObjectIdentifier) -> bytes: ... 
- -class PolicyBuilder: - def time(self, time: datetime.datetime) -> PolicyBuilder: ... - def store(self, store: Store) -> PolicyBuilder: ... - def max_chain_depth(self, max_chain_depth: int) -> PolicyBuilder: ... - def extension_policies( - self, *, ca_policy: ExtensionPolicy, ee_policy: ExtensionPolicy - ) -> PolicyBuilder: ... - def build_client_verifier(self) -> ClientVerifier: ... - def build_server_verifier( - self, subject: x509.verification.Subject - ) -> ServerVerifier: ... - -class Policy: - @property - def max_chain_depth(self) -> int: ... - @property - def subject(self) -> x509.verification.Subject | None: ... - @property - def validation_time(self) -> datetime.datetime: ... - @property - def extended_key_usage(self) -> x509.ObjectIdentifier: ... - @property - def minimum_rsa_modulus(self) -> int: ... - -class Criticality: - CRITICAL: Criticality - AGNOSTIC: Criticality - NON_CRITICAL: Criticality - -T = typing.TypeVar("T", contravariant=True, bound=x509.ExtensionType) - -MaybeExtensionValidatorCallback = typing.Callable[ - [ - Policy, - x509.Certificate, - T | None, - ], - None, -] - -PresentExtensionValidatorCallback = typing.Callable[ - [Policy, x509.Certificate, T], - None, -] - -class ExtensionPolicy: - @staticmethod - def permit_all() -> ExtensionPolicy: ... - @staticmethod - def webpki_defaults_ca() -> ExtensionPolicy: ... - @staticmethod - def webpki_defaults_ee() -> ExtensionPolicy: ... - def require_not_present( - self, extension_type: type[x509.ExtensionType] - ) -> ExtensionPolicy: ... - def may_be_present( - self, - extension_type: type[T], - criticality: Criticality, - validator: MaybeExtensionValidatorCallback[T] | None, - ) -> ExtensionPolicy: ... - def require_present( - self, - extension_type: type[T], - criticality: Criticality, - validator: PresentExtensionValidatorCallback[T] | None, - ) -> ExtensionPolicy: ... - -class VerifiedClient: - @property - def subjects(self) -> list[x509.GeneralName] | None: ... - @property - def chain(self) -> list[x509.Certificate]: ... - -class ClientVerifier: - @property - def policy(self) -> Policy: ... - @property - def validation_time(self) -> datetime.datetime: ... - @property - def max_chain_depth(self) -> int: ... - @property - def store(self) -> Store: ... - def verify( - self, - leaf: x509.Certificate, - intermediates: list[x509.Certificate], - ) -> VerifiedClient: ... - -class ServerVerifier: - @property - def policy(self) -> Policy: ... - @property - def subject(self) -> x509.verification.Subject: ... - @property - def validation_time(self) -> datetime.datetime: ... - @property - def max_chain_depth(self) -> int: ... - @property - def store(self) -> Store: ... - def verify( - self, - leaf: x509.Certificate, - intermediates: list[x509.Certificate], - ) -> list[x509.Certificate]: ... - -class Store: - def __init__(self, certs: list[x509.Certificate]) -> None: ... - -class VerificationError(Exception): - pass diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__init__.py deleted file mode 100644 index b509336..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 715eb58..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-310.pyc deleted file mode 100644 index 5cbe2f2..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-310.pyc deleted file mode 100644 index 42006be..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py deleted file mode 100644 index c90c916..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py +++ /dev/null @@ -1,191 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - - -def cryptography_has_set_cert_cb() -> list[str]: - return [ - "SSL_CTX_set_cert_cb", - "SSL_set_cert_cb", - ] - - -def cryptography_has_ssl_st() -> list[str]: - return [ - "SSL_ST_BEFORE", - "SSL_ST_OK", - "SSL_ST_INIT", - "SSL_ST_RENEGOTIATE", - ] - - -def cryptography_has_tls_st() -> list[str]: - return [ - "TLS_ST_BEFORE", - "TLS_ST_OK", - ] - - -def cryptography_has_ssl_sigalgs() -> list[str]: - return [ - "SSL_CTX_set1_sigalgs_list", - ] - - -def cryptography_has_psk() -> list[str]: - return [ - "SSL_CTX_use_psk_identity_hint", - "SSL_CTX_set_psk_server_callback", - "SSL_CTX_set_psk_client_callback", - ] - - -def cryptography_has_psk_tlsv13() -> list[str]: - return [ - "SSL_CTX_set_psk_find_session_callback", - "SSL_CTX_set_psk_use_session_callback", - "Cryptography_SSL_SESSION_new", - "SSL_CIPHER_find", - "SSL_SESSION_set1_master_key", - "SSL_SESSION_set_cipher", - "SSL_SESSION_set_protocol_version", - ] - - -def cryptography_has_custom_ext() -> list[str]: - return [ - "SSL_CTX_add_client_custom_ext", - "SSL_CTX_add_server_custom_ext", - "SSL_extension_supported", - ] - - -def cryptography_has_tlsv13_functions() -> list[str]: - return [ - "SSL_CTX_set_ciphersuites", - ] - - -def cryptography_has_tlsv13_hs_functions() -> list[str]: - return [ - "SSL_VERIFY_POST_HANDSHAKE", - "SSL_verify_client_post_handshake", - "SSL_CTX_set_post_handshake_auth", - "SSL_set_post_handshake_auth", - "SSL_SESSION_get_max_early_data", - "SSL_write_early_data", - "SSL_read_early_data", - "SSL_CTX_set_max_early_data", - ] - - -def cryptography_has_engine() -> list[str]: - return [ - "ENGINE_by_id", - "ENGINE_init", - "ENGINE_finish", - "ENGINE_get_default_RAND", - "ENGINE_set_default_RAND", - "ENGINE_unregister_RAND", - "ENGINE_ctrl_cmd", - "ENGINE_free", - "ENGINE_get_name", - "ENGINE_ctrl_cmd_string", - "ENGINE_load_builtin_engines", - "ENGINE_load_private_key", - "ENGINE_load_public_key", - "SSL_CTX_set_client_cert_engine", - ] - - -def cryptography_has_verified_chain() -> list[str]: - return [ - "SSL_get0_verified_chain", - ] - - -def cryptography_has_srtp() -> list[str]: - return [ - "SSL_CTX_set_tlsext_use_srtp", - "SSL_set_tlsext_use_srtp", - "SSL_get_selected_srtp_profile", - ] - - -def cryptography_has_op_no_renegotiation() -> list[str]: - return [ - "SSL_OP_NO_RENEGOTIATION", - ] - - -def cryptography_has_dtls_get_data_mtu() -> list[str]: - return [ - "DTLS_get_data_mtu", - ] - - -def cryptography_has_ssl_cookie() -> list[str]: - return [ - "SSL_OP_COOKIE_EXCHANGE", - "DTLSv1_listen", - "SSL_CTX_set_cookie_generate_cb", - "SSL_CTX_set_cookie_verify_cb", - ] - - -def cryptography_has_prime_checks() -> list[str]: - return [ - "BN_prime_checks_for_size", - ] - - -def cryptography_has_unexpected_eof_while_reading() -> list[str]: - return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"] - - -def cryptography_has_ssl_op_ignore_unexpected_eof() -> list[str]: - return [ - "SSL_OP_IGNORE_UNEXPECTED_EOF", - ] - - -def cryptography_has_get_extms_support() -> list[str]: - return ["SSL_get_extms_support"] - - -# This is a mapping of -# {condition: function-returning-names-dependent-on-that-condition} so we can -# loop over them and delete unsupported names at runtime. It will be removed -# when cffi supports #if in cdef. We use functions instead of just a dict of -# lists so we can use coverage to measure which are used. 
-CONDITIONAL_NAMES = { - "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb, - "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st, - "Cryptography_HAS_TLS_ST": cryptography_has_tls_st, - "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs, - "Cryptography_HAS_PSK": cryptography_has_psk, - "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13, - "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext, - "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions, - "Cryptography_HAS_TLSv1_3_HS_FUNCTIONS": ( - cryptography_has_tlsv13_hs_functions - ), - "Cryptography_HAS_ENGINE": cryptography_has_engine, - "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, - "Cryptography_HAS_SRTP": cryptography_has_srtp, - "Cryptography_HAS_OP_NO_RENEGOTIATION": ( - cryptography_has_op_no_renegotiation - ), - "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu, - "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie, - "Cryptography_HAS_PRIME_CHECKS": cryptography_has_prime_checks, - "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": ( - cryptography_has_unexpected_eof_while_reading - ), - "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": ( - cryptography_has_ssl_op_ignore_unexpected_eof - ), - "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support, -} diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/binding.py deleted file mode 100644 index 7de2c65..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/binding.py +++ /dev/null @@ -1,122 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import os -import sys -import threading -import types -import typing -import warnings -from collections.abc import Callable - -import cryptography -from cryptography.exceptions import InternalError -from cryptography.hazmat.bindings._rust import _openssl, openssl -from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES - - -def _openssl_assert(ok: bool) -> None: - if not ok: - errors = openssl.capture_error_stack() - - raise InternalError( - "Unknown OpenSSL error. This error is commonly encountered when " - "another library is not cleaning up the OpenSSL error stack. If " - "you are using cryptography with another library that uses " - "OpenSSL try disabling it before reporting a bug. Otherwise " - "please file an issue at https://github.com/pyca/cryptography/" - "issues with information on how to reproduce " - f"this. ({errors!r})", - errors, - ) - - -def build_conditional_library( - lib: typing.Any, - conditional_names: dict[str, Callable[[], list[str]]], -) -> typing.Any: - conditional_lib = types.ModuleType("lib") - conditional_lib._original_lib = lib # type: ignore[attr-defined] - excluded_names = set() - for condition, names_cb in conditional_names.items(): - if not getattr(lib, condition): - excluded_names.update(names_cb()) - - for attr in dir(lib): - if attr not in excluded_names: - setattr(conditional_lib, attr, getattr(lib, attr)) - - return conditional_lib - - -class Binding: - """ - OpenSSL API wrapper. 
- """ - - lib: typing.ClassVar = None - ffi = _openssl.ffi - _lib_loaded = False - _init_lock = threading.Lock() - - def __init__(self) -> None: - self._ensure_ffi_initialized() - - @classmethod - def _ensure_ffi_initialized(cls) -> None: - with cls._init_lock: - if not cls._lib_loaded: - cls.lib = build_conditional_library( - _openssl.lib, CONDITIONAL_NAMES - ) - cls._lib_loaded = True - - @classmethod - def init_static_locks(cls) -> None: - cls._ensure_ffi_initialized() - - -def _verify_package_version(version: str) -> None: - # Occasionally we run into situations where the version of the Python - # package does not match the version of the shared object that is loaded. - # This may occur in environments where multiple versions of cryptography - # are installed and available in the python path. To avoid errors cropping - # up later this code checks that the currently imported package and the - # shared object that were loaded have the same version and raise an - # ImportError if they do not - so_package_version = _openssl.ffi.string( - _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION - ) - if version.encode("ascii") != so_package_version: - raise ImportError( - "The version of cryptography does not match the loaded " - "shared object. This can happen if you have multiple copies of " - "cryptography installed in your Python path. Please try creating " - "a new virtual environment to resolve this issue. " - f"Loaded python version: {version}, " - f"shared object version: {so_package_version}" - ) - - _openssl_assert( - _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(), - ) - - -_verify_package_version(cryptography.__version__) - -Binding.init_static_locks() - -if ( - sys.platform == "win32" - and os.environ.get("PROCESSOR_ARCHITEW6432") is not None -): - warnings.warn( - "You are using cryptography on a 32-bit Python on a 64-bit Windows " - "Operating System. Cryptography will be significantly faster if you " - "switch to using a 64-bit Python.", - UserWarning, - stacklevel=2, - ) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__init__.py deleted file mode 100644 index 41d7318..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index ecc8cda..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py deleted file mode 100644 index 41d7318..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 3ae8894..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-310.pyc deleted file mode 100644 index d4abb7b..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py deleted file mode 100644 index 072a991..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py +++ /dev/null @@ -1,112 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.primitives._cipheralgorithm import ( - BlockCipherAlgorithm, - CipherAlgorithm, - _verify_key_size, -) - - -class ARC4(CipherAlgorithm): - name = "RC4" - key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class TripleDES(BlockCipherAlgorithm): - name = "3DES" - block_size = 64 - key_sizes = frozenset([64, 128, 192]) - - def __init__(self, key: bytes): - if len(key) == 8: - key += key + key - elif len(key) == 16: - key += key[:8] - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -# Not actually supported, marker for tests -class _DES: - key_size = 64 - - -class Blowfish(BlockCipherAlgorithm): - name = "Blowfish" - block_size = 64 - key_sizes = frozenset(range(32, 449, 8)) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class CAST5(BlockCipherAlgorithm): - name = "CAST5" - block_size = 64 - key_sizes = frozenset(range(40, 129, 8)) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class SEED(BlockCipherAlgorithm): - name = "SEED" - block_size = 128 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class IDEA(BlockCipherAlgorithm): - name = "IDEA" - block_size = 64 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -# This class only allows RC2 with a 128-bit key. No support for -# effective key bits or other key sizes is provided. 
-class RC2(BlockCipherAlgorithm): - name = "RC2" - block_size = 64 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__init__.py deleted file mode 100644 index b509336..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 46d95f0..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-310.pyc deleted file mode 100644 index 06bb46a..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-310.pyc deleted file mode 100644 index 1740a3d..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-310.pyc deleted file mode 100644 index aa56f0b..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-310.pyc deleted file mode 100644 index b307ee3..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-310.pyc deleted file mode 100644 index 59e0607..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-310.pyc deleted file mode 100644 index 69ba017..0000000 Binary files 
a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-310.pyc deleted file mode 100644 index ad996d3..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-310.pyc deleted file mode 100644 index ee537ce..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-310.pyc deleted file mode 100644 index 630de29..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-310.pyc deleted file mode 100644 index ab0fc93..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/_asymmetric.py deleted file mode 100644 index ea55ffd..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/_asymmetric.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -# This exists to break an import cycle. It is normally accessible from the -# asymmetric padding module. - - -class AsymmetricPadding(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this padding (e.g. "PSS", "PKCS1"). - """ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py deleted file mode 100644 index 305a9fd..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py +++ /dev/null @@ -1,60 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils - -# This exists to break an import cycle. It is normally accessible from the -# ciphers module. - - -class CipherAlgorithm(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this mode (e.g. "AES", "Camellia"). 
- """ - - @property - @abc.abstractmethod - def key_sizes(self) -> frozenset[int]: - """ - Valid key sizes for this algorithm in bits - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The size of the key being used as an integer in bits (e.g. 128, 256). - """ - - -class BlockCipherAlgorithm(CipherAlgorithm): - key: utils.Buffer - - @property - @abc.abstractmethod - def block_size(self) -> int: - """ - The size of a block as an integer in bits (e.g. 64, 128). - """ - - -def _verify_key_size( - algorithm: CipherAlgorithm, key: utils.Buffer -) -> utils.Buffer: - # Verify that the key is instance of bytes - utils._check_byteslike("key", key) - - # Verify that the key size matches the expected key size - if len(key) * 8 not in algorithm.key_sizes: - raise ValueError( - f"Invalid key size ({len(key) * 8}) for {algorithm.name}." - ) - return key diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/_serialization.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/_serialization.py deleted file mode 100644 index e998865..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/_serialization.py +++ /dev/null @@ -1,168 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils -from cryptography.hazmat.primitives.hashes import HashAlgorithm - -# This exists to break an import cycle. These classes are normally accessible -# from the serialization module. - - -class PBES(utils.Enum): - PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES" - PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC" - - -class Encoding(utils.Enum): - PEM = "PEM" - DER = "DER" - OpenSSH = "OpenSSH" - Raw = "Raw" - X962 = "ANSI X9.62" - SMIME = "S/MIME" - - -class PrivateFormat(utils.Enum): - PKCS8 = "PKCS8" - TraditionalOpenSSL = "TraditionalOpenSSL" - Raw = "Raw" - OpenSSH = "OpenSSH" - PKCS12 = "PKCS12" - - def encryption_builder(self) -> KeySerializationEncryptionBuilder: - if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12): - raise ValueError( - "encryption_builder only supported with PrivateFormat.OpenSSH" - " and PrivateFormat.PKCS12" - ) - return KeySerializationEncryptionBuilder(self) - - -class PublicFormat(utils.Enum): - SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" - PKCS1 = "Raw PKCS#1" - OpenSSH = "OpenSSH" - Raw = "Raw" - CompressedPoint = "X9.62 Compressed Point" - UncompressedPoint = "X9.62 Uncompressed Point" - - -class ParameterFormat(utils.Enum): - PKCS3 = "PKCS3" - - -class KeySerializationEncryption(metaclass=abc.ABCMeta): - pass - - -class BestAvailableEncryption(KeySerializationEncryption): - def __init__(self, password: bytes): - if not isinstance(password, bytes) or len(password) == 0: - raise ValueError("Password must be 1 or more bytes.") - - self.password = password - - -class NoEncryption(KeySerializationEncryption): - pass - - -class KeySerializationEncryptionBuilder: - def __init__( - self, - format: PrivateFormat, - *, - _kdf_rounds: int | None = None, - _hmac_hash: HashAlgorithm | None = None, - _key_cert_algorithm: PBES | None = None, - ) -> None: - self._format = format - - self._kdf_rounds = _kdf_rounds - self._hmac_hash = _hmac_hash - self._key_cert_algorithm = _key_cert_algorithm - - def kdf_rounds(self, rounds: int) 
-> KeySerializationEncryptionBuilder: - if self._kdf_rounds is not None: - raise ValueError("kdf_rounds already set") - - if not isinstance(rounds, int): - raise TypeError("kdf_rounds must be an integer") - - if rounds < 1: - raise ValueError("kdf_rounds must be a positive integer") - - return KeySerializationEncryptionBuilder( - self._format, - _kdf_rounds=rounds, - _hmac_hash=self._hmac_hash, - _key_cert_algorithm=self._key_cert_algorithm, - ) - - def hmac_hash( - self, algorithm: HashAlgorithm - ) -> KeySerializationEncryptionBuilder: - if self._format is not PrivateFormat.PKCS12: - raise TypeError( - "hmac_hash only supported with PrivateFormat.PKCS12" - ) - - if self._hmac_hash is not None: - raise ValueError("hmac_hash already set") - return KeySerializationEncryptionBuilder( - self._format, - _kdf_rounds=self._kdf_rounds, - _hmac_hash=algorithm, - _key_cert_algorithm=self._key_cert_algorithm, - ) - - def key_cert_algorithm( - self, algorithm: PBES - ) -> KeySerializationEncryptionBuilder: - if self._format is not PrivateFormat.PKCS12: - raise TypeError( - "key_cert_algorithm only supported with PrivateFormat.PKCS12" - ) - if self._key_cert_algorithm is not None: - raise ValueError("key_cert_algorithm already set") - return KeySerializationEncryptionBuilder( - self._format, - _kdf_rounds=self._kdf_rounds, - _hmac_hash=self._hmac_hash, - _key_cert_algorithm=algorithm, - ) - - def build(self, password: bytes) -> KeySerializationEncryption: - if not isinstance(password, bytes) or len(password) == 0: - raise ValueError("Password must be 1 or more bytes.") - - return _KeySerializationEncryption( - self._format, - password, - kdf_rounds=self._kdf_rounds, - hmac_hash=self._hmac_hash, - key_cert_algorithm=self._key_cert_algorithm, - ) - - -class _KeySerializationEncryption(KeySerializationEncryption): - def __init__( - self, - format: PrivateFormat, - password: bytes, - *, - kdf_rounds: int | None, - hmac_hash: HashAlgorithm | None, - key_cert_algorithm: PBES | None, - ): - self._format = format - self.password = password - - self._kdf_rounds = kdf_rounds - self._hmac_hash = hmac_hash - self._key_cert_algorithm = key_cert_algorithm diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py deleted file mode 100644 index b509336..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 5053d98..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-310.pyc deleted file mode 100644 index 370ff3c..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-310.pyc deleted file mode 100644 index 3eb68a7..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-310.pyc deleted file mode 100644 index fc5be92..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-310.pyc deleted file mode 100644 index 487a245..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-310.pyc deleted file mode 100644 index 6af0e4f..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-310.pyc deleted file mode 100644 index 82ec490..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-310.pyc deleted file mode 100644 index a0b2100..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-310.pyc deleted file mode 100644 index cd5a42b..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-310.pyc deleted file mode 100644 index 3ef9639..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-310.pyc deleted file mode 100644 index 3a37918..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-310.pyc deleted file mode 100644 index a974230..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py deleted file mode 100644 index 1822e99..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py +++ /dev/null @@ -1,147 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization - -generate_parameters = rust_openssl.dh.generate_parameters - - -DHPrivateNumbers = rust_openssl.dh.DHPrivateNumbers -DHPublicNumbers = rust_openssl.dh.DHPublicNumbers -DHParameterNumbers = rust_openssl.dh.DHParameterNumbers - - -class DHParameters(metaclass=abc.ABCMeta): - @abc.abstractmethod - def generate_private_key(self) -> DHPrivateKey: - """ - Generates and returns a DHPrivateKey. - """ - - @abc.abstractmethod - def parameter_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.ParameterFormat, - ) -> bytes: - """ - Returns the parameters serialized as bytes. - """ - - @abc.abstractmethod - def parameter_numbers(self) -> DHParameterNumbers: - """ - Returns a DHParameterNumbers. - """ - - -DHParametersWithSerialization = DHParameters -DHParameters.register(rust_openssl.dh.DHParameters) - - -class DHPublicKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def parameters(self) -> DHParameters: - """ - The DHParameters object associated with this public key. - """ - - @abc.abstractmethod - def public_numbers(self) -> DHPublicNumbers: - """ - Returns a DHPublicNumbers. 
- """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> DHPublicKey: - """ - Returns a copy. - """ - - -DHPublicKeyWithSerialization = DHPublicKey -DHPublicKey.register(rust_openssl.dh.DHPublicKey) - - -class DHPrivateKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def public_key(self) -> DHPublicKey: - """ - The DHPublicKey associated with this private key. - """ - - @abc.abstractmethod - def parameters(self) -> DHParameters: - """ - The DHParameters object associated with this private key. - """ - - @abc.abstractmethod - def exchange(self, peer_public_key: DHPublicKey) -> bytes: - """ - Given peer's DHPublicKey, carry out the key exchange and - return shared key as bytes. - """ - - @abc.abstractmethod - def private_numbers(self) -> DHPrivateNumbers: - """ - Returns a DHPrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __copy__(self) -> DHPrivateKey: - """ - Returns a copy. - """ - - -DHPrivateKeyWithSerialization = DHPrivateKey -DHPrivateKey.register(rust_openssl.dh.DHPrivateKey) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py deleted file mode 100644 index 21d78ba..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py +++ /dev/null @@ -1,167 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import typing - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization, hashes -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils -from cryptography.utils import Buffer - - -class DSAParameters(metaclass=abc.ABCMeta): - @abc.abstractmethod - def generate_private_key(self) -> DSAPrivateKey: - """ - Generates and returns a DSAPrivateKey. - """ - - @abc.abstractmethod - def parameter_numbers(self) -> DSAParameterNumbers: - """ - Returns a DSAParameterNumbers. - """ - - -DSAParametersWithNumbers = DSAParameters -DSAParameters.register(rust_openssl.dsa.DSAParameters) - - -class DSAPrivateKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def public_key(self) -> DSAPublicKey: - """ - The DSAPublicKey associated with this private key. - """ - - @abc.abstractmethod - def parameters(self) -> DSAParameters: - """ - The DSAParameters object associated with this private key. 
- """ - - @abc.abstractmethod - def sign( - self, - data: Buffer, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> bytes: - """ - Signs the data - """ - - @abc.abstractmethod - def private_numbers(self) -> DSAPrivateNumbers: - """ - Returns a DSAPrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __copy__(self) -> DSAPrivateKey: - """ - Returns a copy. - """ - - -DSAPrivateKeyWithSerialization = DSAPrivateKey -DSAPrivateKey.register(rust_openssl.dsa.DSAPrivateKey) - - -class DSAPublicKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def parameters(self) -> DSAParameters: - """ - The DSAParameters object associated with this public key. - """ - - @abc.abstractmethod - def public_numbers(self) -> DSAPublicNumbers: - """ - Returns a DSAPublicNumbers. - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def verify( - self, - signature: Buffer, - data: Buffer, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> None: - """ - Verifies the signature of the data. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> DSAPublicKey: - """ - Returns a copy. - """ - - -DSAPublicKeyWithSerialization = DSAPublicKey -DSAPublicKey.register(rust_openssl.dsa.DSAPublicKey) - -DSAPrivateNumbers = rust_openssl.dsa.DSAPrivateNumbers -DSAPublicNumbers = rust_openssl.dsa.DSAPublicNumbers -DSAParameterNumbers = rust_openssl.dsa.DSAParameterNumbers - - -def generate_parameters( - key_size: int, backend: typing.Any = None -) -> DSAParameters: - if key_size not in (1024, 2048, 3072, 4096): - raise ValueError("Key size must be 1024, 2048, 3072, or 4096 bits.") - - return rust_openssl.dsa.generate_parameters(key_size) - - -def generate_private_key( - key_size: int, backend: typing.Any = None -) -> DSAPrivateKey: - parameters = generate_parameters(key_size) - return parameters.generate_private_key() diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py deleted file mode 100644 index a13d982..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py +++ /dev/null @@ -1,447 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import abc -import typing - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat._oid import ObjectIdentifier -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization, hashes -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils - - -class EllipticCurveOID: - SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1") - SECP224R1 = ObjectIdentifier("1.3.132.0.33") - SECP256K1 = ObjectIdentifier("1.3.132.0.10") - SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") - SECP384R1 = ObjectIdentifier("1.3.132.0.34") - SECP521R1 = ObjectIdentifier("1.3.132.0.35") - BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") - BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") - BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") - SECT163K1 = ObjectIdentifier("1.3.132.0.1") - SECT163R2 = ObjectIdentifier("1.3.132.0.15") - SECT233K1 = ObjectIdentifier("1.3.132.0.26") - SECT233R1 = ObjectIdentifier("1.3.132.0.27") - SECT283K1 = ObjectIdentifier("1.3.132.0.16") - SECT283R1 = ObjectIdentifier("1.3.132.0.17") - SECT409K1 = ObjectIdentifier("1.3.132.0.36") - SECT409R1 = ObjectIdentifier("1.3.132.0.37") - SECT571K1 = ObjectIdentifier("1.3.132.0.38") - SECT571R1 = ObjectIdentifier("1.3.132.0.39") - - -class EllipticCurve(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - The name of the curve. e.g. secp256r1. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - Bit size of a secret scalar for the curve. - """ - - @property - @abc.abstractmethod - def group_order(self) -> int: - """ - The order of the curve's group. - """ - - -class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def algorithm( - self, - ) -> asym_utils.Prehashed | hashes.HashAlgorithm: - """ - The digest algorithm used with this signature. - """ - - -class EllipticCurvePrivateKey(metaclass=abc.ABCMeta): - @abc.abstractmethod - def exchange( - self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey - ) -> bytes: - """ - Performs a key exchange operation using the provided algorithm with the - provided peer's public key. - """ - - @abc.abstractmethod - def public_key(self) -> EllipticCurvePublicKey: - """ - The EllipticCurvePublicKey for this private key. - """ - - @property - @abc.abstractmethod - def curve(self) -> EllipticCurve: - """ - The EllipticCurve that this key is on. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - Bit size of a secret scalar for the curve. - """ - - @abc.abstractmethod - def sign( - self, - data: utils.Buffer, - signature_algorithm: EllipticCurveSignatureAlgorithm, - ) -> bytes: - """ - Signs the data - """ - - @abc.abstractmethod - def private_numbers(self) -> EllipticCurvePrivateNumbers: - """ - Returns an EllipticCurvePrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __copy__(self) -> EllipticCurvePrivateKey: - """ - Returns a copy. 
- """ - - -EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey -EllipticCurvePrivateKey.register(rust_openssl.ec.ECPrivateKey) - - -class EllipticCurvePublicKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def curve(self) -> EllipticCurve: - """ - The EllipticCurve that this key is on. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - Bit size of a secret scalar for the curve. - """ - - @abc.abstractmethod - def public_numbers(self) -> EllipticCurvePublicNumbers: - """ - Returns an EllipticCurvePublicNumbers. - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def verify( - self, - signature: utils.Buffer, - data: utils.Buffer, - signature_algorithm: EllipticCurveSignatureAlgorithm, - ) -> None: - """ - Verifies the signature of the data. - """ - - @classmethod - def from_encoded_point( - cls, curve: EllipticCurve, data: bytes - ) -> EllipticCurvePublicKey: - utils._check_bytes("data", data) - - if len(data) == 0: - raise ValueError("data must not be an empty byte string") - - if data[0] not in [0x02, 0x03, 0x04]: - raise ValueError("Unsupported elliptic curve point type") - - return rust_openssl.ec.from_public_bytes(curve, data) - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> EllipticCurvePublicKey: - """ - Returns a copy. - """ - - -EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey -EllipticCurvePublicKey.register(rust_openssl.ec.ECPublicKey) - -EllipticCurvePrivateNumbers = rust_openssl.ec.EllipticCurvePrivateNumbers -EllipticCurvePublicNumbers = rust_openssl.ec.EllipticCurvePublicNumbers - - -class SECT571R1(EllipticCurve): - name = "sect571r1" - key_size = 570 - group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE661CE18FF55987308059B186823851EC7DD9CA1161DE93D5174D66E8382E9BB2FE84E47 # noqa: E501 - - -class SECT409R1(EllipticCurve): - name = "sect409r1" - key_size = 409 - group_order = 0x10000000000000000000000000000000000000000000000000001E2AAD6A612F33307BE5FA47C3C9E052F838164CD37D9A21173 # noqa: E501 - - -class SECT283R1(EllipticCurve): - name = "sect283r1" - key_size = 283 - group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEF90399660FC938A90165B042A7CEFADB307 # noqa: E501 - - -class SECT233R1(EllipticCurve): - name = "sect233r1" - key_size = 233 - group_order = 0x1000000000000000000000000000013E974E72F8A6922031D2603CFE0D7 - - -class SECT163R2(EllipticCurve): - name = "sect163r2" - key_size = 163 - group_order = 0x40000000000000000000292FE77E70C12A4234C33 - - -class SECT571K1(EllipticCurve): - name = "sect571k1" - key_size = 571 - group_order = 0x20000000000000000000000000000000000000000000000000000000000000000000000131850E1F19A63E4B391A8DB917F4138B630D84BE5D639381E91DEB45CFE778F637C1001 # noqa: E501 - - -class SECT409K1(EllipticCurve): - name = "sect409k1" - key_size = 409 - group_order = 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE5F83B2D4EA20400EC4557D5ED3E3E7CA5B4B5C83B8E01E5FCF # noqa: E501 - - -class SECT283K1(EllipticCurve): - name = "sect283k1" - key_size = 283 - group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE9AE2ED07577265DFF7F94451E061E163C61 # noqa: E501 - - -class SECT233K1(EllipticCurve): - name = "sect233k1" - key_size = 233 - group_order = 
0x8000000000000000000000000000069D5BB915BCD46EFB1AD5F173ABDF - - -class SECT163K1(EllipticCurve): - name = "sect163k1" - key_size = 163 - group_order = 0x4000000000000000000020108A2E0CC0D99F8A5EF - - -class SECP521R1(EllipticCurve): - name = "secp521r1" - key_size = 521 - group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409 # noqa: E501 - - -class SECP384R1(EllipticCurve): - name = "secp384r1" - key_size = 384 - group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973 # noqa: E501 - - -class SECP256R1(EllipticCurve): - name = "secp256r1" - key_size = 256 - group_order = ( - 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551 - ) - - -class SECP256K1(EllipticCurve): - name = "secp256k1" - key_size = 256 - group_order = ( - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 - ) - - -class SECP224R1(EllipticCurve): - name = "secp224r1" - key_size = 224 - group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D - - -class SECP192R1(EllipticCurve): - name = "secp192r1" - key_size = 192 - group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831 - - -class BrainpoolP256R1(EllipticCurve): - name = "brainpoolP256r1" - key_size = 256 - group_order = ( - 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 - ) - - -class BrainpoolP384R1(EllipticCurve): - name = "brainpoolP384r1" - key_size = 384 - group_order = 0x8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425A7CF3AB6AF6B7FC3103B883202E9046565 # noqa: E501 - - -class BrainpoolP512R1(EllipticCurve): - name = "brainpoolP512r1" - key_size = 512 - group_order = 0xAADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330870553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069 # noqa: E501 - - -_CURVE_TYPES: dict[str, EllipticCurve] = { - "prime192v1": SECP192R1(), - "prime256v1": SECP256R1(), - "secp192r1": SECP192R1(), - "secp224r1": SECP224R1(), - "secp256r1": SECP256R1(), - "secp384r1": SECP384R1(), - "secp521r1": SECP521R1(), - "secp256k1": SECP256K1(), - "sect163k1": SECT163K1(), - "sect233k1": SECT233K1(), - "sect283k1": SECT283K1(), - "sect409k1": SECT409K1(), - "sect571k1": SECT571K1(), - "sect163r2": SECT163R2(), - "sect233r1": SECT233R1(), - "sect283r1": SECT283R1(), - "sect409r1": SECT409R1(), - "sect571r1": SECT571R1(), - "brainpoolP256r1": BrainpoolP256R1(), - "brainpoolP384r1": BrainpoolP384R1(), - "brainpoolP512r1": BrainpoolP512R1(), -} - - -class ECDSA(EllipticCurveSignatureAlgorithm): - def __init__( - self, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - deterministic_signing: bool = False, - ): - from cryptography.hazmat.backends.openssl.backend import backend - - if ( - deterministic_signing - and not backend.ecdsa_deterministic_supported() - ): - raise UnsupportedAlgorithm( - "ECDSA with deterministic signature (RFC 6979) is not " - "supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - self._algorithm = algorithm - self._deterministic_signing = deterministic_signing - - @property - def algorithm( - self, - ) -> asym_utils.Prehashed | hashes.HashAlgorithm: - return self._algorithm - - @property - def deterministic_signing( - self, - ) -> bool: - return self._deterministic_signing - - -generate_private_key = rust_openssl.ec.generate_private_key - - -def derive_private_key( - private_value: int, - curve: EllipticCurve, - backend: typing.Any = 
None, -) -> EllipticCurvePrivateKey: - if not isinstance(private_value, int): - raise TypeError("private_value must be an integer type.") - - if private_value <= 0: - raise ValueError("private_value must be a positive integer.") - - return rust_openssl.ec.derive_private_key(private_value, curve) - - -class ECDH: - pass - - -_OID_TO_CURVE = { - EllipticCurveOID.SECP192R1: SECP192R1, - EllipticCurveOID.SECP224R1: SECP224R1, - EllipticCurveOID.SECP256K1: SECP256K1, - EllipticCurveOID.SECP256R1: SECP256R1, - EllipticCurveOID.SECP384R1: SECP384R1, - EllipticCurveOID.SECP521R1: SECP521R1, - EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1, - EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1, - EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1, - EllipticCurveOID.SECT163K1: SECT163K1, - EllipticCurveOID.SECT163R2: SECT163R2, - EllipticCurveOID.SECT233K1: SECT233K1, - EllipticCurveOID.SECT233R1: SECT233R1, - EllipticCurveOID.SECT283K1: SECT283K1, - EllipticCurveOID.SECT283R1: SECT283R1, - EllipticCurveOID.SECT409K1: SECT409K1, - EllipticCurveOID.SECT409R1: SECT409R1, - EllipticCurveOID.SECT571K1: SECT571K1, - EllipticCurveOID.SECT571R1: SECT571R1, -} - - -def get_curve_for_oid(oid: ObjectIdentifier) -> type[EllipticCurve]: - try: - return _OID_TO_CURVE[oid] - except KeyError: - raise LookupError( - "The provided object identifier has no matching elliptic " - "curve class" - ) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py deleted file mode 100644 index e576dc9..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py +++ /dev/null @@ -1,129 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization -from cryptography.utils import Buffer - - -class Ed25519PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed25519_supported(): - raise UnsupportedAlgorithm( - "ed25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed25519.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def verify(self, signature: Buffer, data: Buffer) -> None: - """ - Verify the signature. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> Ed25519PublicKey: - """ - Returns a copy. 
- """ - - -Ed25519PublicKey.register(rust_openssl.ed25519.Ed25519PublicKey) - - -class Ed25519PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> Ed25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed25519_supported(): - raise UnsupportedAlgorithm( - "ed25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed25519.generate_key() - - @classmethod - def from_private_bytes(cls, data: Buffer) -> Ed25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed25519_supported(): - raise UnsupportedAlgorithm( - "ed25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed25519.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> Ed25519PublicKey: - """ - The Ed25519PublicKey derived from the private key. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def sign(self, data: Buffer) -> bytes: - """ - Signs the data. - """ - - @abc.abstractmethod - def __copy__(self) -> Ed25519PrivateKey: - """ - Returns a copy. - """ - - -Ed25519PrivateKey.register(rust_openssl.ed25519.Ed25519PrivateKey) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py deleted file mode 100644 index 89db209..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py +++ /dev/null @@ -1,131 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization -from cryptography.utils import Buffer - - -class Ed448PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> Ed448PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed448_supported(): - raise UnsupportedAlgorithm( - "ed448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed448.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def verify(self, signature: Buffer, data: Buffer) -> None: - """ - Verify the signature. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. 
- """ - - @abc.abstractmethod - def __copy__(self) -> Ed448PublicKey: - """ - Returns a copy. - """ - - -if hasattr(rust_openssl, "ed448"): - Ed448PublicKey.register(rust_openssl.ed448.Ed448PublicKey) - - -class Ed448PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> Ed448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed448_supported(): - raise UnsupportedAlgorithm( - "ed448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed448.generate_key() - - @classmethod - def from_private_bytes(cls, data: Buffer) -> Ed448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed448_supported(): - raise UnsupportedAlgorithm( - "ed448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed448.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> Ed448PublicKey: - """ - The Ed448PublicKey derived from the private key. - """ - - @abc.abstractmethod - def sign(self, data: Buffer) -> bytes: - """ - Signs the data. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def __copy__(self) -> Ed448PrivateKey: - """ - Returns a copy. - """ - - -if hasattr(rust_openssl, "x448"): - Ed448PrivateKey.register(rust_openssl.ed448.Ed448PrivateKey) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py deleted file mode 100644 index b4babf4..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py +++ /dev/null @@ -1,113 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives._asymmetric import ( - AsymmetricPadding as AsymmetricPadding, -) -from cryptography.hazmat.primitives.asymmetric import rsa - - -class PKCS1v15(AsymmetricPadding): - name = "EMSA-PKCS1-v1_5" - - -class _MaxLength: - "Sentinel value for `MAX_LENGTH`." - - -class _Auto: - "Sentinel value for `AUTO`." - - -class _DigestLength: - "Sentinel value for `DIGEST_LENGTH`." 
- - -class PSS(AsymmetricPadding): - MAX_LENGTH = _MaxLength() - AUTO = _Auto() - DIGEST_LENGTH = _DigestLength() - name = "EMSA-PSS" - _salt_length: int | _MaxLength | _Auto | _DigestLength - - def __init__( - self, - mgf: MGF, - salt_length: int | _MaxLength | _Auto | _DigestLength, - ) -> None: - self._mgf = mgf - - if not isinstance( - salt_length, (int, _MaxLength, _Auto, _DigestLength) - ): - raise TypeError( - "salt_length must be an integer, MAX_LENGTH, " - "DIGEST_LENGTH, or AUTO" - ) - - if isinstance(salt_length, int) and salt_length < 0: - raise ValueError("salt_length must be zero or greater.") - - self._salt_length = salt_length - - @property - def mgf(self) -> MGF: - return self._mgf - - -class OAEP(AsymmetricPadding): - name = "EME-OAEP" - - def __init__( - self, - mgf: MGF, - algorithm: hashes.HashAlgorithm, - label: bytes | None, - ): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError("Expected instance of hashes.HashAlgorithm.") - - self._mgf = mgf - self._algorithm = algorithm - self._label = label - - @property - def algorithm(self) -> hashes.HashAlgorithm: - return self._algorithm - - @property - def mgf(self) -> MGF: - return self._mgf - - -class MGF(metaclass=abc.ABCMeta): - _algorithm: hashes.HashAlgorithm - - -class MGF1(MGF): - MAX_LENGTH = _MaxLength() - - def __init__(self, algorithm: hashes.HashAlgorithm): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError("Expected instance of hashes.HashAlgorithm.") - - self._algorithm = algorithm - - -def calculate_max_pss_salt_length( - key: rsa.RSAPrivateKey | rsa.RSAPublicKey, - hash_algorithm: hashes.HashAlgorithm, -) -> int: - if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): - raise TypeError("key must be an RSA public or private key") - # bit length - 1 per RFC 3447 - emlen = (key.key_size + 6) // 8 - salt_length = emlen - hash_algorithm.digest_size - 2 - assert salt_length >= 0 - return salt_length diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py deleted file mode 100644 index 2e192aa..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py +++ /dev/null @@ -1,277 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import random -import typing -from math import gcd - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization, hashes -from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils - - -class RSAPrivateKey(metaclass=abc.ABCMeta): - @abc.abstractmethod - def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: - """ - Decrypts the provided ciphertext. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the public modulus. - """ - - @abc.abstractmethod - def public_key(self) -> RSAPublicKey: - """ - The RSAPublicKey associated with this private key. - """ - - @abc.abstractmethod - def sign( - self, - data: bytes, - padding: AsymmetricPadding, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> bytes: - """ - Signs the data. 
- """ - - @abc.abstractmethod - def private_numbers(self) -> RSAPrivateNumbers: - """ - Returns an RSAPrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __copy__(self) -> RSAPrivateKey: - """ - Returns a copy. - """ - - -RSAPrivateKeyWithSerialization = RSAPrivateKey -RSAPrivateKey.register(rust_openssl.rsa.RSAPrivateKey) - - -class RSAPublicKey(metaclass=abc.ABCMeta): - @abc.abstractmethod - def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: - """ - Encrypts the given plaintext. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the public modulus. - """ - - @abc.abstractmethod - def public_numbers(self) -> RSAPublicNumbers: - """ - Returns an RSAPublicNumbers - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def verify( - self, - signature: bytes, - data: bytes, - padding: AsymmetricPadding, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> None: - """ - Verifies the signature of the data. - """ - - @abc.abstractmethod - def recover_data_from_signature( - self, - signature: bytes, - padding: AsymmetricPadding, - algorithm: hashes.HashAlgorithm | None, - ) -> bytes: - """ - Recovers the original data from the signature. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> RSAPublicKey: - """ - Returns a copy. - """ - - -RSAPublicKeyWithSerialization = RSAPublicKey -RSAPublicKey.register(rust_openssl.rsa.RSAPublicKey) - -RSAPrivateNumbers = rust_openssl.rsa.RSAPrivateNumbers -RSAPublicNumbers = rust_openssl.rsa.RSAPublicNumbers - - -def generate_private_key( - public_exponent: int, - key_size: int, - backend: typing.Any = None, -) -> RSAPrivateKey: - _verify_rsa_parameters(public_exponent, key_size) - return rust_openssl.rsa.generate_private_key(public_exponent, key_size) - - -def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None: - if public_exponent not in (3, 65537): - raise ValueError( - "public_exponent must be either 3 (for legacy compatibility) or " - "65537. Almost everyone should choose 65537 here!" - ) - - if key_size < 1024: - raise ValueError("key_size must be at least 1024-bits.") - - -def _modinv(e: int, m: int) -> int: - """ - Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 - """ - x1, x2 = 1, 0 - a, b = e, m - while b > 0: - q, r = divmod(a, b) - xn = x1 - q * x2 - a, b, x1, x2 = b, r, x2, xn - return x1 % m - - -def rsa_crt_iqmp(p: int, q: int) -> int: - """ - Compute the CRT (q ** -1) % p value from RSA primes p and q. - """ - return _modinv(q, p) - - -def rsa_crt_dmp1(private_exponent: int, p: int) -> int: - """ - Compute the CRT private_exponent % (p - 1) value from the RSA - private_exponent (d) and p. - """ - return private_exponent % (p - 1) - - -def rsa_crt_dmq1(private_exponent: int, q: int) -> int: - """ - Compute the CRT private_exponent % (q - 1) value from the RSA - private_exponent (d) and q. 
- """ - return private_exponent % (q - 1) - - -def rsa_recover_private_exponent(e: int, p: int, q: int) -> int: - """ - Compute the RSA private_exponent (d) given the public exponent (e) - and the RSA primes p and q. - - This uses the Carmichael totient function to generate the - smallest possible working value of the private exponent. - """ - # This lambda_n is the Carmichael totient function. - # The original RSA paper uses the Euler totient function - # here: phi_n = (p - 1) * (q - 1) - # Either version of the private exponent will work, but the - # one generated by the older formulation may be larger - # than necessary. (lambda_n always divides phi_n) - # - # TODO: Replace with lcm(p - 1, q - 1) once the minimum - # supported Python version is >= 3.9. - lambda_n = (p - 1) * (q - 1) // gcd(p - 1, q - 1) - return _modinv(e, lambda_n) - - -# Controls the number of iterations rsa_recover_prime_factors will perform -# to obtain the prime factors. -_MAX_RECOVERY_ATTEMPTS = 500 - - -def rsa_recover_prime_factors(n: int, e: int, d: int) -> tuple[int, int]: - """ - Compute factors p and q from the private exponent d. We assume that n has - no more than two factors. This function is adapted from code in PyCrypto. - """ - # reject invalid values early - if d <= 1 or e <= 1: - raise ValueError("d, e can't be <= 1") - if 17 != pow(17, e * d, n): - raise ValueError("n, d, e don't match") - # See 8.2.2(i) in Handbook of Applied Cryptography. - ktot = d * e - 1 - # The quantity d*e-1 is a multiple of phi(n), even, - # and can be represented as t*2^s. - t = ktot - while t % 2 == 0: - t = t // 2 - # Cycle through all multiplicative inverses in Zn. - # The algorithm is non-deterministic, but there is a 50% chance - # any candidate a leads to successful factoring. - # See "Digitalized Signatures and Public Key Functions as Intractable - # as Factorization", M. Rabin, 1979 - spotted = False - tries = 0 - while not spotted and tries < _MAX_RECOVERY_ATTEMPTS: - a = random.randint(2, n - 1) - tries += 1 - k = t - # Cycle through all values a^{t*2^i}=a^k - while k < ktot: - cand = pow(a, k, n) - # Check if a^k is a non-trivial root of unity (mod n) - if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: - # We have found a number such that (cand-1)(cand+1)=0 (mod n). - # Either of the terms divides n. - p = gcd(cand + 1, n) - spotted = True - break - k *= 2 - if not spotted: - raise ValueError("Unable to compute factors p and q from exponent d.") - # Found ! - q, r = divmod(n, p) - assert r == 0 - p, q = sorted((p, q), reverse=True) - return (p, q) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/types.py deleted file mode 100644 index 1fe4eaf..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/types.py +++ /dev/null @@ -1,111 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.hazmat.primitives.asymmetric import ( - dh, - dsa, - ec, - ed448, - ed25519, - rsa, - x448, - x25519, -) - -# Every asymmetric key type -PublicKeyTypes = typing.Union[ - dh.DHPublicKey, - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - x25519.X25519PublicKey, - x448.X448PublicKey, -] -PUBLIC_KEY_TYPES = PublicKeyTypes -utils.deprecated( - PUBLIC_KEY_TYPES, - __name__, - "Use PublicKeyTypes instead", - utils.DeprecatedIn40, - name="PUBLIC_KEY_TYPES", -) -# Every asymmetric key type -PrivateKeyTypes = typing.Union[ - dh.DHPrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - x25519.X25519PrivateKey, - x448.X448PrivateKey, -] -PRIVATE_KEY_TYPES = PrivateKeyTypes -utils.deprecated( - PRIVATE_KEY_TYPES, - __name__, - "Use PrivateKeyTypes instead", - utils.DeprecatedIn40, - name="PRIVATE_KEY_TYPES", -) -# Just the key types we allow to be used for x509 signing. This mirrors -# the certificate public key types -CertificateIssuerPrivateKeyTypes = typing.Union[ - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, -] -CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes -utils.deprecated( - CERTIFICATE_PRIVATE_KEY_TYPES, - __name__, - "Use CertificateIssuerPrivateKeyTypes instead", - utils.DeprecatedIn40, - name="CERTIFICATE_PRIVATE_KEY_TYPES", -) -# Just the key types we allow to be used for x509 signing. This mirrors -# the certificate private key types -CertificateIssuerPublicKeyTypes = typing.Union[ - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, -] -CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes -utils.deprecated( - CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, - __name__, - "Use CertificateIssuerPublicKeyTypes instead", - utils.DeprecatedIn40, - name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES", -) -# This type removes DHPublicKey. x448/x25519 can be a public key -# but cannot be used in signing so they are allowed here. -CertificatePublicKeyTypes = typing.Union[ - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - x25519.X25519PublicKey, - x448.X448PublicKey, -] -CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes -utils.deprecated( - CERTIFICATE_PUBLIC_KEY_TYPES, - __name__, - "Use CertificatePublicKeyTypes instead", - utils.DeprecatedIn40, - name="CERTIFICATE_PUBLIC_KEY_TYPES", -) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py deleted file mode 100644 index 826b956..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import asn1 -from cryptography.hazmat.primitives import hashes - -decode_dss_signature = asn1.decode_dss_signature -encode_dss_signature = asn1.encode_dss_signature - - -class Prehashed: - def __init__(self, algorithm: hashes.HashAlgorithm): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError("Expected instance of HashAlgorithm.") - - self._algorithm = algorithm - self._digest_size = algorithm.digest_size - - @property - def digest_size(self) -> int: - return self._digest_size diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py deleted file mode 100644 index a499376..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py +++ /dev/null @@ -1,122 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization -from cryptography.utils import Buffer - - -class X25519PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> X25519PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x25519_supported(): - raise UnsupportedAlgorithm( - "X25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x25519.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> X25519PublicKey: - """ - Returns a copy. 
- """ - - -X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey) - - -class X25519PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> X25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x25519_supported(): - raise UnsupportedAlgorithm( - "X25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - return rust_openssl.x25519.generate_key() - - @classmethod - def from_private_bytes(cls, data: Buffer) -> X25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x25519_supported(): - raise UnsupportedAlgorithm( - "X25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x25519.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> X25519PublicKey: - """ - Returns the public key associated with this private key - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def exchange(self, peer_public_key: X25519PublicKey) -> bytes: - """ - Performs a key exchange operation using the provided peer's public key. - """ - - @abc.abstractmethod - def __copy__(self) -> X25519PrivateKey: - """ - Returns a copy. - """ - - -X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py deleted file mode 100644 index c6fd71b..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py +++ /dev/null @@ -1,125 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization -from cryptography.utils import Buffer - - -class X448PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> X448PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x448_supported(): - raise UnsupportedAlgorithm( - "X448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x448.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __copy__(self) -> X448PublicKey: - """ - Returns a copy. 
- """ - - -if hasattr(rust_openssl, "x448"): - X448PublicKey.register(rust_openssl.x448.X448PublicKey) - - -class X448PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> X448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x448_supported(): - raise UnsupportedAlgorithm( - "X448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x448.generate_key() - - @classmethod - def from_private_bytes(cls, data: Buffer) -> X448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x448_supported(): - raise UnsupportedAlgorithm( - "X448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x448.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> X448PublicKey: - """ - Returns the public key associated with this private key - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def exchange(self, peer_public_key: X448PublicKey) -> bytes: - """ - Performs a key exchange operation using the provided peer's public key. - """ - - @abc.abstractmethod - def __copy__(self) -> X448PrivateKey: - """ - Returns a copy. - """ - - -if hasattr(rust_openssl, "x448"): - X448PrivateKey.register(rust_openssl.x448.X448PrivateKey) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py deleted file mode 100644 index 10c15d0..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.primitives._cipheralgorithm import ( - BlockCipherAlgorithm, - CipherAlgorithm, -) -from cryptography.hazmat.primitives.ciphers.base import ( - AEADCipherContext, - AEADDecryptionContext, - AEADEncryptionContext, - Cipher, - CipherContext, -) - -__all__ = [ - "AEADCipherContext", - "AEADDecryptionContext", - "AEADEncryptionContext", - "BlockCipherAlgorithm", - "Cipher", - "CipherAlgorithm", - "CipherContext", -] diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 06c108d..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-310.pyc deleted file mode 100644 index 7ba8098..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-310.pyc deleted file mode 100644 index 8442c8e..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-310.pyc deleted file mode 100644 index 09fbb40..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-310.pyc deleted file mode 100644 index d7e6a4a..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/aead.py deleted file mode 100644 index c8a582d..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/aead.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = [ - "AESCCM", - "AESGCM", - "AESGCMSIV", - "AESOCB3", - "AESSIV", - "ChaCha20Poly1305", -] - -AESGCM = rust_openssl.aead.AESGCM -ChaCha20Poly1305 = rust_openssl.aead.ChaCha20Poly1305 -AESCCM = rust_openssl.aead.AESCCM -AESSIV = rust_openssl.aead.AESSIV -AESOCB3 = rust_openssl.aead.AESOCB3 -AESGCMSIV = rust_openssl.aead.AESGCMSIV diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py deleted file mode 100644 index 9d65004..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py +++ /dev/null @@ -1,183 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography import utils -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - ARC4 as ARC4, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - CAST5 as CAST5, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - IDEA as IDEA, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - SEED as SEED, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - Blowfish as Blowfish, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - TripleDES as TripleDES, -) -from cryptography.hazmat.primitives._cipheralgorithm import _verify_key_size -from cryptography.hazmat.primitives.ciphers import ( - BlockCipherAlgorithm, - CipherAlgorithm, -) - - -class AES(BlockCipherAlgorithm): - name = "AES" - block_size = 128 - # 512 added to support AES-256-XTS, which uses 512-bit keys - key_sizes = frozenset([128, 192, 256, 512]) - - def __init__(self, key: utils.Buffer): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class AES128(BlockCipherAlgorithm): - name = "AES" - block_size = 128 - key_sizes = frozenset([128]) - key_size = 128 - - def __init__(self, key: utils.Buffer): - self.key = _verify_key_size(self, key) - - -class AES256(BlockCipherAlgorithm): - name = "AES" - block_size = 128 - key_sizes = frozenset([256]) - key_size = 256 - - def __init__(self, key: utils.Buffer): - self.key = _verify_key_size(self, key) - - -class Camellia(BlockCipherAlgorithm): - name = "camellia" - block_size = 128 - key_sizes = frozenset([128, 192, 256]) - - def __init__(self, key: utils.Buffer): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -utils.deprecated( - ARC4, - __name__, - "ARC4 has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and " - "will be removed from " - "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", - utils.DeprecatedIn43, - name="ARC4", -) - - -utils.deprecated( - TripleDES, - __name__, - "TripleDES has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and " - "will be removed from " - "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", - utils.DeprecatedIn43, - name="TripleDES", -) - -utils.deprecated( - Blowfish, - __name__, - "Blowfish has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.Blowfish and " - "will be removed from " - 
"cryptography.hazmat.primitives.ciphers.algorithms in 45.0.0.", - utils.DeprecatedIn37, - name="Blowfish", -) - - -utils.deprecated( - CAST5, - __name__, - "CAST5 has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.CAST5 and " - "will be removed from " - "cryptography.hazmat.primitives.ciphers.algorithms in 45.0.0.", - utils.DeprecatedIn37, - name="CAST5", -) - - -utils.deprecated( - IDEA, - __name__, - "IDEA has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.IDEA and " - "will be removed from " - "cryptography.hazmat.primitives.ciphers.algorithms in 45.0.0.", - utils.DeprecatedIn37, - name="IDEA", -) - - -utils.deprecated( - SEED, - __name__, - "SEED has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.SEED and " - "will be removed from " - "cryptography.hazmat.primitives.ciphers.algorithms in 45.0.0.", - utils.DeprecatedIn37, - name="SEED", -) - - -class ChaCha20(CipherAlgorithm): - name = "ChaCha20" - key_sizes = frozenset([256]) - - def __init__(self, key: utils.Buffer, nonce: utils.Buffer): - self.key = _verify_key_size(self, key) - utils._check_byteslike("nonce", nonce) - - if len(nonce) != 16: - raise ValueError("nonce must be 128-bits (16 bytes)") - - self._nonce = nonce - - @property - def nonce(self) -> utils.Buffer: - return self._nonce - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class SM4(BlockCipherAlgorithm): - name = "SM4" - block_size = 128 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/base.py deleted file mode 100644 index 24fceea..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/base.py +++ /dev/null @@ -1,146 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import typing - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm -from cryptography.hazmat.primitives.ciphers import modes -from cryptography.utils import Buffer - - -class CipherContext(metaclass=abc.ABCMeta): - @abc.abstractmethod - def update(self, data: Buffer) -> bytes: - """ - Processes the provided bytes through the cipher and returns the results - as bytes. - """ - - @abc.abstractmethod - def update_into(self, data: Buffer, buf: Buffer) -> int: - """ - Processes the provided bytes and writes the resulting data into the - provided buffer. Returns the number of bytes written. - """ - - @abc.abstractmethod - def finalize(self) -> bytes: - """ - Returns the results of processing the final block as bytes. - """ - - @abc.abstractmethod - def reset_nonce(self, nonce: bytes) -> None: - """ - Resets the nonce for the cipher context to the provided value. - Raises an exception if it does not support reset or if the - provided nonce does not have a valid length. - """ - - -class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta): - @abc.abstractmethod - def authenticate_additional_data(self, data: Buffer) -> None: - """ - Authenticates the provided bytes. 
- """ - - -class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): - @abc.abstractmethod - def finalize_with_tag(self, tag: bytes) -> bytes: - """ - Returns the results of processing the final block as bytes and allows - delayed passing of the authentication tag. - """ - - -class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def tag(self) -> bytes: - """ - Returns tag bytes. This is only available after encryption is - finalized. - """ - - -Mode = typing.TypeVar( - "Mode", bound=typing.Optional[modes.Mode], covariant=True -) - - -class Cipher(typing.Generic[Mode]): - def __init__( - self, - algorithm: CipherAlgorithm, - mode: Mode, - backend: typing.Any = None, - ) -> None: - if not isinstance(algorithm, CipherAlgorithm): - raise TypeError("Expected interface of CipherAlgorithm.") - - if mode is not None: - # mypy needs this assert to narrow the type from our generic - # type. Maybe it won't some time in the future. - assert isinstance(mode, modes.Mode) - mode.validate_for_algorithm(algorithm) - - self.algorithm = algorithm - self.mode = mode - - @typing.overload - def encryptor( - self: Cipher[modes.ModeWithAuthenticationTag], - ) -> AEADEncryptionContext: ... - - @typing.overload - def encryptor( - self: _CIPHER_TYPE, - ) -> CipherContext: ... - - def encryptor(self): - if isinstance(self.mode, modes.ModeWithAuthenticationTag): - if self.mode.tag is not None: - raise ValueError( - "Authentication tag must be None when encrypting." - ) - - return rust_openssl.ciphers.create_encryption_ctx( - self.algorithm, self.mode - ) - - @typing.overload - def decryptor( - self: Cipher[modes.ModeWithAuthenticationTag], - ) -> AEADDecryptionContext: ... - - @typing.overload - def decryptor( - self: _CIPHER_TYPE, - ) -> CipherContext: ... - - def decryptor(self): - return rust_openssl.ciphers.create_decryption_ctx( - self.algorithm, self.mode - ) - - -_CIPHER_TYPE = Cipher[ - typing.Union[ - modes.ModeWithNonce, - modes.ModeWithTweak, - modes.ECB, - modes.ModeWithInitializationVector, - None, - ] -] - -CipherContext.register(rust_openssl.ciphers.CipherContext) -AEADEncryptionContext.register(rust_openssl.ciphers.AEADEncryptionContext) -AEADDecryptionContext.register(rust_openssl.ciphers.AEADDecryptionContext) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/modes.py deleted file mode 100644 index 36c555c..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/modes.py +++ /dev/null @@ -1,268 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.primitives._cipheralgorithm import ( - BlockCipherAlgorithm, - CipherAlgorithm, -) -from cryptography.hazmat.primitives.ciphers import algorithms - - -class Mode(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this mode (e.g. "ECB", "CBC"). - """ - - @abc.abstractmethod - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - """ - Checks that all the necessary invariants of this (mode, algorithm) - combination are met. 
- """ - - -class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def initialization_vector(self) -> utils.Buffer: - """ - The value of the initialization vector for this mode as bytes. - """ - - -class ModeWithTweak(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def tweak(self) -> utils.Buffer: - """ - The value of the tweak for this mode as bytes. - """ - - -class ModeWithNonce(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def nonce(self) -> utils.Buffer: - """ - The value of the nonce for this mode as bytes. - """ - - -class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def tag(self) -> bytes | None: - """ - The value of the tag supplied to the constructor of this mode. - """ - - -def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None: - if algorithm.key_size > 256 and algorithm.name == "AES": - raise ValueError( - "Only 128, 192, and 256 bit keys are allowed for this AES mode" - ) - - -def _check_iv_length( - self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm -) -> None: - iv_len = len(self.initialization_vector) - if iv_len * 8 != algorithm.block_size: - raise ValueError(f"Invalid IV size ({iv_len}) for {self.name}.") - - -def _check_nonce_length( - nonce: utils.Buffer, name: str, algorithm: CipherAlgorithm -) -> None: - if not isinstance(algorithm, BlockCipherAlgorithm): - raise UnsupportedAlgorithm( - f"{name} requires a block cipher algorithm", - _Reasons.UNSUPPORTED_CIPHER, - ) - if len(nonce) * 8 != algorithm.block_size: - raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.") - - -def _check_iv_and_key_length( - self: ModeWithInitializationVector, algorithm: CipherAlgorithm -) -> None: - if not isinstance(algorithm, BlockCipherAlgorithm): - raise UnsupportedAlgorithm( - f"{self} requires a block cipher algorithm", - _Reasons.UNSUPPORTED_CIPHER, - ) - _check_aes_key_length(self, algorithm) - _check_iv_length(self, algorithm) - - -class CBC(ModeWithInitializationVector): - name = "CBC" - - def __init__(self, initialization_vector: utils.Buffer): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class XTS(ModeWithTweak): - name = "XTS" - - def __init__(self, tweak: utils.Buffer): - utils._check_byteslike("tweak", tweak) - - if len(tweak) != 16: - raise ValueError("tweak must be 128-bits (16 bytes)") - - self._tweak = tweak - - @property - def tweak(self) -> utils.Buffer: - return self._tweak - - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)): - raise TypeError( - "The AES128 and AES256 classes do not support XTS, please use " - "the standard AES class instead." 
- ) - - if algorithm.key_size not in (256, 512): - raise ValueError( - "The XTS specification requires a 256-bit key for AES-128-XTS" - " and 512-bit key for AES-256-XTS" - ) - - -class ECB(Mode): - name = "ECB" - - validate_for_algorithm = _check_aes_key_length - - -class OFB(ModeWithInitializationVector): - name = "OFB" - - def __init__(self, initialization_vector: utils.Buffer): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class CFB(ModeWithInitializationVector): - name = "CFB" - - def __init__(self, initialization_vector: utils.Buffer): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class CFB8(ModeWithInitializationVector): - name = "CFB8" - - def __init__(self, initialization_vector: utils.Buffer): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class CTR(ModeWithNonce): - name = "CTR" - - def __init__(self, nonce: utils.Buffer): - utils._check_byteslike("nonce", nonce) - self._nonce = nonce - - @property - def nonce(self) -> utils.Buffer: - return self._nonce - - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - _check_aes_key_length(self, algorithm) - _check_nonce_length(self.nonce, self.name, algorithm) - - -class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag): - name = "GCM" - _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8 - _MAX_AAD_BYTES = (2**64) // 8 - - def __init__( - self, - initialization_vector: utils.Buffer, - tag: bytes | None = None, - min_tag_length: int = 16, - ): - # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive - # This is a sane limit anyway so we'll enforce it here. - utils._check_byteslike("initialization_vector", initialization_vector) - if len(initialization_vector) < 8 or len(initialization_vector) > 128: - raise ValueError( - "initialization_vector must be between 8 and 128 bytes (64 " - "and 1024 bits)." - ) - self._initialization_vector = initialization_vector - if tag is not None: - utils._check_bytes("tag", tag) - if min_tag_length < 4: - raise ValueError("min_tag_length must be >= 4") - if len(tag) < min_tag_length: - raise ValueError( - f"Authentication tag must be {min_tag_length} bytes or " - "longer." 
- ) - self._tag = tag - self._min_tag_length = min_tag_length - - @property - def tag(self) -> bytes | None: - return self._tag - - @property - def initialization_vector(self) -> utils.Buffer: - return self._initialization_vector - - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - _check_aes_key_length(self, algorithm) - if not isinstance(algorithm, BlockCipherAlgorithm): - raise UnsupportedAlgorithm( - "GCM requires a block cipher algorithm", - _Reasons.UNSUPPORTED_CIPHER, - ) - block_size_bytes = algorithm.block_size // 8 - if self._tag is not None and len(self._tag) > block_size_bytes: - raise ValueError( - f"Authentication tag cannot be more than {block_size_bytes} " - "bytes." - ) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/cmac.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/cmac.py deleted file mode 100644 index 2c67ce2..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/cmac.py +++ /dev/null @@ -1,10 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = ["CMAC"] -CMAC = rust_openssl.cmac.CMAC diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/constant_time.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/constant_time.py deleted file mode 100644 index 3975c71..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/constant_time.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import hmac - - -def bytes_eq(a: bytes, b: bytes) -> bool: - if not isinstance(a, bytes) or not isinstance(b, bytes): - raise TypeError("a and b must be bytes.") - - return hmac.compare_digest(a, b) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/hashes.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/hashes.py deleted file mode 100644 index 4b55ec3..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/hashes.py +++ /dev/null @@ -1,246 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.utils import Buffer - -__all__ = [ - "MD5", - "SHA1", - "SHA3_224", - "SHA3_256", - "SHA3_384", - "SHA3_512", - "SHA224", - "SHA256", - "SHA384", - "SHA512", - "SHA512_224", - "SHA512_256", - "SHAKE128", - "SHAKE256", - "SM3", - "BLAKE2b", - "BLAKE2s", - "ExtendableOutputFunction", - "Hash", - "HashAlgorithm", - "HashContext", - "XOFHash", -] - - -class HashAlgorithm(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this algorithm (e.g. "sha256", "md5"). - """ - - @property - @abc.abstractmethod - def digest_size(self) -> int: - """ - The size of the resulting digest in bytes. 
- """ - - @property - @abc.abstractmethod - def block_size(self) -> int | None: - """ - The internal block size of the hash function, or None if the hash - function does not use blocks internally (e.g. SHA3). - """ - - -class HashContext(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def algorithm(self) -> HashAlgorithm: - """ - A HashAlgorithm that will be used by this context. - """ - - @abc.abstractmethod - def update(self, data: Buffer) -> None: - """ - Processes the provided bytes through the hash. - """ - - @abc.abstractmethod - def finalize(self) -> bytes: - """ - Finalizes the hash context and returns the hash digest as bytes. - """ - - @abc.abstractmethod - def copy(self) -> HashContext: - """ - Return a HashContext that is a copy of the current context. - """ - - -Hash = rust_openssl.hashes.Hash -HashContext.register(Hash) - -XOFHash = rust_openssl.hashes.XOFHash - - -class ExtendableOutputFunction(metaclass=abc.ABCMeta): - """ - An interface for extendable output functions. - """ - - -class SHA1(HashAlgorithm): - name = "sha1" - digest_size = 20 - block_size = 64 - - -class SHA512_224(HashAlgorithm): # noqa: N801 - name = "sha512-224" - digest_size = 28 - block_size = 128 - - -class SHA512_256(HashAlgorithm): # noqa: N801 - name = "sha512-256" - digest_size = 32 - block_size = 128 - - -class SHA224(HashAlgorithm): - name = "sha224" - digest_size = 28 - block_size = 64 - - -class SHA256(HashAlgorithm): - name = "sha256" - digest_size = 32 - block_size = 64 - - -class SHA384(HashAlgorithm): - name = "sha384" - digest_size = 48 - block_size = 128 - - -class SHA512(HashAlgorithm): - name = "sha512" - digest_size = 64 - block_size = 128 - - -class SHA3_224(HashAlgorithm): # noqa: N801 - name = "sha3-224" - digest_size = 28 - block_size = None - - -class SHA3_256(HashAlgorithm): # noqa: N801 - name = "sha3-256" - digest_size = 32 - block_size = None - - -class SHA3_384(HashAlgorithm): # noqa: N801 - name = "sha3-384" - digest_size = 48 - block_size = None - - -class SHA3_512(HashAlgorithm): # noqa: N801 - name = "sha3-512" - digest_size = 64 - block_size = None - - -class SHAKE128(HashAlgorithm, ExtendableOutputFunction): - name = "shake128" - block_size = None - - def __init__(self, digest_size: int): - if not isinstance(digest_size, int): - raise TypeError("digest_size must be an integer") - - if digest_size < 1: - raise ValueError("digest_size must be a positive integer") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class SHAKE256(HashAlgorithm, ExtendableOutputFunction): - name = "shake256" - block_size = None - - def __init__(self, digest_size: int): - if not isinstance(digest_size, int): - raise TypeError("digest_size must be an integer") - - if digest_size < 1: - raise ValueError("digest_size must be a positive integer") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class MD5(HashAlgorithm): - name = "md5" - digest_size = 16 - block_size = 64 - - -class BLAKE2b(HashAlgorithm): - name = "blake2b" - _max_digest_size = 64 - _min_digest_size = 1 - block_size = 128 - - def __init__(self, digest_size: int): - if digest_size != 64: - raise ValueError("Digest size must be 64") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class BLAKE2s(HashAlgorithm): - name = "blake2s" - block_size = 64 - _max_digest_size = 32 - _min_digest_size = 1 - - def __init__(self, digest_size: 
int): - if digest_size != 32: - raise ValueError("Digest size must be 32") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class SM3(HashAlgorithm): - name = "sm3" - digest_size = 32 - block_size = 64 diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/hmac.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/hmac.py deleted file mode 100644 index a9442d5..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/hmac.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import hashes - -__all__ = ["HMAC"] - -HMAC = rust_openssl.hmac.HMAC -hashes.HashContext.register(HMAC) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__init__.py deleted file mode 100644 index 79bb459..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - - -class KeyDerivationFunction(metaclass=abc.ABCMeta): - @abc.abstractmethod - def derive(self, key_material: bytes) -> bytes: - """ - Deterministically generates and returns a new key based on the existing - key material. - """ - - @abc.abstractmethod - def verify(self, key_material: bytes, expected_key: bytes) -> None: - """ - Checks whether the key generated by the key material matches the - expected derived key. Raises an exception if they do not match. 
- """ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 10a5527..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-310.pyc deleted file mode 100644 index cce88e6..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-310.pyc deleted file mode 100644 index 2e438df..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-310.pyc deleted file mode 100644 index e24277d..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-310.pyc deleted file mode 100644 index 9d467e1..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-310.pyc deleted file mode 100644 index 82f4102..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-310.pyc deleted file mode 100644 index 3712bdf..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-310.pyc deleted file mode 100644 index 3f9b10c..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/argon2.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/argon2.py deleted file mode 100644 index 405fc8d..0000000 --- 
a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/argon2.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - -Argon2id = rust_openssl.kdf.Argon2id -KeyDerivationFunction.register(Argon2id) - -__all__ = ["Argon2id"] diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py deleted file mode 100644 index 1b92841..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py +++ /dev/null @@ -1,125 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing -from collections.abc import Callable - -from cryptography import utils -from cryptography.exceptions import AlreadyFinalized, InvalidKey -from cryptography.hazmat.primitives import constant_time, hashes, hmac -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -def _int_to_u32be(n: int) -> bytes: - return n.to_bytes(length=4, byteorder="big") - - -def _common_args_checks( - algorithm: hashes.HashAlgorithm, - length: int, - otherinfo: bytes | None, -) -> None: - max_length = algorithm.digest_size * (2**32 - 1) - if length > max_length: - raise ValueError(f"Cannot derive keys larger than {max_length} bits.") - if otherinfo is not None: - utils._check_bytes("otherinfo", otherinfo) - - -def _concatkdf_derive( - key_material: utils.Buffer, - length: int, - auxfn: Callable[[], hashes.HashContext], - otherinfo: bytes, -) -> bytes: - utils._check_byteslike("key_material", key_material) - output = [b""] - outlen = 0 - counter = 1 - - while length > outlen: - h = auxfn() - h.update(_int_to_u32be(counter)) - h.update(key_material) - h.update(otherinfo) - output.append(h.finalize()) - outlen += len(output[-1]) - counter += 1 - - return b"".join(output)[:length] - - -class ConcatKDFHash(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - otherinfo: bytes | None, - backend: typing.Any = None, - ): - _common_args_checks(algorithm, length, otherinfo) - self._algorithm = algorithm - self._length = length - self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" - - self._used = False - - def _hash(self) -> hashes.Hash: - return hashes.Hash(self._algorithm) - - def derive(self, key_material: utils.Buffer) -> bytes: - if self._used: - raise AlreadyFinalized - self._used = True - return _concatkdf_derive( - key_material, self._length, self._hash, self._otherinfo - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey - - -class ConcatKDFHMAC(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - salt: bytes | None, - otherinfo: bytes | None, - backend: typing.Any = None, - ): - _common_args_checks(algorithm, length, otherinfo) - self._algorithm = algorithm - self._length = length - self._otherinfo: bytes = otherinfo if 
otherinfo is not None else b"" - - if algorithm.block_size is None: - raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF") - - if salt is None: - salt = b"\x00" * algorithm.block_size - else: - utils._check_bytes("salt", salt) - - self._salt = salt - - self._used = False - - def _hmac(self) -> hmac.HMAC: - return hmac.HMAC(self._salt, self._algorithm) - - def derive(self, key_material: utils.Buffer) -> bytes: - if self._used: - raise AlreadyFinalized - self._used = True - return _concatkdf_derive( - key_material, self._length, self._hmac, self._otherinfo - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py deleted file mode 100644 index ce11c54..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py +++ /dev/null @@ -1,101 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import AlreadyFinalized, InvalidKey -from cryptography.hazmat.primitives import constant_time, hashes, hmac -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -class HKDF(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - salt: bytes | None, - info: bytes | None, - backend: typing.Any = None, - ): - self._algorithm = algorithm - - if salt is None: - salt = b"\x00" * self._algorithm.digest_size - else: - utils._check_bytes("salt", salt) - - self._salt = salt - - self._hkdf_expand = HKDFExpand(self._algorithm, length, info) - - def _extract(self, key_material: utils.Buffer) -> bytes: - h = hmac.HMAC(self._salt, self._algorithm) - h.update(key_material) - return h.finalize() - - def derive(self, key_material: utils.Buffer) -> bytes: - utils._check_byteslike("key_material", key_material) - return self._hkdf_expand.derive(self._extract(key_material)) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey - - -class HKDFExpand(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - info: bytes | None, - backend: typing.Any = None, - ): - self._algorithm = algorithm - - max_length = 255 * algorithm.digest_size - - if length > max_length: - raise ValueError( - f"Cannot derive keys larger than {max_length} octets." 
- ) - - self._length = length - - if info is None: - info = b"" - else: - utils._check_bytes("info", info) - - self._info = info - - self._used = False - - def _expand(self, key_material: utils.Buffer) -> bytes: - output = [b""] - counter = 1 - - while self._algorithm.digest_size * (len(output) - 1) < self._length: - h = hmac.HMAC(key_material, self._algorithm) - h.update(output[-1]) - h.update(self._info) - h.update(bytes([counter])) - output.append(h.finalize()) - counter += 1 - - return b"".join(output)[: self._length] - - def derive(self, key_material: utils.Buffer) -> bytes: - utils._check_byteslike("key_material", key_material) - if self._used: - raise AlreadyFinalized - - self._used = True - return self._expand(key_material) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py deleted file mode 100644 index d6c3ff3..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py +++ /dev/null @@ -1,305 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing -from collections.abc import Callable - -from cryptography import utils -from cryptography.exceptions import ( - AlreadyFinalized, - InvalidKey, - UnsupportedAlgorithm, - _Reasons, -) -from cryptography.hazmat.primitives import ( - ciphers, - cmac, - constant_time, - hashes, - hmac, -) -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -class Mode(utils.Enum): - CounterMode = "ctr" - - -class CounterLocation(utils.Enum): - BeforeFixed = "before_fixed" - AfterFixed = "after_fixed" - MiddleFixed = "middle_fixed" - - -class _KBKDFDeriver: - def __init__( - self, - prf: Callable, - mode: Mode, - length: int, - rlen: int, - llen: int | None, - location: CounterLocation, - break_location: int | None, - label: bytes | None, - context: bytes | None, - fixed: bytes | None, - ): - assert callable(prf) - - if not isinstance(mode, Mode): - raise TypeError("mode must be of type Mode") - - if not isinstance(location, CounterLocation): - raise TypeError("location must be of type CounterLocation") - - if break_location is None and location is CounterLocation.MiddleFixed: - raise ValueError("Please specify a break_location") - - if ( - break_location is not None - and location != CounterLocation.MiddleFixed - ): - raise ValueError( - "break_location is ignored when location is not" - " CounterLocation.MiddleFixed" - ) - - if break_location is not None and not isinstance(break_location, int): - raise TypeError("break_location must be an integer") - - if break_location is not None and break_location < 0: - raise ValueError("break_location must be a positive integer") - - if (label or context) and fixed: - raise ValueError( - "When supplying fixed data, label and context are ignored." 
- ) - - if rlen is None or not self._valid_byte_length(rlen): - raise ValueError("rlen must be between 1 and 4") - - if llen is None and fixed is None: - raise ValueError("Please specify an llen") - - if llen is not None and not isinstance(llen, int): - raise TypeError("llen must be an integer") - - if llen == 0: - raise ValueError("llen must be non-zero") - - if label is None: - label = b"" - - if context is None: - context = b"" - - utils._check_bytes("label", label) - utils._check_bytes("context", context) - self._prf = prf - self._mode = mode - self._length = length - self._rlen = rlen - self._llen = llen - self._location = location - self._break_location = break_location - self._label = label - self._context = context - self._used = False - self._fixed_data = fixed - - @staticmethod - def _valid_byte_length(value: int) -> bool: - if not isinstance(value, int): - raise TypeError("value must be of type int") - - value_bin = utils.int_to_bytes(1, value) - if not 1 <= len(value_bin) <= 4: - return False - return True - - def derive( - self, key_material: utils.Buffer, prf_output_size: int - ) -> bytes: - if self._used: - raise AlreadyFinalized - - utils._check_byteslike("key_material", key_material) - self._used = True - - # inverse floor division (equivalent to ceiling) - rounds = -(-self._length // prf_output_size) - - output = [b""] - - # For counter mode, the number of iterations shall not be - # larger than 2^r-1, where r <= 32 is the binary length of the counter - # This ensures that the counter values used as an input to the - # PRF will not repeat during a particular call to the KDF function. - r_bin = utils.int_to_bytes(1, self._rlen) - if rounds > pow(2, len(r_bin) * 8) - 1: - raise ValueError("There are too many iterations.") - - fixed = self._generate_fixed_input() - - if self._location == CounterLocation.BeforeFixed: - data_before_ctr = b"" - data_after_ctr = fixed - elif self._location == CounterLocation.AfterFixed: - data_before_ctr = fixed - data_after_ctr = b"" - else: - if isinstance( - self._break_location, int - ) and self._break_location > len(fixed): - raise ValueError("break_location offset > len(fixed)") - data_before_ctr = fixed[: self._break_location] - data_after_ctr = fixed[self._break_location :] - - for i in range(1, rounds + 1): - h = self._prf(key_material) - - counter = utils.int_to_bytes(i, self._rlen) - input_data = data_before_ctr + counter + data_after_ctr - - h.update(input_data) - - output.append(h.finalize()) - - return b"".join(output)[: self._length] - - def _generate_fixed_input(self) -> bytes: - if self._fixed_data and isinstance(self._fixed_data, bytes): - return self._fixed_data - - l_val = utils.int_to_bytes(self._length * 8, self._llen) - - return b"".join([self._label, b"\x00", self._context, l_val]) - - -class KBKDFHMAC(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - mode: Mode, - length: int, - rlen: int, - llen: int | None, - location: CounterLocation, - label: bytes | None, - context: bytes | None, - fixed: bytes | None, - backend: typing.Any = None, - *, - break_location: int | None = None, - ): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported hash algorithm.", - _Reasons.UNSUPPORTED_HASH, - ) - - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.hmac_supported(algorithm): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported hmac algorithm.", - 
_Reasons.UNSUPPORTED_HASH, - ) - - self._algorithm = algorithm - - self._deriver = _KBKDFDeriver( - self._prf, - mode, - length, - rlen, - llen, - location, - break_location, - label, - context, - fixed, - ) - - def _prf(self, key_material: bytes) -> hmac.HMAC: - return hmac.HMAC(key_material, self._algorithm) - - def derive(self, key_material: utils.Buffer) -> bytes: - return self._deriver.derive(key_material, self._algorithm.digest_size) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey - - -class KBKDFCMAC(KeyDerivationFunction): - def __init__( - self, - algorithm, - mode: Mode, - length: int, - rlen: int, - llen: int | None, - location: CounterLocation, - label: bytes | None, - context: bytes | None, - fixed: bytes | None, - backend: typing.Any = None, - *, - break_location: int | None = None, - ): - if not issubclass( - algorithm, ciphers.BlockCipherAlgorithm - ) or not issubclass(algorithm, ciphers.CipherAlgorithm): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported cipher algorithm.", - _Reasons.UNSUPPORTED_CIPHER, - ) - - self._algorithm = algorithm - self._cipher: ciphers.BlockCipherAlgorithm | None = None - - self._deriver = _KBKDFDeriver( - self._prf, - mode, - length, - rlen, - llen, - location, - break_location, - label, - context, - fixed, - ) - - def _prf(self, _: bytes) -> cmac.CMAC: - assert self._cipher is not None - - return cmac.CMAC(self._cipher) - - def derive(self, key_material: utils.Buffer) -> bytes: - self._cipher = self._algorithm(key_material) - - assert self._cipher is not None - - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.cmac_algorithm_supported(self._cipher): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported cipher algorithm.", - _Reasons.UNSUPPORTED_CIPHER, - ) - - return self._deriver.derive(key_material, self._cipher.block_size // 8) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py deleted file mode 100644 index d539f13..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py +++ /dev/null @@ -1,62 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import ( - AlreadyFinalized, - InvalidKey, - UnsupportedAlgorithm, - _Reasons, -) -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import constant_time, hashes -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -class PBKDF2HMAC(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - salt: bytes, - iterations: int, - backend: typing.Any = None, - ): - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.pbkdf2_hmac_supported(algorithm): - raise UnsupportedAlgorithm( - f"{algorithm.name} is not supported for PBKDF2.", - _Reasons.UNSUPPORTED_HASH, - ) - self._used = False - self._algorithm = algorithm - self._length = length - utils._check_bytes("salt", salt) - self._salt = salt - self._iterations = iterations - - def derive(self, key_material: utils.Buffer) -> bytes: - if self._used: - raise AlreadyFinalized("PBKDF2 instances can only be used once.") - self._used = True - - return rust_openssl.kdf.derive_pbkdf2_hmac( - key_material, - self._algorithm, - self._salt, - self._iterations, - self._length, - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - derived_key = self.derive(key_material) - if not constant_time.bytes_eq(derived_key, expected_key): - raise InvalidKey("Keys do not match.") diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py deleted file mode 100644 index f791cee..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import sys - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - -# This is used by the scrypt tests to skip tests that require more memory -# than the MEM_LIMIT -_MEM_LIMIT = sys.maxsize // 2 - -Scrypt = rust_openssl.kdf.Scrypt -KeyDerivationFunction.register(Scrypt) - -__all__ = ["Scrypt"] diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py deleted file mode 100644 index 63870cd..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py +++ /dev/null @@ -1,61 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import AlreadyFinalized, InvalidKey -from cryptography.hazmat.primitives import constant_time, hashes -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -def _int_to_u32be(n: int) -> bytes: - return n.to_bytes(length=4, byteorder="big") - - -class X963KDF(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - sharedinfo: bytes | None, - backend: typing.Any = None, - ): - max_len = algorithm.digest_size * (2**32 - 1) - if length > max_len: - raise ValueError(f"Cannot derive keys larger than {max_len} bits.") - if sharedinfo is not None: - utils._check_bytes("sharedinfo", sharedinfo) - - self._algorithm = algorithm - self._length = length - self._sharedinfo = sharedinfo - self._used = False - - def derive(self, key_material: utils.Buffer) -> bytes: - if self._used: - raise AlreadyFinalized - self._used = True - utils._check_byteslike("key_material", key_material) - output = [b""] - outlen = 0 - counter = 1 - - while self._length > outlen: - h = hashes.Hash(self._algorithm) - h.update(key_material) - h.update(_int_to_u32be(counter)) - if self._sharedinfo is not None: - h.update(self._sharedinfo) - output.append(h.finalize()) - outlen += len(output[-1]) - counter += 1 - - return b"".join(output)[: self._length] - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/keywrap.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/keywrap.py deleted file mode 100644 index b93d87d..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/keywrap.py +++ /dev/null @@ -1,177 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography.hazmat.primitives.ciphers import Cipher -from cryptography.hazmat.primitives.ciphers.algorithms import AES -from cryptography.hazmat.primitives.ciphers.modes import ECB -from cryptography.hazmat.primitives.constant_time import bytes_eq - - -def _wrap_core( - wrapping_key: bytes, - a: bytes, - r: list[bytes], -) -> bytes: - # RFC 3394 Key Wrap - 2.2.1 (index method) - encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() - n = len(r) - for j in range(6): - for i in range(n): - # every encryption operation is a discrete 16 byte chunk (because - # AES has a 128-bit block size) and since we're using ECB it is - # safe to reuse the encryptor for the entire operation - b = encryptor.update(a + r[i]) - a = ( - int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1) - ).to_bytes(length=8, byteorder="big") - r[i] = b[-8:] - - assert encryptor.finalize() == b"" - - return a + b"".join(r) - - -def aes_key_wrap( - wrapping_key: bytes, - key_to_wrap: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - if len(key_to_wrap) < 16: - raise ValueError("The key to wrap must be at least 16 bytes") - - if len(key_to_wrap) % 8 != 0: - raise ValueError("The key to wrap must be a multiple of 8 bytes") - - a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" - r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] - return _wrap_core(wrapping_key, a, r) - - -def _unwrap_core( - wrapping_key: bytes, - a: bytes, - r: list[bytes], -) -> tuple[bytes, list[bytes]]: - # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) - decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() - n = len(r) - for j in reversed(range(6)): - for i in reversed(range(n)): - atr = ( - int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1) - ).to_bytes(length=8, byteorder="big") + r[i] - # every decryption operation is a discrete 16 byte chunk so - # it is safe to reuse the decryptor for the entire operation - b = decryptor.update(atr) - a = b[:8] - r[i] = b[-8:] - - assert decryptor.finalize() == b"" - return a, r - - -def aes_key_wrap_with_padding( - wrapping_key: bytes, - key_to_wrap: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - aiv = b"\xa6\x59\x59\xa6" + len(key_to_wrap).to_bytes( - length=4, byteorder="big" - ) - # pad the key to wrap if necessary - pad = (8 - (len(key_to_wrap) % 8)) % 8 - key_to_wrap = key_to_wrap + b"\x00" * pad - if len(key_to_wrap) == 8: - # RFC 5649 - 4.1 - exactly 8 octets after padding - encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() - b = encryptor.update(aiv + key_to_wrap) - assert encryptor.finalize() == b"" - return b - else: - r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] - return _wrap_core(wrapping_key, aiv, r) - - -def aes_key_unwrap_with_padding( - wrapping_key: bytes, - wrapped_key: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapped_key) < 16: - raise InvalidUnwrap("Must be at least 16 bytes") - - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - if len(wrapped_key) == 16: - # RFC 5649 - 4.2 - exactly two 64-bit blocks - decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() - out = decryptor.update(wrapped_key) - assert decryptor.finalize() == b"" - a = out[:8] - data = 
out[8:] - n = 1 - else: - r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] - encrypted_aiv = r.pop(0) - n = len(r) - a, r = _unwrap_core(wrapping_key, encrypted_aiv, r) - data = b"".join(r) - - # 1) Check that MSB(32,A) = A65959A6. - # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let - # MLI = LSB(32,A). - # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of - # the output data are zero. - mli = int.from_bytes(a[4:], byteorder="big") - b = (8 * n) - mli - if ( - not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") - or not 8 * (n - 1) < mli <= 8 * n - or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b)) - ): - raise InvalidUnwrap() - - if b == 0: - return data - else: - return data[:-b] - - -def aes_key_unwrap( - wrapping_key: bytes, - wrapped_key: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapped_key) < 24: - raise InvalidUnwrap("Must be at least 24 bytes") - - if len(wrapped_key) % 8 != 0: - raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes") - - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" - r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] - a = r.pop(0) - a, r = _unwrap_core(wrapping_key, a, r) - if not bytes_eq(a, aiv): - raise InvalidUnwrap() - - return b"".join(r) - - -class InvalidUnwrap(Exception): - pass diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/padding.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/padding.py deleted file mode 100644 index f9cd1f1..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/padding.py +++ /dev/null @@ -1,69 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils -from cryptography.hazmat.bindings._rust import ( - ANSIX923PaddingContext, - ANSIX923UnpaddingContext, - PKCS7PaddingContext, - PKCS7UnpaddingContext, -) - - -class PaddingContext(metaclass=abc.ABCMeta): - @abc.abstractmethod - def update(self, data: utils.Buffer) -> bytes: - """ - Pads the provided bytes and returns any available data as bytes. - """ - - @abc.abstractmethod - def finalize(self) -> bytes: - """ - Finalize the padding, returns bytes. 
- """ - - -def _byte_padding_check(block_size: int) -> None: - if not (0 <= block_size <= 2040): - raise ValueError("block_size must be in range(0, 2041).") - - if block_size % 8 != 0: - raise ValueError("block_size must be a multiple of 8.") - - -class PKCS7: - def __init__(self, block_size: int): - _byte_padding_check(block_size) - self.block_size = block_size - - def padder(self) -> PaddingContext: - return PKCS7PaddingContext(self.block_size) - - def unpadder(self) -> PaddingContext: - return PKCS7UnpaddingContext(self.block_size) - - -PaddingContext.register(PKCS7PaddingContext) -PaddingContext.register(PKCS7UnpaddingContext) - - -class ANSIX923: - def __init__(self, block_size: int): - _byte_padding_check(block_size) - self.block_size = block_size - - def padder(self) -> PaddingContext: - return ANSIX923PaddingContext(self.block_size) - - def unpadder(self) -> PaddingContext: - return ANSIX923UnpaddingContext(self.block_size) - - -PaddingContext.register(ANSIX923PaddingContext) -PaddingContext.register(ANSIX923UnpaddingContext) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/poly1305.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/poly1305.py deleted file mode 100644 index 7f5a77a..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/poly1305.py +++ /dev/null @@ -1,11 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = ["Poly1305"] - -Poly1305 = rust_openssl.poly1305.Poly1305 diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__init__.py deleted file mode 100644 index 62283cc..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__init__.py +++ /dev/null @@ -1,65 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.primitives._serialization import ( - BestAvailableEncryption, - Encoding, - KeySerializationEncryption, - NoEncryption, - ParameterFormat, - PrivateFormat, - PublicFormat, - _KeySerializationEncryption, -) -from cryptography.hazmat.primitives.serialization.base import ( - load_der_parameters, - load_der_private_key, - load_der_public_key, - load_pem_parameters, - load_pem_private_key, - load_pem_public_key, -) -from cryptography.hazmat.primitives.serialization.ssh import ( - SSHCertificate, - SSHCertificateBuilder, - SSHCertificateType, - SSHCertPrivateKeyTypes, - SSHCertPublicKeyTypes, - SSHPrivateKeyTypes, - SSHPublicKeyTypes, - load_ssh_private_key, - load_ssh_public_identity, - load_ssh_public_key, - ssh_key_fingerprint, -) - -__all__ = [ - "BestAvailableEncryption", - "Encoding", - "KeySerializationEncryption", - "NoEncryption", - "ParameterFormat", - "PrivateFormat", - "PublicFormat", - "SSHCertPrivateKeyTypes", - "SSHCertPublicKeyTypes", - "SSHCertificate", - "SSHCertificateBuilder", - "SSHCertificateType", - "SSHPrivateKeyTypes", - "SSHPublicKeyTypes", - "_KeySerializationEncryption", - "load_der_parameters", - "load_der_private_key", - "load_der_public_key", - "load_pem_parameters", - "load_pem_private_key", - "load_pem_public_key", - "load_ssh_private_key", - "load_ssh_public_identity", - "load_ssh_public_key", - "ssh_key_fingerprint", -] diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 4e3c96e..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-310.pyc deleted file mode 100644 index b929c73..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-310.pyc deleted file mode 100644 index f0ca4b0..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-310.pyc deleted file mode 100644 index 261df97..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-310.pyc deleted file mode 100644 index 8a7aebd..0000000 Binary files 
a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/base.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/base.py deleted file mode 100644 index e7c998b..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/base.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -load_pem_private_key = rust_openssl.keys.load_pem_private_key -load_der_private_key = rust_openssl.keys.load_der_private_key - -load_pem_public_key = rust_openssl.keys.load_pem_public_key -load_der_public_key = rust_openssl.keys.load_der_public_key - -load_pem_parameters = rust_openssl.dh.from_pem_parameters -load_der_parameters = rust_openssl.dh.from_der_parameters diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py deleted file mode 100644 index 58884ff..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py +++ /dev/null @@ -1,176 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing -from collections.abc import Iterable - -from cryptography import x509 -from cryptography.hazmat.bindings._rust import pkcs12 as rust_pkcs12 -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives._serialization import PBES as PBES -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed448, - ed25519, - rsa, -) -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes - -__all__ = [ - "PBES", - "PKCS12Certificate", - "PKCS12KeyAndCertificates", - "PKCS12PrivateKeyTypes", - "load_key_and_certificates", - "load_pkcs12", - "serialize_java_truststore", - "serialize_key_and_certificates", -] - -PKCS12PrivateKeyTypes = typing.Union[ - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, -] - - -PKCS12Certificate = rust_pkcs12.PKCS12Certificate - - -class PKCS12KeyAndCertificates: - def __init__( - self, - key: PrivateKeyTypes | None, - cert: PKCS12Certificate | None, - additional_certs: list[PKCS12Certificate], - ): - if key is not None and not isinstance( - key, - ( - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - ), - ): - raise TypeError( - "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" - " private key, or None." 
- ) - if cert is not None and not isinstance(cert, PKCS12Certificate): - raise TypeError("cert must be a PKCS12Certificate object or None") - if not all( - isinstance(add_cert, PKCS12Certificate) - for add_cert in additional_certs - ): - raise TypeError( - "all values in additional_certs must be PKCS12Certificate" - " objects" - ) - self._key = key - self._cert = cert - self._additional_certs = additional_certs - - @property - def key(self) -> PrivateKeyTypes | None: - return self._key - - @property - def cert(self) -> PKCS12Certificate | None: - return self._cert - - @property - def additional_certs(self) -> list[PKCS12Certificate]: - return self._additional_certs - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PKCS12KeyAndCertificates): - return NotImplemented - - return ( - self.key == other.key - and self.cert == other.cert - and self.additional_certs == other.additional_certs - ) - - def __hash__(self) -> int: - return hash((self.key, self.cert, tuple(self.additional_certs))) - - def __repr__(self) -> str: - fmt = ( - "<PKCS12KeyAndCertificates(key={}, cert={}, additional_certs={})>" - ) - return fmt.format(self.key, self.cert, self.additional_certs) - - -load_key_and_certificates = rust_pkcs12.load_key_and_certificates -load_pkcs12 = rust_pkcs12.load_pkcs12 - - -_PKCS12CATypes = typing.Union[ - x509.Certificate, - PKCS12Certificate, -] - - -def serialize_java_truststore( - certs: Iterable[PKCS12Certificate], - encryption_algorithm: serialization.KeySerializationEncryption, -) -> bytes: - if not certs: - raise ValueError("You must supply at least one cert") - - if not isinstance( - encryption_algorithm, serialization.KeySerializationEncryption - ): - raise TypeError( - "Key encryption algorithm must be a " - "KeySerializationEncryption instance" - ) - - return rust_pkcs12.serialize_java_truststore(certs, encryption_algorithm) - - -def serialize_key_and_certificates( - name: bytes | None, - key: PKCS12PrivateKeyTypes | None, - cert: x509.Certificate | None, - cas: Iterable[_PKCS12CATypes] | None, - encryption_algorithm: serialization.KeySerializationEncryption, -) -> bytes: - if key is not None and not isinstance( - key, - ( - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - ), - ): - raise TypeError( - "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" - " private key, or None." - ) - - if not isinstance( - encryption_algorithm, serialization.KeySerializationEncryption - ): - raise TypeError( - "Key encryption algorithm must be a " - "KeySerializationEncryption instance" - ) - - if key is None and cert is None and not cas: - raise ValueError("You must supply at least one of key, cert, or cas") - - return rust_pkcs12.serialize_key_and_certificates( - name, key, cert, cas, encryption_algorithm - ) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py deleted file mode 100644 index 456dc5b..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py +++ /dev/null @@ -1,411 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import email.base64mime -import email.generator -import email.message -import email.policy -import io -import typing -from collections.abc import Iterable - -from cryptography import utils, x509 -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7 -from cryptography.hazmat.primitives import hashes, serialization -from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa -from cryptography.hazmat.primitives.ciphers import ( - algorithms, -) -from cryptography.utils import _check_byteslike - -load_pem_pkcs7_certificates = rust_pkcs7.load_pem_pkcs7_certificates - -load_der_pkcs7_certificates = rust_pkcs7.load_der_pkcs7_certificates - -serialize_certificates = rust_pkcs7.serialize_certificates - -PKCS7HashTypes = typing.Union[ - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, -] - -PKCS7PrivateKeyTypes = typing.Union[ - rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey -] - -ContentEncryptionAlgorithm = typing.Union[ - typing.Type[algorithms.AES128], typing.Type[algorithms.AES256] -] - - -class PKCS7Options(utils.Enum): - Text = "Add text/plain MIME type" - Binary = "Don't translate input data into canonical MIME format" - DetachedSignature = "Don't embed data in the PKCS7 structure" - NoCapabilities = "Don't embed SMIME capabilities" - NoAttributes = "Don't embed authenticatedAttributes" - NoCerts = "Don't embed signer certificate" - - -class PKCS7SignatureBuilder: - def __init__( - self, - data: utils.Buffer | None = None, - signers: list[ - tuple[ - x509.Certificate, - PKCS7PrivateKeyTypes, - PKCS7HashTypes, - padding.PSS | padding.PKCS1v15 | None, - ] - ] = [], - additional_certs: list[x509.Certificate] = [], - ): - self._data = data - self._signers = signers - self._additional_certs = additional_certs - - def set_data(self, data: utils.Buffer) -> PKCS7SignatureBuilder: - _check_byteslike("data", data) - if self._data is not None: - raise ValueError("data may only be set once") - - return PKCS7SignatureBuilder(data, self._signers) - - def add_signer( - self, - certificate: x509.Certificate, - private_key: PKCS7PrivateKeyTypes, - hash_algorithm: PKCS7HashTypes, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ) -> PKCS7SignatureBuilder: - if not isinstance( - hash_algorithm, - ( - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - ), - ): - raise TypeError( - "hash_algorithm must be one of hashes.SHA224, " - "SHA256, SHA384, or SHA512" - ) - if not isinstance(certificate, x509.Certificate): - raise TypeError("certificate must be a x509.Certificate") - - if not isinstance( - private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey) - ): - raise TypeError("Only RSA & EC keys are supported at this time.") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - return PKCS7SignatureBuilder( - self._data, - [ - *self._signers, - (certificate, private_key, hash_algorithm, rsa_padding), - ], - ) - - def add_certificate( - self, certificate: x509.Certificate - ) -> PKCS7SignatureBuilder: - if not isinstance(certificate, x509.Certificate): - raise TypeError("certificate must be a x509.Certificate") - - return PKCS7SignatureBuilder( - self._data, self._signers, [*self._additional_certs, certificate] - ) - 
- def sign( - self, - encoding: serialization.Encoding, - options: Iterable[PKCS7Options], - backend: typing.Any = None, - ) -> bytes: - if len(self._signers) == 0: - raise ValueError("Must have at least one signer") - if self._data is None: - raise ValueError("You must add data to sign") - options = list(options) - if not all(isinstance(x, PKCS7Options) for x in options): - raise ValueError("options must be from the PKCS7Options enum") - if encoding not in ( - serialization.Encoding.PEM, - serialization.Encoding.DER, - serialization.Encoding.SMIME, - ): - raise ValueError( - "Must be PEM, DER, or SMIME from the Encoding enum" - ) - - # Text is a meaningless option unless it is accompanied by - # DetachedSignature - if ( - PKCS7Options.Text in options - and PKCS7Options.DetachedSignature not in options - ): - raise ValueError( - "When passing the Text option you must also pass " - "DetachedSignature" - ) - - if PKCS7Options.Text in options and encoding in ( - serialization.Encoding.DER, - serialization.Encoding.PEM, - ): - raise ValueError( - "The Text option is only available for SMIME serialization" - ) - - # No attributes implies no capabilities so we'll error if you try to - # pass both. - if ( - PKCS7Options.NoAttributes in options - and PKCS7Options.NoCapabilities in options - ): - raise ValueError( - "NoAttributes is a superset of NoCapabilities. Do not pass " - "both values." - ) - - return rust_pkcs7.sign_and_serialize(self, encoding, options) - - -class PKCS7EnvelopeBuilder: - def __init__( - self, - *, - _data: bytes | None = None, - _recipients: list[x509.Certificate] | None = None, - _content_encryption_algorithm: ContentEncryptionAlgorithm - | None = None, - ): - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.rsa_encryption_supported(padding=padding.PKCS1v15()): - raise UnsupportedAlgorithm( - "RSA with PKCS1 v1.5 padding is not supported by this version" - " of OpenSSL.", - _Reasons.UNSUPPORTED_PADDING, - ) - self._data = _data - self._recipients = _recipients if _recipients is not None else [] - self._content_encryption_algorithm = _content_encryption_algorithm - - def set_data(self, data: bytes) -> PKCS7EnvelopeBuilder: - _check_byteslike("data", data) - if self._data is not None: - raise ValueError("data may only be set once") - - return PKCS7EnvelopeBuilder( - _data=data, - _recipients=self._recipients, - _content_encryption_algorithm=self._content_encryption_algorithm, - ) - - def add_recipient( - self, - certificate: x509.Certificate, - ) -> PKCS7EnvelopeBuilder: - if not isinstance(certificate, x509.Certificate): - raise TypeError("certificate must be a x509.Certificate") - - if not isinstance(certificate.public_key(), rsa.RSAPublicKey): - raise TypeError("Only RSA keys are supported at this time.") - - return PKCS7EnvelopeBuilder( - _data=self._data, - _recipients=[ - *self._recipients, - certificate, - ], - _content_encryption_algorithm=self._content_encryption_algorithm, - ) - - def set_content_encryption_algorithm( - self, content_encryption_algorithm: ContentEncryptionAlgorithm - ) -> PKCS7EnvelopeBuilder: - if self._content_encryption_algorithm is not None: - raise ValueError("Content encryption algo may only be set once") - if content_encryption_algorithm not in { - algorithms.AES128, - algorithms.AES256, - }: - raise TypeError("Only AES128 and AES256 are supported") - - return PKCS7EnvelopeBuilder( - _data=self._data, - _recipients=self._recipients, - _content_encryption_algorithm=content_encryption_algorithm, 
- ) - - def encrypt( - self, - encoding: serialization.Encoding, - options: Iterable[PKCS7Options], - ) -> bytes: - if len(self._recipients) == 0: - raise ValueError("Must have at least one recipient") - if self._data is None: - raise ValueError("You must add data to encrypt") - - # The default content encryption algorithm is AES-128, which the S/MIME - # v3.2 RFC specifies as MUST support (https://datatracker.ietf.org/doc/html/rfc5751#section-2.7) - content_encryption_algorithm = ( - self._content_encryption_algorithm or algorithms.AES128 - ) - - options = list(options) - if not all(isinstance(x, PKCS7Options) for x in options): - raise ValueError("options must be from the PKCS7Options enum") - if encoding not in ( - serialization.Encoding.PEM, - serialization.Encoding.DER, - serialization.Encoding.SMIME, - ): - raise ValueError( - "Must be PEM, DER, or SMIME from the Encoding enum" - ) - - # Only allow options that make sense for encryption - if any( - opt not in [PKCS7Options.Text, PKCS7Options.Binary] - for opt in options - ): - raise ValueError( - "Only the following options are supported for encryption: " - "Text, Binary" - ) - elif PKCS7Options.Text in options and PKCS7Options.Binary in options: - # OpenSSL accepts both options at the same time, but ignores Text. - # We fail defensively to avoid unexpected outputs. - raise ValueError( - "Cannot use Binary and Text options at the same time" - ) - - return rust_pkcs7.encrypt_and_serialize( - self, content_encryption_algorithm, encoding, options - ) - - -pkcs7_decrypt_der = rust_pkcs7.decrypt_der -pkcs7_decrypt_pem = rust_pkcs7.decrypt_pem -pkcs7_decrypt_smime = rust_pkcs7.decrypt_smime - - -def _smime_signed_encode( - data: bytes, signature: bytes, micalg: str, text_mode: bool -) -> bytes: - # This function works pretty hard to replicate what OpenSSL does - # precisely. For good and for ill. 
- - m = email.message.Message() - m.add_header("MIME-Version", "1.0") - m.add_header( - "Content-Type", - "multipart/signed", - protocol="application/x-pkcs7-signature", - micalg=micalg, - ) - - m.preamble = "This is an S/MIME signed message\n" - - msg_part = OpenSSLMimePart() - msg_part.set_payload(data) - if text_mode: - msg_part.add_header("Content-Type", "text/plain") - m.attach(msg_part) - - sig_part = email.message.MIMEPart() - sig_part.add_header( - "Content-Type", "application/x-pkcs7-signature", name="smime.p7s" - ) - sig_part.add_header("Content-Transfer-Encoding", "base64") - sig_part.add_header( - "Content-Disposition", "attachment", filename="smime.p7s" - ) - sig_part.set_payload( - email.base64mime.body_encode(signature, maxlinelen=65) - ) - del sig_part["MIME-Version"] - m.attach(sig_part) - - fp = io.BytesIO() - g = email.generator.BytesGenerator( - fp, - maxheaderlen=0, - mangle_from_=False, - policy=m.policy.clone(linesep="\r\n"), - ) - g.flatten(m) - return fp.getvalue() - - -def _smime_enveloped_encode(data: bytes) -> bytes: - m = email.message.Message() - m.add_header("MIME-Version", "1.0") - m.add_header("Content-Disposition", "attachment", filename="smime.p7m") - m.add_header( - "Content-Type", - "application/pkcs7-mime", - smime_type="enveloped-data", - name="smime.p7m", - ) - m.add_header("Content-Transfer-Encoding", "base64") - - m.set_payload(email.base64mime.body_encode(data, maxlinelen=65)) - - return m.as_bytes(policy=m.policy.clone(linesep="\n", max_line_length=0)) - - -def _smime_enveloped_decode(data: bytes) -> bytes: - m = email.message_from_bytes(data) - if m.get_content_type() not in { - "application/x-pkcs7-mime", - "application/pkcs7-mime", - }: - raise ValueError("Not an S/MIME enveloped message") - return bytes(m.get_payload(decode=True)) - - -def _smime_remove_text_headers(data: bytes) -> bytes: - m = email.message_from_bytes(data) - # Using get() instead of get_content_type() since it has None as default, - # where the latter has "text/plain". Both methods are case-insensitive. - content_type = m.get("content-type") - if content_type is None: - raise ValueError( - "Decrypted MIME data has no 'Content-Type' header. " - "Please remove the 'Text' option to parse it manually." - ) - if "text/plain" not in content_type: - raise ValueError( - f"Decrypted MIME data content type is '{content_type}', not " - "'text/plain'. Remove the 'Text' option to parse it manually." - ) - return bytes(m.get_payload(decode=True)) - - -class OpenSSLMimePart(email.message.MIMEPart): - # A MIMEPart subclass that replicates OpenSSL's behavior of not including - # a newline if there are no headers. - def _write_headers(self, generator) -> None: - if list(self.raw_items()): - generator._write_headers(self) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/ssh.py deleted file mode 100644 index c08b981..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/ssh.py +++ /dev/null @@ -1,1619 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import binascii -import enum -import os -import re -import typing -import warnings -from base64 import encodebytes as _base64_encode -from dataclasses import dataclass - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed25519, - padding, - rsa, -) -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils -from cryptography.hazmat.primitives.ciphers import ( - AEADDecryptionContext, - Cipher, - algorithms, - modes, -) -from cryptography.hazmat.primitives.serialization import ( - Encoding, - KeySerializationEncryption, - NoEncryption, - PrivateFormat, - PublicFormat, - _KeySerializationEncryption, -) - -try: - from bcrypt import kdf as _bcrypt_kdf - - _bcrypt_supported = True -except ImportError: - _bcrypt_supported = False - - def _bcrypt_kdf( - password: bytes, - salt: bytes, - desired_key_bytes: int, - rounds: int, - ignore_few_rounds: bool = False, - ) -> bytes: - raise UnsupportedAlgorithm("Need bcrypt module") - - -_SSH_ED25519 = b"ssh-ed25519" -_SSH_RSA = b"ssh-rsa" -_SSH_DSA = b"ssh-dss" -_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256" -_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384" -_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521" -_CERT_SUFFIX = b"-cert-v01@openssh.com" - -# U2F application string suffixed pubkey -_SK_SSH_ED25519 = b"sk-ssh-ed25519@openssh.com" -_SK_SSH_ECDSA_NISTP256 = b"sk-ecdsa-sha2-nistp256@openssh.com" - -# These are not key types, only algorithms, so they cannot appear -# as a public key type -_SSH_RSA_SHA256 = b"rsa-sha2-256" -_SSH_RSA_SHA512 = b"rsa-sha2-512" - -_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)") -_SK_MAGIC = b"openssh-key-v1\0" -_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----" -_SK_END = b"-----END OPENSSH PRIVATE KEY-----" -_BCRYPT = b"bcrypt" -_NONE = b"none" -_DEFAULT_CIPHER = b"aes256-ctr" -_DEFAULT_ROUNDS = 16 - -# re is only way to work on bytes-like data -_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL) - -# padding for max blocksize -_PADDING = memoryview(bytearray(range(1, 1 + 16))) - - -@dataclass -class _SSHCipher: - alg: type[algorithms.AES] - key_len: int - mode: type[modes.CTR] | type[modes.CBC] | type[modes.GCM] - block_len: int - iv_len: int - tag_len: int | None - is_aead: bool - - -# ciphers that are actually used in key wrapping -_SSH_CIPHERS: dict[bytes, _SSHCipher] = { - b"aes256-ctr": _SSHCipher( - alg=algorithms.AES, - key_len=32, - mode=modes.CTR, - block_len=16, - iv_len=16, - tag_len=None, - is_aead=False, - ), - b"aes256-cbc": _SSHCipher( - alg=algorithms.AES, - key_len=32, - mode=modes.CBC, - block_len=16, - iv_len=16, - tag_len=None, - is_aead=False, - ), - b"aes256-gcm@openssh.com": _SSHCipher( - alg=algorithms.AES, - key_len=32, - mode=modes.GCM, - block_len=16, - iv_len=12, - tag_len=16, - is_aead=True, - ), -} - -# map local curve name to key type -_ECDSA_KEY_TYPE = { - "secp256r1": _ECDSA_NISTP256, - "secp384r1": _ECDSA_NISTP384, - "secp521r1": _ECDSA_NISTP521, -} - - -def _get_ssh_key_type(key: SSHPrivateKeyTypes | SSHPublicKeyTypes) -> bytes: - if isinstance(key, ec.EllipticCurvePrivateKey): - key_type = _ecdsa_key_type(key.public_key()) - elif isinstance(key, ec.EllipticCurvePublicKey): - key_type = _ecdsa_key_type(key) - elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): - key_type = _SSH_RSA - elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)): - key_type = _SSH_DSA - 
elif isinstance( - key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey) - ): - key_type = _SSH_ED25519 - else: - raise ValueError("Unsupported key type") - - return key_type - - -def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes: - """Return SSH key_type and curve_name for private key.""" - curve = public_key.curve - if curve.name not in _ECDSA_KEY_TYPE: - raise ValueError( - f"Unsupported curve for ssh private key: {curve.name!r}" - ) - return _ECDSA_KEY_TYPE[curve.name] - - -def _ssh_pem_encode( - data: utils.Buffer, - prefix: bytes = _SK_START + b"\n", - suffix: bytes = _SK_END + b"\n", -) -> bytes: - return b"".join([prefix, _base64_encode(data), suffix]) - - -def _check_block_size(data: utils.Buffer, block_len: int) -> None: - """Require data to be full blocks""" - if not data or len(data) % block_len != 0: - raise ValueError("Corrupt data: missing padding") - - -def _check_empty(data: utils.Buffer) -> None: - """All data should have been parsed.""" - if data: - raise ValueError("Corrupt data: unparsed data") - - -def _init_cipher( - ciphername: bytes, - password: bytes | None, - salt: bytes, - rounds: int, -) -> Cipher[modes.CBC | modes.CTR | modes.GCM]: - """Generate key + iv and return cipher.""" - if not password: - raise TypeError( - "Key is password-protected, but password was not provided." - ) - - ciph = _SSH_CIPHERS[ciphername] - seed = _bcrypt_kdf( - password, salt, ciph.key_len + ciph.iv_len, rounds, True - ) - return Cipher( - ciph.alg(seed[: ciph.key_len]), - ciph.mode(seed[ciph.key_len :]), - ) - - -def _get_u32(data: memoryview) -> tuple[int, memoryview]: - """Uint32""" - if len(data) < 4: - raise ValueError("Invalid data") - return int.from_bytes(data[:4], byteorder="big"), data[4:] - - -def _get_u64(data: memoryview) -> tuple[int, memoryview]: - """Uint64""" - if len(data) < 8: - raise ValueError("Invalid data") - return int.from_bytes(data[:8], byteorder="big"), data[8:] - - -def _get_sshstr(data: memoryview) -> tuple[memoryview, memoryview]: - """Bytes with u32 length prefix""" - n, data = _get_u32(data) - if n > len(data): - raise ValueError("Invalid data") - return data[:n], data[n:] - - -def _get_mpint(data: memoryview) -> tuple[int, memoryview]: - """Big integer.""" - val, data = _get_sshstr(data) - if val and val[0] > 0x7F: - raise ValueError("Invalid data") - return int.from_bytes(val, "big"), data - - -def _to_mpint(val: int) -> bytes: - """Storage format for signed bigint.""" - if val < 0: - raise ValueError("negative mpint not allowed") - if not val: - return b"" - nbytes = (val.bit_length() + 8) // 8 - return utils.int_to_bytes(val, nbytes) - - -class _FragList: - """Build recursive structure without data copy.""" - - flist: list[utils.Buffer] - - def __init__(self, init: list[utils.Buffer] | None = None) -> None: - self.flist = [] - if init: - self.flist.extend(init) - - def put_raw(self, val: utils.Buffer) -> None: - """Add plain bytes""" - self.flist.append(val) - - def put_u32(self, val: int) -> None: - """Big-endian uint32""" - self.flist.append(val.to_bytes(length=4, byteorder="big")) - - def put_u64(self, val: int) -> None: - """Big-endian uint64""" - self.flist.append(val.to_bytes(length=8, byteorder="big")) - - def put_sshstr(self, val: bytes | _FragList) -> None: - """Bytes prefixed with u32 length""" - if isinstance(val, (bytes, memoryview, bytearray)): - self.put_u32(len(val)) - self.flist.append(val) - else: - self.put_u32(val.size()) - self.flist.extend(val.flist) - - def put_mpint(self, val: int) -> None: - 
"""Big-endian bigint prefixed with u32 length""" - self.put_sshstr(_to_mpint(val)) - - def size(self) -> int: - """Current number of bytes""" - return sum(map(len, self.flist)) - - def render(self, dstbuf: memoryview, pos: int = 0) -> int: - """Write into bytearray""" - for frag in self.flist: - flen = len(frag) - start, pos = pos, pos + flen - dstbuf[start:pos] = frag - return pos - - def tobytes(self) -> bytes: - """Return as bytes""" - buf = memoryview(bytearray(self.size())) - self.render(buf) - return buf.tobytes() - - -class _SSHFormatRSA: - """Format for RSA keys. - - Public: - mpint e, n - Private: - mpint n, e, d, iqmp, p, q - """ - - def get_public( - self, data: memoryview - ) -> tuple[tuple[int, int], memoryview]: - """RSA public fields""" - e, data = _get_mpint(data) - n, data = _get_mpint(data) - return (e, n), data - - def load_public( - self, data: memoryview - ) -> tuple[rsa.RSAPublicKey, memoryview]: - """Make RSA public key from data.""" - (e, n), data = self.get_public(data) - public_numbers = rsa.RSAPublicNumbers(e, n) - public_key = public_numbers.public_key() - return public_key, data - - def load_private( - self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool - ) -> tuple[rsa.RSAPrivateKey, memoryview]: - """Make RSA private key from data.""" - n, data = _get_mpint(data) - e, data = _get_mpint(data) - d, data = _get_mpint(data) - iqmp, data = _get_mpint(data) - p, data = _get_mpint(data) - q, data = _get_mpint(data) - - if (e, n) != pubfields: - raise ValueError("Corrupt data: rsa field mismatch") - dmp1 = rsa.rsa_crt_dmp1(d, p) - dmq1 = rsa.rsa_crt_dmq1(d, q) - public_numbers = rsa.RSAPublicNumbers(e, n) - private_numbers = rsa.RSAPrivateNumbers( - p, q, d, dmp1, dmq1, iqmp, public_numbers - ) - private_key = private_numbers.private_key( - unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation - ) - return private_key, data - - def encode_public( - self, public_key: rsa.RSAPublicKey, f_pub: _FragList - ) -> None: - """Write RSA public key""" - pubn = public_key.public_numbers() - f_pub.put_mpint(pubn.e) - f_pub.put_mpint(pubn.n) - - def encode_private( - self, private_key: rsa.RSAPrivateKey, f_priv: _FragList - ) -> None: - """Write RSA private key""" - private_numbers = private_key.private_numbers() - public_numbers = private_numbers.public_numbers - - f_priv.put_mpint(public_numbers.n) - f_priv.put_mpint(public_numbers.e) - - f_priv.put_mpint(private_numbers.d) - f_priv.put_mpint(private_numbers.iqmp) - f_priv.put_mpint(private_numbers.p) - f_priv.put_mpint(private_numbers.q) - - -class _SSHFormatDSA: - """Format for DSA keys. 
- - Public: - mpint p, q, g, y - Private: - mpint p, q, g, y, x - """ - - def get_public(self, data: memoryview) -> tuple[tuple, memoryview]: - """DSA public fields""" - p, data = _get_mpint(data) - q, data = _get_mpint(data) - g, data = _get_mpint(data) - y, data = _get_mpint(data) - return (p, q, g, y), data - - def load_public( - self, data: memoryview - ) -> tuple[dsa.DSAPublicKey, memoryview]: - """Make DSA public key from data.""" - (p, q, g, y), data = self.get_public(data) - parameter_numbers = dsa.DSAParameterNumbers(p, q, g) - public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) - self._validate(public_numbers) - public_key = public_numbers.public_key() - return public_key, data - - def load_private( - self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool - ) -> tuple[dsa.DSAPrivateKey, memoryview]: - """Make DSA private key from data.""" - (p, q, g, y), data = self.get_public(data) - x, data = _get_mpint(data) - - if (p, q, g, y) != pubfields: - raise ValueError("Corrupt data: dsa field mismatch") - parameter_numbers = dsa.DSAParameterNumbers(p, q, g) - public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) - self._validate(public_numbers) - private_numbers = dsa.DSAPrivateNumbers(x, public_numbers) - private_key = private_numbers.private_key() - return private_key, data - - def encode_public( - self, public_key: dsa.DSAPublicKey, f_pub: _FragList - ) -> None: - """Write DSA public key""" - public_numbers = public_key.public_numbers() - parameter_numbers = public_numbers.parameter_numbers - self._validate(public_numbers) - - f_pub.put_mpint(parameter_numbers.p) - f_pub.put_mpint(parameter_numbers.q) - f_pub.put_mpint(parameter_numbers.g) - f_pub.put_mpint(public_numbers.y) - - def encode_private( - self, private_key: dsa.DSAPrivateKey, f_priv: _FragList - ) -> None: - """Write DSA private key""" - self.encode_public(private_key.public_key(), f_priv) - f_priv.put_mpint(private_key.private_numbers().x) - - def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None: - parameter_numbers = public_numbers.parameter_numbers - if parameter_numbers.p.bit_length() != 1024: - raise ValueError("SSH supports only 1024 bit DSA keys") - - -class _SSHFormatECDSA: - """Format for ECDSA keys. 
- - Public: - str curve - bytes point - Private: - str curve - bytes point - mpint secret - """ - - def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve): - self.ssh_curve_name = ssh_curve_name - self.curve = curve - - def get_public( - self, data: memoryview - ) -> tuple[tuple[memoryview, memoryview], memoryview]: - """ECDSA public fields""" - curve, data = _get_sshstr(data) - point, data = _get_sshstr(data) - if curve != self.ssh_curve_name: - raise ValueError("Curve name mismatch") - if point[0] != 4: - raise NotImplementedError("Need uncompressed point") - return (curve, point), data - - def load_public( - self, data: memoryview - ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: - """Make ECDSA public key from data.""" - (_, point), data = self.get_public(data) - public_key = ec.EllipticCurvePublicKey.from_encoded_point( - self.curve, point.tobytes() - ) - return public_key, data - - def load_private( - self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool - ) -> tuple[ec.EllipticCurvePrivateKey, memoryview]: - """Make ECDSA private key from data.""" - (curve_name, point), data = self.get_public(data) - secret, data = _get_mpint(data) - - if (curve_name, point) != pubfields: - raise ValueError("Corrupt data: ecdsa field mismatch") - private_key = ec.derive_private_key(secret, self.curve) - return private_key, data - - def encode_public( - self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList - ) -> None: - """Write ECDSA public key""" - point = public_key.public_bytes( - Encoding.X962, PublicFormat.UncompressedPoint - ) - f_pub.put_sshstr(self.ssh_curve_name) - f_pub.put_sshstr(point) - - def encode_private( - self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList - ) -> None: - """Write ECDSA private key""" - public_key = private_key.public_key() - private_numbers = private_key.private_numbers() - - self.encode_public(public_key, f_priv) - f_priv.put_mpint(private_numbers.private_value) - - -class _SSHFormatEd25519: - """Format for Ed25519 keys. 
- - Public: - bytes point - Private: - bytes point - bytes secret_and_point - """ - - def get_public( - self, data: memoryview - ) -> tuple[tuple[memoryview], memoryview]: - """Ed25519 public fields""" - point, data = _get_sshstr(data) - return (point,), data - - def load_public( - self, data: memoryview - ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: - """Make Ed25519 public key from data.""" - (point,), data = self.get_public(data) - public_key = ed25519.Ed25519PublicKey.from_public_bytes( - point.tobytes() - ) - return public_key, data - - def load_private( - self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool - ) -> tuple[ed25519.Ed25519PrivateKey, memoryview]: - """Make Ed25519 private key from data.""" - (point,), data = self.get_public(data) - keypair, data = _get_sshstr(data) - - secret = keypair[:32] - point2 = keypair[32:] - if point != point2 or (point,) != pubfields: - raise ValueError("Corrupt data: ed25519 field mismatch") - private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret) - return private_key, data - - def encode_public( - self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList - ) -> None: - """Write Ed25519 public key""" - raw_public_key = public_key.public_bytes( - Encoding.Raw, PublicFormat.Raw - ) - f_pub.put_sshstr(raw_public_key) - - def encode_private( - self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList - ) -> None: - """Write Ed25519 private key""" - public_key = private_key.public_key() - raw_private_key = private_key.private_bytes( - Encoding.Raw, PrivateFormat.Raw, NoEncryption() - ) - raw_public_key = public_key.public_bytes( - Encoding.Raw, PublicFormat.Raw - ) - f_keypair = _FragList([raw_private_key, raw_public_key]) - - self.encode_public(public_key, f_priv) - f_priv.put_sshstr(f_keypair) - - -def load_application(data) -> tuple[memoryview, memoryview]: - """ - U2F application strings - """ - application, data = _get_sshstr(data) - if not application.tobytes().startswith(b"ssh:"): - raise ValueError( - "U2F application string does not start with b'ssh:' " - f"({application})" - ) - return application, data - - -class _SSHFormatSKEd25519: - """ - The format of a sk-ssh-ed25519@openssh.com public key is: - - string "sk-ssh-ed25519@openssh.com" - string public key - string application (user-specified, but typically "ssh:") - """ - - def load_public( - self, data: memoryview - ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: - """Make Ed25519 public key from data.""" - public_key, data = _lookup_kformat(_SSH_ED25519).load_public(data) - _, data = load_application(data) - return public_key, data - - def get_public(self, data: memoryview) -> typing.NoReturn: - # Confusingly `get_public` is an entry point used by private key - # loading. - raise UnsupportedAlgorithm( - "sk-ssh-ed25519 private keys cannot be loaded" - ) - - -class _SSHFormatSKECDSA: - """ - The format of a sk-ecdsa-sha2-nistp256@openssh.com public key is: - - string "sk-ecdsa-sha2-nistp256@openssh.com" - string curve name - ec_point Q - string application (user-specified, but typically "ssh:") - """ - - def load_public( - self, data: memoryview - ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: - """Make ECDSA public key from data.""" - public_key, data = _lookup_kformat(_ECDSA_NISTP256).load_public(data) - _, data = load_application(data) - return public_key, data - - def get_public(self, data: memoryview) -> typing.NoReturn: - # Confusingly `get_public` is an entry point used by private key - # loading. 
- raise UnsupportedAlgorithm( - "sk-ecdsa-sha2-nistp256 private keys cannot be loaded" - ) - - -_KEY_FORMATS = { - _SSH_RSA: _SSHFormatRSA(), - _SSH_DSA: _SSHFormatDSA(), - _SSH_ED25519: _SSHFormatEd25519(), - _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()), - _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()), - _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()), - _SK_SSH_ED25519: _SSHFormatSKEd25519(), - _SK_SSH_ECDSA_NISTP256: _SSHFormatSKECDSA(), -} - - -def _lookup_kformat(key_type: utils.Buffer): - """Return valid format or throw error""" - if not isinstance(key_type, bytes): - key_type = memoryview(key_type).tobytes() - if key_type in _KEY_FORMATS: - return _KEY_FORMATS[key_type] - raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}") - - -SSHPrivateKeyTypes = typing.Union[ - ec.EllipticCurvePrivateKey, - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ed25519.Ed25519PrivateKey, -] - - -def load_ssh_private_key( - data: utils.Buffer, - password: bytes | None, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, -) -> SSHPrivateKeyTypes: - """Load private key from OpenSSH custom encoding.""" - utils._check_byteslike("data", data) - if password is not None: - utils._check_bytes("password", password) - - m = _PEM_RC.search(data) - if not m: - raise ValueError("Not OpenSSH private key format") - p1 = m.start(1) - p2 = m.end(1) - data = binascii.a2b_base64(memoryview(data)[p1:p2]) - if not data.startswith(_SK_MAGIC): - raise ValueError("Not OpenSSH private key format") - data = memoryview(data)[len(_SK_MAGIC) :] - - # parse header - ciphername, data = _get_sshstr(data) - kdfname, data = _get_sshstr(data) - kdfoptions, data = _get_sshstr(data) - nkeys, data = _get_u32(data) - if nkeys != 1: - raise ValueError("Only one key supported") - - # load public key data - pubdata, data = _get_sshstr(data) - pub_key_type, pubdata = _get_sshstr(pubdata) - kformat = _lookup_kformat(pub_key_type) - pubfields, pubdata = kformat.get_public(pubdata) - _check_empty(pubdata) - - if ciphername != _NONE or kdfname != _NONE: - ciphername_bytes = ciphername.tobytes() - if ciphername_bytes not in _SSH_CIPHERS: - raise UnsupportedAlgorithm( - f"Unsupported cipher: {ciphername_bytes!r}" - ) - if kdfname != _BCRYPT: - raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}") - blklen = _SSH_CIPHERS[ciphername_bytes].block_len - tag_len = _SSH_CIPHERS[ciphername_bytes].tag_len - # load secret data - edata, data = _get_sshstr(data) - # see https://bugzilla.mindrot.org/show_bug.cgi?id=3553 for - # information about how OpenSSH handles AEAD tags - if _SSH_CIPHERS[ciphername_bytes].is_aead: - tag = bytes(data) - if len(tag) != tag_len: - raise ValueError("Corrupt data: invalid tag length for cipher") - else: - _check_empty(data) - _check_block_size(edata, blklen) - salt, kbuf = _get_sshstr(kdfoptions) - rounds, kbuf = _get_u32(kbuf) - _check_empty(kbuf) - ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds) - dec = ciph.decryptor() - edata = memoryview(dec.update(edata)) - if _SSH_CIPHERS[ciphername_bytes].is_aead: - assert isinstance(dec, AEADDecryptionContext) - _check_empty(dec.finalize_with_tag(tag)) - else: - # _check_block_size requires data to be a full block so there - # should be no output from finalize - _check_empty(dec.finalize()) - else: - if password: - raise TypeError( - "Password was given but private key is not encrypted." 
- ) - # load secret data - edata, data = _get_sshstr(data) - _check_empty(data) - blklen = 8 - _check_block_size(edata, blklen) - ck1, edata = _get_u32(edata) - ck2, edata = _get_u32(edata) - if ck1 != ck2: - raise ValueError("Corrupt data: broken checksum") - - # load per-key struct - key_type, edata = _get_sshstr(edata) - if key_type != pub_key_type: - raise ValueError("Corrupt data: key type mismatch") - private_key, edata = kformat.load_private( - edata, - pubfields, - unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation, - ) - # We don't use the comment - _, edata = _get_sshstr(edata) - - # yes, SSH does padding check *after* all other parsing is done. - # need to follow as it writes zero-byte padding too. - if edata != _PADDING[: len(edata)]: - raise ValueError("Corrupt data: invalid padding") - - if isinstance(private_key, dsa.DSAPrivateKey): - warnings.warn( - "SSH DSA keys are deprecated and will be removed in a future " - "release.", - utils.DeprecatedIn40, - stacklevel=2, - ) - - return private_key - - -def _serialize_ssh_private_key( - private_key: SSHPrivateKeyTypes, - password: bytes, - encryption_algorithm: KeySerializationEncryption, -) -> bytes: - """Serialize private key with OpenSSH custom encoding.""" - utils._check_bytes("password", password) - if isinstance(private_key, dsa.DSAPrivateKey): - warnings.warn( - "SSH DSA key support is deprecated and will be " - "removed in a future release", - utils.DeprecatedIn40, - stacklevel=4, - ) - - key_type = _get_ssh_key_type(private_key) - kformat = _lookup_kformat(key_type) - - # setup parameters - f_kdfoptions = _FragList() - if password: - ciphername = _DEFAULT_CIPHER - blklen = _SSH_CIPHERS[ciphername].block_len - kdfname = _BCRYPT - rounds = _DEFAULT_ROUNDS - if ( - isinstance(encryption_algorithm, _KeySerializationEncryption) - and encryption_algorithm._kdf_rounds is not None - ): - rounds = encryption_algorithm._kdf_rounds - salt = os.urandom(16) - f_kdfoptions.put_sshstr(salt) - f_kdfoptions.put_u32(rounds) - ciph = _init_cipher(ciphername, password, salt, rounds) - else: - ciphername = kdfname = _NONE - blklen = 8 - ciph = None - nkeys = 1 - checkval = os.urandom(4) - comment = b"" - - # encode public and private parts together - f_public_key = _FragList() - f_public_key.put_sshstr(key_type) - kformat.encode_public(private_key.public_key(), f_public_key) - - f_secrets = _FragList([checkval, checkval]) - f_secrets.put_sshstr(key_type) - kformat.encode_private(private_key, f_secrets) - f_secrets.put_sshstr(comment) - f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)]) - - # top-level structure - f_main = _FragList() - f_main.put_raw(_SK_MAGIC) - f_main.put_sshstr(ciphername) - f_main.put_sshstr(kdfname) - f_main.put_sshstr(f_kdfoptions) - f_main.put_u32(nkeys) - f_main.put_sshstr(f_public_key) - f_main.put_sshstr(f_secrets) - - # copy result info bytearray - slen = f_secrets.size() - mlen = f_main.size() - buf = memoryview(bytearray(mlen + blklen)) - f_main.render(buf) - ofs = mlen - slen - - # encrypt in-place - if ciph is not None: - ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:]) - - return _ssh_pem_encode(buf[:mlen]) - - -SSHPublicKeyTypes = typing.Union[ - ec.EllipticCurvePublicKey, - rsa.RSAPublicKey, - dsa.DSAPublicKey, - ed25519.Ed25519PublicKey, -] - -SSHCertPublicKeyTypes = typing.Union[ - ec.EllipticCurvePublicKey, - rsa.RSAPublicKey, - ed25519.Ed25519PublicKey, -] - - -class SSHCertificateType(enum.Enum): - USER = 1 - HOST = 2 - - -class SSHCertificate: - def __init__( - self, - 
_nonce: memoryview, - _public_key: SSHPublicKeyTypes, - _serial: int, - _cctype: int, - _key_id: memoryview, - _valid_principals: list[bytes], - _valid_after: int, - _valid_before: int, - _critical_options: dict[bytes, bytes], - _extensions: dict[bytes, bytes], - _sig_type: memoryview, - _sig_key: memoryview, - _inner_sig_type: memoryview, - _signature: memoryview, - _tbs_cert_body: memoryview, - _cert_key_type: bytes, - _cert_body: memoryview, - ): - self._nonce = _nonce - self._public_key = _public_key - self._serial = _serial - try: - self._type = SSHCertificateType(_cctype) - except ValueError: - raise ValueError("Invalid certificate type") - self._key_id = _key_id - self._valid_principals = _valid_principals - self._valid_after = _valid_after - self._valid_before = _valid_before - self._critical_options = _critical_options - self._extensions = _extensions - self._sig_type = _sig_type - self._sig_key = _sig_key - self._inner_sig_type = _inner_sig_type - self._signature = _signature - self._cert_key_type = _cert_key_type - self._cert_body = _cert_body - self._tbs_cert_body = _tbs_cert_body - - @property - def nonce(self) -> bytes: - return bytes(self._nonce) - - def public_key(self) -> SSHCertPublicKeyTypes: - # make mypy happy until we remove DSA support entirely and - # the underlying union won't have a disallowed type - return typing.cast(SSHCertPublicKeyTypes, self._public_key) - - @property - def serial(self) -> int: - return self._serial - - @property - def type(self) -> SSHCertificateType: - return self._type - - @property - def key_id(self) -> bytes: - return bytes(self._key_id) - - @property - def valid_principals(self) -> list[bytes]: - return self._valid_principals - - @property - def valid_before(self) -> int: - return self._valid_before - - @property - def valid_after(self) -> int: - return self._valid_after - - @property - def critical_options(self) -> dict[bytes, bytes]: - return self._critical_options - - @property - def extensions(self) -> dict[bytes, bytes]: - return self._extensions - - def signature_key(self) -> SSHCertPublicKeyTypes: - sigformat = _lookup_kformat(self._sig_type) - signature_key, sigkey_rest = sigformat.load_public(self._sig_key) - _check_empty(sigkey_rest) - return signature_key - - def public_bytes(self) -> bytes: - return ( - bytes(self._cert_key_type) - + b" " - + binascii.b2a_base64(bytes(self._cert_body), newline=False) - ) - - def verify_cert_signature(self) -> None: - signature_key = self.signature_key() - if isinstance(signature_key, ed25519.Ed25519PublicKey): - signature_key.verify( - bytes(self._signature), bytes(self._tbs_cert_body) - ) - elif isinstance(signature_key, ec.EllipticCurvePublicKey): - # The signature is encoded as a pair of big-endian integers - r, data = _get_mpint(self._signature) - s, data = _get_mpint(data) - _check_empty(data) - computed_sig = asym_utils.encode_dss_signature(r, s) - hash_alg = _get_ec_hash_alg(signature_key.curve) - signature_key.verify( - computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg) - ) - else: - assert isinstance(signature_key, rsa.RSAPublicKey) - if self._inner_sig_type == _SSH_RSA: - hash_alg = hashes.SHA1() - elif self._inner_sig_type == _SSH_RSA_SHA256: - hash_alg = hashes.SHA256() - else: - assert self._inner_sig_type == _SSH_RSA_SHA512 - hash_alg = hashes.SHA512() - signature_key.verify( - bytes(self._signature), - bytes(self._tbs_cert_body), - padding.PKCS1v15(), - hash_alg, - ) - - -def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm: - if isinstance(curve, 
ec.SECP256R1): - return hashes.SHA256() - elif isinstance(curve, ec.SECP384R1): - return hashes.SHA384() - else: - assert isinstance(curve, ec.SECP521R1) - return hashes.SHA512() - - -def _load_ssh_public_identity( - data: utils.Buffer, - _legacy_dsa_allowed=False, -) -> SSHCertificate | SSHPublicKeyTypes: - utils._check_byteslike("data", data) - - m = _SSH_PUBKEY_RC.match(data) - if not m: - raise ValueError("Invalid line format") - key_type = orig_key_type = m.group(1) - key_body = m.group(2) - with_cert = False - if key_type.endswith(_CERT_SUFFIX): - with_cert = True - key_type = key_type[: -len(_CERT_SUFFIX)] - if key_type == _SSH_DSA and not _legacy_dsa_allowed: - raise UnsupportedAlgorithm( - "DSA keys aren't supported in SSH certificates" - ) - kformat = _lookup_kformat(key_type) - - try: - rest = memoryview(binascii.a2b_base64(key_body)) - except (TypeError, binascii.Error): - raise ValueError("Invalid format") - - if with_cert: - cert_body = rest - inner_key_type, rest = _get_sshstr(rest) - if inner_key_type != orig_key_type: - raise ValueError("Invalid key format") - if with_cert: - nonce, rest = _get_sshstr(rest) - public_key, rest = kformat.load_public(rest) - if with_cert: - serial, rest = _get_u64(rest) - cctype, rest = _get_u32(rest) - key_id, rest = _get_sshstr(rest) - principals, rest = _get_sshstr(rest) - valid_principals = [] - while principals: - principal, principals = _get_sshstr(principals) - valid_principals.append(bytes(principal)) - valid_after, rest = _get_u64(rest) - valid_before, rest = _get_u64(rest) - crit_options, rest = _get_sshstr(rest) - critical_options = _parse_exts_opts(crit_options) - exts, rest = _get_sshstr(rest) - extensions = _parse_exts_opts(exts) - # Get the reserved field, which is unused. - _, rest = _get_sshstr(rest) - sig_key_raw, rest = _get_sshstr(rest) - sig_type, sig_key = _get_sshstr(sig_key_raw) - if sig_type == _SSH_DSA and not _legacy_dsa_allowed: - raise UnsupportedAlgorithm( - "DSA signatures aren't supported in SSH certificates" - ) - # Get the entire cert body and subtract the signature - tbs_cert_body = cert_body[: -len(rest)] - signature_raw, rest = _get_sshstr(rest) - _check_empty(rest) - inner_sig_type, sig_rest = _get_sshstr(signature_raw) - # RSA certs can have multiple algorithm types - if ( - sig_type == _SSH_RSA - and inner_sig_type - not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA] - ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type): - raise ValueError("Signature key type does not match") - signature, sig_rest = _get_sshstr(sig_rest) - _check_empty(sig_rest) - return SSHCertificate( - nonce, - public_key, - serial, - cctype, - key_id, - valid_principals, - valid_after, - valid_before, - critical_options, - extensions, - sig_type, - sig_key, - inner_sig_type, - signature, - tbs_cert_body, - orig_key_type, - cert_body, - ) - else: - _check_empty(rest) - return public_key - - -def load_ssh_public_identity( - data: bytes, -) -> SSHCertificate | SSHPublicKeyTypes: - return _load_ssh_public_identity(data) - - -def _parse_exts_opts(exts_opts: memoryview) -> dict[bytes, bytes]: - result: dict[bytes, bytes] = {} - last_name = None - while exts_opts: - name, exts_opts = _get_sshstr(exts_opts) - bname: bytes = bytes(name) - if bname in result: - raise ValueError("Duplicate name") - if last_name is not None and bname < last_name: - raise ValueError("Fields not lexically sorted") - value, exts_opts = _get_sshstr(exts_opts) - if len(value) > 0: - value, extra = _get_sshstr(value) - if len(extra) > 0: - raise 
ValueError("Unexpected extra data after value") - result[bname] = bytes(value) - last_name = bname - return result - - -def ssh_key_fingerprint( - key: SSHPublicKeyTypes, - hash_algorithm: hashes.MD5 | hashes.SHA256, -) -> bytes: - if not isinstance(hash_algorithm, (hashes.MD5, hashes.SHA256)): - raise TypeError("hash_algorithm must be either MD5 or SHA256") - - key_type = _get_ssh_key_type(key) - kformat = _lookup_kformat(key_type) - - f_pub = _FragList() - f_pub.put_sshstr(key_type) - kformat.encode_public(key, f_pub) - - ssh_binary_data = f_pub.tobytes() - - # Hash the binary data - hash_obj = hashes.Hash(hash_algorithm) - hash_obj.update(ssh_binary_data) - return hash_obj.finalize() - - -def load_ssh_public_key( - data: utils.Buffer, backend: typing.Any = None -) -> SSHPublicKeyTypes: - cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True) - public_key: SSHPublicKeyTypes - if isinstance(cert_or_key, SSHCertificate): - public_key = cert_or_key.public_key() - else: - public_key = cert_or_key - - if isinstance(public_key, dsa.DSAPublicKey): - warnings.warn( - "SSH DSA keys are deprecated and will be removed in a future " - "release.", - utils.DeprecatedIn40, - stacklevel=2, - ) - return public_key - - -def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes: - """One-line public key format for OpenSSH""" - if isinstance(public_key, dsa.DSAPublicKey): - warnings.warn( - "SSH DSA key support is deprecated and will be " - "removed in a future release", - utils.DeprecatedIn40, - stacklevel=4, - ) - key_type = _get_ssh_key_type(public_key) - kformat = _lookup_kformat(key_type) - - f_pub = _FragList() - f_pub.put_sshstr(key_type) - kformat.encode_public(public_key, f_pub) - - pub = binascii.b2a_base64(f_pub.tobytes()).strip() - return b"".join([key_type, b" ", pub]) - - -SSHCertPrivateKeyTypes = typing.Union[ - ec.EllipticCurvePrivateKey, - rsa.RSAPrivateKey, - ed25519.Ed25519PrivateKey, -] - - -# This is an undocumented limit enforced in the openssh codebase for sshd and -# ssh-keygen, but it is undefined in the ssh certificates spec. 
-_SSHKEY_CERT_MAX_PRINCIPALS = 256 - - -class SSHCertificateBuilder: - def __init__( - self, - _public_key: SSHCertPublicKeyTypes | None = None, - _serial: int | None = None, - _type: SSHCertificateType | None = None, - _key_id: bytes | None = None, - _valid_principals: list[bytes] = [], - _valid_for_all_principals: bool = False, - _valid_before: int | None = None, - _valid_after: int | None = None, - _critical_options: list[tuple[bytes, bytes]] = [], - _extensions: list[tuple[bytes, bytes]] = [], - ): - self._public_key = _public_key - self._serial = _serial - self._type = _type - self._key_id = _key_id - self._valid_principals = _valid_principals - self._valid_for_all_principals = _valid_for_all_principals - self._valid_before = _valid_before - self._valid_after = _valid_after - self._critical_options = _critical_options - self._extensions = _extensions - - def public_key( - self, public_key: SSHCertPublicKeyTypes - ) -> SSHCertificateBuilder: - if not isinstance( - public_key, - ( - ec.EllipticCurvePublicKey, - rsa.RSAPublicKey, - ed25519.Ed25519PublicKey, - ), - ): - raise TypeError("Unsupported key type") - if self._public_key is not None: - raise ValueError("public_key already set") - - return SSHCertificateBuilder( - _public_key=public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def serial(self, serial: int) -> SSHCertificateBuilder: - if not isinstance(serial, int): - raise TypeError("serial must be an integer") - if not 0 <= serial < 2**64: - raise ValueError("serial must be between 0 and 2**64") - if self._serial is not None: - raise ValueError("serial already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def type(self, type: SSHCertificateType) -> SSHCertificateBuilder: - if not isinstance(type, SSHCertificateType): - raise TypeError("type must be an SSHCertificateType") - if self._type is not None: - raise ValueError("type already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def key_id(self, key_id: bytes) -> SSHCertificateBuilder: - if not isinstance(key_id, bytes): - raise TypeError("key_id must be bytes") - if self._key_id is not None: - raise ValueError("key_id already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_principals( - self, valid_principals: list[bytes] 
- ) -> SSHCertificateBuilder: - if self._valid_for_all_principals: - raise ValueError( - "Principals can't be set because the cert is valid " - "for all principals" - ) - if ( - not all(isinstance(x, bytes) for x in valid_principals) - or not valid_principals - ): - raise TypeError( - "principals must be a list of bytes and can't be empty" - ) - if self._valid_principals: - raise ValueError("valid_principals already set") - - if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS: - raise ValueError( - "Reached or exceeded the maximum number of valid_principals" - ) - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_for_all_principals(self): - if self._valid_principals: - raise ValueError( - "valid_principals already set, can't set " - "valid_for_all_principals" - ) - if self._valid_for_all_principals: - raise ValueError("valid_for_all_principals already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=True, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_before(self, valid_before: int | float) -> SSHCertificateBuilder: - if not isinstance(valid_before, (int, float)): - raise TypeError("valid_before must be an int or float") - valid_before = int(valid_before) - if valid_before < 0 or valid_before >= 2**64: - raise ValueError("valid_before must [0, 2**64)") - if self._valid_before is not None: - raise ValueError("valid_before already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_after(self, valid_after: int | float) -> SSHCertificateBuilder: - if not isinstance(valid_after, (int, float)): - raise TypeError("valid_after must be an int or float") - valid_after = int(valid_after) - if valid_after < 0 or valid_after >= 2**64: - raise ValueError("valid_after must [0, 2**64)") - if self._valid_after is not None: - raise ValueError("valid_after already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def add_critical_option( - self, name: bytes, value: bytes - ) -> SSHCertificateBuilder: - if not isinstance(name, bytes) or not isinstance(value, bytes): - raise TypeError("name and value must be bytes") - # This is O(n**2) - if name in [name for name, _ in self._critical_options]: - raise ValueError("Duplicate critical option name") - - return SSHCertificateBuilder( - _public_key=self._public_key, - 
_serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=[*self._critical_options, (name, value)], - _extensions=self._extensions, - ) - - def add_extension( - self, name: bytes, value: bytes - ) -> SSHCertificateBuilder: - if not isinstance(name, bytes) or not isinstance(value, bytes): - raise TypeError("name and value must be bytes") - # This is O(n**2) - if name in [name for name, _ in self._extensions]: - raise ValueError("Duplicate extension name") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=[*self._extensions, (name, value)], - ) - - def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate: - if not isinstance( - private_key, - ( - ec.EllipticCurvePrivateKey, - rsa.RSAPrivateKey, - ed25519.Ed25519PrivateKey, - ), - ): - raise TypeError("Unsupported private key type") - - if self._public_key is None: - raise ValueError("public_key must be set") - - # Not required - serial = 0 if self._serial is None else self._serial - - if self._type is None: - raise ValueError("type must be set") - - # Not required - key_id = b"" if self._key_id is None else self._key_id - - # A zero length list is valid, but means the certificate - # is valid for any principal of the specified type. We require - # the user to explicitly set valid_for_all_principals to get - # that behavior. 
- if not self._valid_principals and not self._valid_for_all_principals: - raise ValueError( - "valid_principals must be set if valid_for_all_principals " - "is False" - ) - - if self._valid_before is None: - raise ValueError("valid_before must be set") - - if self._valid_after is None: - raise ValueError("valid_after must be set") - - if self._valid_after > self._valid_before: - raise ValueError("valid_after must be earlier than valid_before") - - # lexically sort our byte strings - self._critical_options.sort(key=lambda x: x[0]) - self._extensions.sort(key=lambda x: x[0]) - - key_type = _get_ssh_key_type(self._public_key) - cert_prefix = key_type + _CERT_SUFFIX - - # Marshal the bytes to be signed - nonce = os.urandom(32) - kformat = _lookup_kformat(key_type) - f = _FragList() - f.put_sshstr(cert_prefix) - f.put_sshstr(nonce) - kformat.encode_public(self._public_key, f) - f.put_u64(serial) - f.put_u32(self._type.value) - f.put_sshstr(key_id) - fprincipals = _FragList() - for p in self._valid_principals: - fprincipals.put_sshstr(p) - f.put_sshstr(fprincipals.tobytes()) - f.put_u64(self._valid_after) - f.put_u64(self._valid_before) - fcrit = _FragList() - for name, value in self._critical_options: - fcrit.put_sshstr(name) - if len(value) > 0: - foptval = _FragList() - foptval.put_sshstr(value) - fcrit.put_sshstr(foptval.tobytes()) - else: - fcrit.put_sshstr(value) - f.put_sshstr(fcrit.tobytes()) - fext = _FragList() - for name, value in self._extensions: - fext.put_sshstr(name) - if len(value) > 0: - fextval = _FragList() - fextval.put_sshstr(value) - fext.put_sshstr(fextval.tobytes()) - else: - fext.put_sshstr(value) - f.put_sshstr(fext.tobytes()) - f.put_sshstr(b"") # RESERVED FIELD - # encode CA public key - ca_type = _get_ssh_key_type(private_key) - caformat = _lookup_kformat(ca_type) - caf = _FragList() - caf.put_sshstr(ca_type) - caformat.encode_public(private_key.public_key(), caf) - f.put_sshstr(caf.tobytes()) - # Sigs according to the rules defined for the CA's public key - # (RFC4253 section 6.6 for ssh-rsa, RFC5656 for ECDSA, - # and RFC8032 for Ed25519). - if isinstance(private_key, ed25519.Ed25519PrivateKey): - signature = private_key.sign(f.tobytes()) - fsig = _FragList() - fsig.put_sshstr(ca_type) - fsig.put_sshstr(signature) - f.put_sshstr(fsig.tobytes()) - elif isinstance(private_key, ec.EllipticCurvePrivateKey): - hash_alg = _get_ec_hash_alg(private_key.curve) - signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg)) - r, s = asym_utils.decode_dss_signature(signature) - fsig = _FragList() - fsig.put_sshstr(ca_type) - fsigblob = _FragList() - fsigblob.put_mpint(r) - fsigblob.put_mpint(s) - fsig.put_sshstr(fsigblob.tobytes()) - f.put_sshstr(fsig.tobytes()) - - else: - assert isinstance(private_key, rsa.RSAPrivateKey) - # Just like Golang, we're going to use SHA512 for RSA - # https://cs.opensource.google/go/x/crypto/+/refs/tags/ - # v0.4.0:ssh/certs.go;l=445 - # RFC 8332 defines SHA256 and 512 as options - fsig = _FragList() - fsig.put_sshstr(_SSH_RSA_SHA512) - signature = private_key.sign( - f.tobytes(), padding.PKCS1v15(), hashes.SHA512() - ) - fsig.put_sshstr(signature) - f.put_sshstr(fsig.tobytes()) - - cert_data = binascii.b2a_base64(f.tobytes()).strip() - # load_ssh_public_identity returns a union, but this is - # guaranteed to be an SSHCertificate, so we cast to make - # mypy happy. 
- return typing.cast( - SSHCertificate, - load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])), - ) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py deleted file mode 100644 index c1af423..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - - -class InvalidToken(Exception): - pass diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index f45ef24..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-310.pyc deleted file mode 100644 index 91f290b..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-310.pyc deleted file mode 100644 index 9438b9b..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py deleted file mode 100644 index 21fb000..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py +++ /dev/null @@ -1,101 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import base64 -import typing -from urllib.parse import quote, urlencode - -from cryptography.hazmat.primitives import constant_time, hmac -from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 -from cryptography.hazmat.primitives.twofactor import InvalidToken -from cryptography.utils import Buffer - -HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512] - - -def _generate_uri( - hotp: HOTP, - type_name: str, - account_name: str, - issuer: str | None, - extra_parameters: list[tuple[str, int]], -) -> str: - parameters = [ - ("digits", hotp._length), - ("secret", base64.b32encode(hotp._key)), - ("algorithm", hotp._algorithm.name.upper()), - ] - - if issuer is not None: - parameters.append(("issuer", issuer)) - - parameters.extend(extra_parameters) - - label = ( - f"{quote(issuer)}:{quote(account_name)}" - if issuer - else quote(account_name) - ) - return f"otpauth://{type_name}/{label}?{urlencode(parameters)}" - - -class HOTP: - def __init__( - self, - key: Buffer, - length: int, - algorithm: HOTPHashTypes, - backend: typing.Any = None, - enforce_key_length: bool = True, - ) -> None: - if len(key) < 16 and enforce_key_length is True: - raise ValueError("Key length has to be at least 128 bits.") - - if not isinstance(length, int): - raise TypeError("Length parameter must be an integer type.") - - if length < 6 or length > 8: - raise ValueError("Length of HOTP has to be between 6 and 8.") - - if not isinstance(algorithm, (SHA1, SHA256, SHA512)): - raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") - - self._key = key - self._length = length - self._algorithm = algorithm - - def generate(self, counter: int) -> bytes: - if not isinstance(counter, int): - raise TypeError("Counter parameter must be an integer type.") - - truncated_value = self._dynamic_truncate(counter) - hotp = truncated_value % (10**self._length) - return "{0:0{1}}".format(hotp, self._length).encode() - - def verify(self, hotp: bytes, counter: int) -> None: - if not constant_time.bytes_eq(self.generate(counter), hotp): - raise InvalidToken("Supplied HOTP value does not match.") - - def _dynamic_truncate(self, counter: int) -> int: - ctx = hmac.HMAC(self._key, self._algorithm) - - try: - ctx.update(counter.to_bytes(length=8, byteorder="big")) - except OverflowError: - raise ValueError(f"Counter must be between 0 and {2**64 - 1}.") - - hmac_value = ctx.finalize() - - offset = hmac_value[len(hmac_value) - 1] & 0b1111 - p = hmac_value[offset : offset + 4] - return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF - - def get_provisioning_uri( - self, account_name: str, counter: int, issuer: str | None - ) -> str: - return _generate_uri( - self, "hotp", account_name, issuer, [("counter", int(counter))] - ) diff --git a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/totp.py deleted file mode 100644 index 10c725c..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/totp.py +++ /dev/null @@ -1,56 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography.hazmat.primitives import constant_time -from cryptography.hazmat.primitives.twofactor import InvalidToken -from cryptography.hazmat.primitives.twofactor.hotp import ( - HOTP, - HOTPHashTypes, - _generate_uri, -) -from cryptography.utils import Buffer - - -class TOTP: - def __init__( - self, - key: Buffer, - length: int, - algorithm: HOTPHashTypes, - time_step: int, - backend: typing.Any = None, - enforce_key_length: bool = True, - ): - self._time_step = time_step - self._hotp = HOTP( - key, length, algorithm, enforce_key_length=enforce_key_length - ) - - def generate(self, time: int | float) -> bytes: - if not isinstance(time, (int, float)): - raise TypeError( - "Time parameter must be an integer type or float type." - ) - - counter = int(time / self._time_step) - return self._hotp.generate(counter) - - def verify(self, totp: bytes, time: int) -> None: - if not constant_time.bytes_eq(self.generate(time), totp): - raise InvalidToken("Supplied TOTP value does not match.") - - def get_provisioning_uri( - self, account_name: str, issuer: str | None - ) -> str: - return _generate_uri( - self._hotp, - "totp", - account_name, - issuer, - [("period", int(self._time_step))], - ) diff --git a/venv/lib/python3.10/site-packages/cryptography/py.typed b/venv/lib/python3.10/site-packages/cryptography/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/cryptography/utils.py b/venv/lib/python3.10/site-packages/cryptography/utils.py deleted file mode 100644 index 7de7e4d..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/utils.py +++ /dev/null @@ -1,139 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import enum -import sys -import types -import typing -import warnings -from collections.abc import Callable, Sequence - - -# We use a UserWarning subclass, instead of DeprecationWarning, because CPython -# decided deprecation warnings should be invisible by default. -class CryptographyDeprecationWarning(UserWarning): - pass - - -# Several APIs were deprecated with no specific end-of-life date because of the -# ubiquity of their use. They should not be removed until we agree on when that -# cycle ends. -DeprecatedIn36 = CryptographyDeprecationWarning -DeprecatedIn37 = CryptographyDeprecationWarning -DeprecatedIn40 = CryptographyDeprecationWarning -DeprecatedIn41 = CryptographyDeprecationWarning -DeprecatedIn42 = CryptographyDeprecationWarning -DeprecatedIn43 = CryptographyDeprecationWarning -DeprecatedIn45 = CryptographyDeprecationWarning - - -# If you're wondering why we don't use `Buffer`, it's because `Buffer` would -# be more accurately named: Bufferable. It means something which has an -# `__buffer__`. Which means you can't actually treat the result as a buffer -# (and do things like take a `len()`). 
-if sys.version_info >= (3, 9): - Buffer = typing.Union[bytes, bytearray, memoryview] -else: - Buffer = typing.ByteString - - -def _check_bytes(name: str, value: bytes) -> None: - if not isinstance(value, bytes): - raise TypeError(f"{name} must be bytes") - - -def _check_byteslike(name: str, value: Buffer) -> None: - try: - memoryview(value) - except TypeError: - raise TypeError(f"{name} must be bytes-like") - - -def int_to_bytes(integer: int, length: int | None = None) -> bytes: - if length == 0: - raise ValueError("length argument can't be 0") - return integer.to_bytes( - length or (integer.bit_length() + 7) // 8 or 1, "big" - ) - - -class InterfaceNotImplemented(Exception): - pass - - -class _DeprecatedValue: - def __init__(self, value: object, message: str, warning_class): - self.value = value - self.message = message - self.warning_class = warning_class - - -class _ModuleWithDeprecations(types.ModuleType): - def __init__(self, module: types.ModuleType): - super().__init__(module.__name__) - self.__dict__["_module"] = module - - def __getattr__(self, attr: str) -> object: - obj = getattr(self._module, attr) - if isinstance(obj, _DeprecatedValue): - warnings.warn(obj.message, obj.warning_class, stacklevel=2) - obj = obj.value - return obj - - def __setattr__(self, attr: str, value: object) -> None: - setattr(self._module, attr, value) - - def __delattr__(self, attr: str) -> None: - obj = getattr(self._module, attr) - if isinstance(obj, _DeprecatedValue): - warnings.warn(obj.message, obj.warning_class, stacklevel=2) - - delattr(self._module, attr) - - def __dir__(self) -> Sequence[str]: - return ["_module", *dir(self._module)] - - -def deprecated( - value: object, - module_name: str, - message: str, - warning_class: type[Warning], - name: str | None = None, -) -> _DeprecatedValue: - module = sys.modules[module_name] - if not isinstance(module, _ModuleWithDeprecations): - sys.modules[module_name] = module = _ModuleWithDeprecations(module) - dv = _DeprecatedValue(value, message, warning_class) - # Maintain backwards compatibility with `name is None` for pyOpenSSL. - if name is not None: - setattr(module, name, dv) - return dv - - -def cached_property(func: Callable) -> property: - cached_name = f"_cached_{func}" - sentinel = object() - - def inner(instance: object): - cache = getattr(instance, cached_name, sentinel) - if cache is not sentinel: - return cache - result = func(instance) - setattr(instance, cached_name, result) - return result - - return property(inner) - - -# Python 3.10 changed representation of enums. We use well-defined object -# representation and string representation from Python 3.9. -class Enum(enum.Enum): - def __repr__(self) -> str: - return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>" - - def __str__(self) -> str: - return f"{self.__class__.__name__}.{self._name_}" diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__init__.py b/venv/lib/python3.10/site-packages/cryptography/x509/__init__.py deleted file mode 100644 index 318eecc..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/x509/__init__.py +++ /dev/null @@ -1,270 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
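[Reference note, not part of the patch: the utils module deleted above is mostly internal plumbing (deprecation shims, byte helpers, a stable Enum repr). A small sketch of the two pieces application code is most likely to touch, using only names defined in the module shown above:]

    # Sketch only: helpers importable from cryptography.utils.
    import warnings
    from cryptography.utils import CryptographyDeprecationWarning, int_to_bytes

    int_to_bytes(65537)      # b'\x01\x00\x01'  (minimal big-endian encoding)
    int_to_bytes(65537, 4)   # b'\x00\x01\x00\x01'  (padded to a fixed length)

    # Library deprecation warnings derive from UserWarning, so they are visible
    # by default; they can be filtered explicitly if desired:
    warnings.filterwarnings("ignore", category=CryptographyDeprecationWarning)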
- -from __future__ import annotations - -from cryptography.x509 import certificate_transparency, verification -from cryptography.x509.base import ( - Attribute, - AttributeNotFound, - Attributes, - Certificate, - CertificateBuilder, - CertificateRevocationList, - CertificateRevocationListBuilder, - CertificateSigningRequest, - CertificateSigningRequestBuilder, - InvalidVersion, - RevokedCertificate, - RevokedCertificateBuilder, - Version, - load_der_x509_certificate, - load_der_x509_crl, - load_der_x509_csr, - load_pem_x509_certificate, - load_pem_x509_certificates, - load_pem_x509_crl, - load_pem_x509_csr, - random_serial_number, -) -from cryptography.x509.extensions import ( - AccessDescription, - Admission, - Admissions, - AuthorityInformationAccess, - AuthorityKeyIdentifier, - BasicConstraints, - CertificateIssuer, - CertificatePolicies, - CRLDistributionPoints, - CRLNumber, - CRLReason, - DeltaCRLIndicator, - DistributionPoint, - DuplicateExtension, - ExtendedKeyUsage, - Extension, - ExtensionNotFound, - Extensions, - ExtensionType, - FreshestCRL, - GeneralNames, - InhibitAnyPolicy, - InvalidityDate, - IssuerAlternativeName, - IssuingDistributionPoint, - KeyUsage, - MSCertificateTemplate, - NameConstraints, - NamingAuthority, - NoticeReference, - OCSPAcceptableResponses, - OCSPNoCheck, - OCSPNonce, - PolicyConstraints, - PolicyInformation, - PrecertificateSignedCertificateTimestamps, - PrecertPoison, - PrivateKeyUsagePeriod, - ProfessionInfo, - ReasonFlags, - SignedCertificateTimestamps, - SubjectAlternativeName, - SubjectInformationAccess, - SubjectKeyIdentifier, - TLSFeature, - TLSFeatureType, - UnrecognizedExtension, - UserNotice, -) -from cryptography.x509.general_name import ( - DirectoryName, - DNSName, - GeneralName, - IPAddress, - OtherName, - RegisteredID, - RFC822Name, - UniformResourceIdentifier, - UnsupportedGeneralNameType, -) -from cryptography.x509.name import ( - Name, - NameAttribute, - RelativeDistinguishedName, -) -from cryptography.x509.oid import ( - AuthorityInformationAccessOID, - CertificatePoliciesOID, - CRLEntryExtensionOID, - ExtendedKeyUsageOID, - ExtensionOID, - NameOID, - ObjectIdentifier, - PublicKeyAlgorithmOID, - SignatureAlgorithmOID, -) - -OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS -OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER -OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS -OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES -OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS -OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE -OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL -OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY -OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME -OID_KEY_USAGE = ExtensionOID.KEY_USAGE -OID_PRIVATE_KEY_USAGE_PERIOD = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD -OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS -OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK -OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS -OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS -OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME -OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES -OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS -OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER - -OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 -OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 -OID_DSA_WITH_SHA256 = 
SignatureAlgorithmOID.DSA_WITH_SHA256 -OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 -OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 -OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 -OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 -OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 -OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 -OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 -OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 -OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 -OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 -OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 -OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS - -OID_COMMON_NAME = NameOID.COMMON_NAME -OID_COUNTRY_NAME = NameOID.COUNTRY_NAME -OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT -OID_DN_QUALIFIER = NameOID.DN_QUALIFIER -OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS -OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER -OID_GIVEN_NAME = NameOID.GIVEN_NAME -OID_LOCALITY_NAME = NameOID.LOCALITY_NAME -OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME -OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME -OID_PSEUDONYM = NameOID.PSEUDONYM -OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER -OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME -OID_SURNAME = NameOID.SURNAME -OID_TITLE = NameOID.TITLE - -OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH -OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING -OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION -OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING -OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH -OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING - -OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY -OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER -OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE - -OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER -OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON -OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE - -OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS -OID_OCSP = AuthorityInformationAccessOID.OCSP - -__all__ = [ - "OID_CA_ISSUERS", - "OID_OCSP", - "AccessDescription", - "Admission", - "Admissions", - "Attribute", - "AttributeNotFound", - "Attributes", - "AuthorityInformationAccess", - "AuthorityKeyIdentifier", - "BasicConstraints", - "CRLDistributionPoints", - "CRLNumber", - "CRLReason", - "Certificate", - "CertificateBuilder", - "CertificateIssuer", - "CertificatePolicies", - "CertificateRevocationList", - "CertificateRevocationListBuilder", - "CertificateSigningRequest", - "CertificateSigningRequestBuilder", - "DNSName", - "DeltaCRLIndicator", - "DirectoryName", - "DistributionPoint", - "DuplicateExtension", - "ExtendedKeyUsage", - "Extension", - "ExtensionNotFound", - "ExtensionType", - "Extensions", - "FreshestCRL", - "GeneralName", - "GeneralNames", - "IPAddress", - "InhibitAnyPolicy", - "InvalidVersion", - "InvalidityDate", - "IssuerAlternativeName", - "IssuingDistributionPoint", - "KeyUsage", - "MSCertificateTemplate", - "Name", - "NameAttribute", - "NameConstraints", - "NameOID", - "NamingAuthority", - "NoticeReference", - "OCSPAcceptableResponses", - "OCSPNoCheck", - "OCSPNonce", - "ObjectIdentifier", - "OtherName", - "PolicyConstraints", - "PolicyInformation", - "PrecertPoison", - "PrecertificateSignedCertificateTimestamps", - "PrivateKeyUsagePeriod", - "ProfessionInfo", - "PublicKeyAlgorithmOID", - 
"RFC822Name", - "ReasonFlags", - "RegisteredID", - "RelativeDistinguishedName", - "RevokedCertificate", - "RevokedCertificateBuilder", - "SignatureAlgorithmOID", - "SignedCertificateTimestamps", - "SubjectAlternativeName", - "SubjectInformationAccess", - "SubjectKeyIdentifier", - "TLSFeature", - "TLSFeatureType", - "UniformResourceIdentifier", - "UnrecognizedExtension", - "UnsupportedGeneralNameType", - "UserNotice", - "Version", - "certificate_transparency", - "load_der_x509_certificate", - "load_der_x509_crl", - "load_der_x509_csr", - "load_pem_x509_certificate", - "load_pem_x509_certificates", - "load_pem_x509_crl", - "load_pem_x509_csr", - "random_serial_number", - "verification", - "verification", -] diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index e1895f7..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/base.cpython-310.pyc deleted file mode 100644 index 67b144b..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-310.pyc deleted file mode 100644 index 36f503f..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/extensions.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/extensions.cpython-310.pyc deleted file mode 100644 index cbaee34..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/extensions.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/general_name.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/general_name.cpython-310.pyc deleted file mode 100644 index dc008a7..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/general_name.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc deleted file mode 100644 index 0901a08..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/ocsp.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/ocsp.cpython-310.pyc deleted file mode 100644 index cc66423..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/ocsp.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/oid.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/oid.cpython-310.pyc deleted file mode 100644 index b29c398..0000000 Binary files 
a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/oid.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/verification.cpython-310.pyc b/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/verification.cpython-310.pyc deleted file mode 100644 index 7689500..0000000 Binary files a/venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/verification.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/base.py b/venv/lib/python3.10/site-packages/cryptography/x509/base.py deleted file mode 100644 index 1be612b..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/x509/base.py +++ /dev/null @@ -1,848 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import datetime -import os -import typing -import warnings -from collections.abc import Iterable - -from cryptography import utils -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed448, - ed25519, - padding, - rsa, - x448, - x25519, -) -from cryptography.hazmat.primitives.asymmetric.types import ( - CertificateIssuerPrivateKeyTypes, - CertificatePublicKeyTypes, -) -from cryptography.x509.extensions import ( - Extension, - Extensions, - ExtensionType, - _make_sequence_methods, -) -from cryptography.x509.name import Name, _ASN1Type -from cryptography.x509.oid import ObjectIdentifier - -_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) - -# This must be kept in sync with sign.rs's list of allowable types in -# identify_hash_type -_AllowedHashTypes = typing.Union[ - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - hashes.SHA3_224, - hashes.SHA3_256, - hashes.SHA3_384, - hashes.SHA3_512, -] - - -class AttributeNotFound(Exception): - def __init__(self, msg: str, oid: ObjectIdentifier) -> None: - super().__init__(msg) - self.oid = oid - - -def _reject_duplicate_extension( - extension: Extension[ExtensionType], - extensions: list[Extension[ExtensionType]], -) -> None: - # This is quadratic in the number of extensions - for e in extensions: - if e.oid == extension.oid: - raise ValueError("This extension has already been set.") - - -def _reject_duplicate_attribute( - oid: ObjectIdentifier, - attributes: list[tuple[ObjectIdentifier, bytes, int | None]], -) -> None: - # This is quadratic in the number of attributes - for attr_oid, _, _ in attributes: - if attr_oid == oid: - raise ValueError("This attribute has already been set.") - - -def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime: - """Normalizes a datetime to a naive datetime in UTC. - - time -- datetime to normalize. Assumed to be in UTC if not timezone - aware. 
- """ - if time.tzinfo is not None: - offset = time.utcoffset() - offset = offset if offset else datetime.timedelta() - return time.replace(tzinfo=None) - offset - else: - return time - - -class Attribute: - def __init__( - self, - oid: ObjectIdentifier, - value: bytes, - _type: int = _ASN1Type.UTF8String.value, - ) -> None: - self._oid = oid - self._value = value - self._type = _type - - @property - def oid(self) -> ObjectIdentifier: - return self._oid - - @property - def value(self) -> bytes: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Attribute): - return NotImplemented - - return ( - self.oid == other.oid - and self.value == other.value - and self._type == other._type - ) - - def __hash__(self) -> int: - return hash((self.oid, self.value, self._type)) - - -class Attributes: - def __init__( - self, - attributes: Iterable[Attribute], - ) -> None: - self._attributes = list(attributes) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes") - - def __repr__(self) -> str: - return f"" - - def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute: - for attr in self: - if attr.oid == oid: - return attr - - raise AttributeNotFound(f"No {oid} attribute was found", oid) - - -class Version(utils.Enum): - v1 = 0 - v3 = 2 - - -class InvalidVersion(Exception): - def __init__(self, msg: str, parsed_version: int) -> None: - super().__init__(msg) - self.parsed_version = parsed_version - - -Certificate = rust_x509.Certificate - - -class RevokedCertificate(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def serial_number(self) -> int: - """ - Returns the serial number of the revoked certificate. - """ - - @property - @abc.abstractmethod - def revocation_date(self) -> datetime.datetime: - """ - Returns the date of when this certificate was revoked. - """ - - @property - @abc.abstractmethod - def revocation_date_utc(self) -> datetime.datetime: - """ - Returns the date of when this certificate was revoked as a non-naive - UTC datetime. - """ - - @property - @abc.abstractmethod - def extensions(self) -> Extensions: - """ - Returns an Extensions object containing a list of Revoked extensions. - """ - - -# Runtime isinstance checks need this since the rust class is not a subclass. -RevokedCertificate.register(rust_x509.RevokedCertificate) - - -class _RawRevokedCertificate(RevokedCertificate): - def __init__( - self, - serial_number: int, - revocation_date: datetime.datetime, - extensions: Extensions, - ): - self._serial_number = serial_number - self._revocation_date = revocation_date - self._extensions = extensions - - @property - def serial_number(self) -> int: - return self._serial_number - - @property - def revocation_date(self) -> datetime.datetime: - warnings.warn( - "Properties that return a naïve datetime object have been " - "deprecated. 
Please switch to revocation_date_utc.", - utils.DeprecatedIn42, - stacklevel=2, - ) - return self._revocation_date - - @property - def revocation_date_utc(self) -> datetime.datetime: - return self._revocation_date.replace(tzinfo=datetime.timezone.utc) - - @property - def extensions(self) -> Extensions: - return self._extensions - - -CertificateRevocationList = rust_x509.CertificateRevocationList -CertificateSigningRequest = rust_x509.CertificateSigningRequest - - -load_pem_x509_certificate = rust_x509.load_pem_x509_certificate -load_der_x509_certificate = rust_x509.load_der_x509_certificate - -load_pem_x509_certificates = rust_x509.load_pem_x509_certificates - -load_pem_x509_csr = rust_x509.load_pem_x509_csr -load_der_x509_csr = rust_x509.load_der_x509_csr - -load_pem_x509_crl = rust_x509.load_pem_x509_crl -load_der_x509_crl = rust_x509.load_der_x509_crl - - -class CertificateSigningRequestBuilder: - def __init__( - self, - subject_name: Name | None = None, - extensions: list[Extension[ExtensionType]] = [], - attributes: list[tuple[ObjectIdentifier, bytes, int | None]] = [], - ): - """ - Creates an empty X.509 certificate request (v1). - """ - self._subject_name = subject_name - self._extensions = extensions - self._attributes = attributes - - def subject_name(self, name: Name) -> CertificateSigningRequestBuilder: - """ - Sets the certificate requestor's distinguished name. - """ - if not isinstance(name, Name): - raise TypeError("Expecting x509.Name object.") - if self._subject_name is not None: - raise ValueError("The subject name may only be set once.") - return CertificateSigningRequestBuilder( - name, self._extensions, self._attributes - ) - - def add_extension( - self, extval: ExtensionType, critical: bool - ) -> CertificateSigningRequestBuilder: - """ - Adds an X.509 extension to the certificate request. - """ - if not isinstance(extval, ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return CertificateSigningRequestBuilder( - self._subject_name, - [*self._extensions, extension], - self._attributes, - ) - - def add_attribute( - self, - oid: ObjectIdentifier, - value: bytes, - *, - _tag: _ASN1Type | None = None, - ) -> CertificateSigningRequestBuilder: - """ - Adds an X.509 attribute with an OID and associated value. - """ - if not isinstance(oid, ObjectIdentifier): - raise TypeError("oid must be an ObjectIdentifier") - - if not isinstance(value, bytes): - raise TypeError("value must be bytes") - - if _tag is not None and not isinstance(_tag, _ASN1Type): - raise TypeError("tag must be _ASN1Type") - - _reject_duplicate_attribute(oid, self._attributes) - - if _tag is not None: - tag = _tag.value - else: - tag = None - - return CertificateSigningRequestBuilder( - self._subject_name, - self._extensions, - [*self._attributes, (oid, value, tag)], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: _AllowedHashTypes | None, - backend: typing.Any = None, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ecdsa_deterministic: bool | None = None, - ) -> CertificateSigningRequest: - """ - Signs the request using the requestor's private key. 
- """ - if self._subject_name is None: - raise ValueError("A CertificateSigningRequest must have a subject") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - if ecdsa_deterministic is not None: - if not isinstance(private_key, ec.EllipticCurvePrivateKey): - raise TypeError( - "Deterministic ECDSA is only supported for EC keys" - ) - - return rust_x509.create_x509_csr( - self, - private_key, - algorithm, - rsa_padding, - ecdsa_deterministic, - ) - - -class CertificateBuilder: - _extensions: list[Extension[ExtensionType]] - - def __init__( - self, - issuer_name: Name | None = None, - subject_name: Name | None = None, - public_key: CertificatePublicKeyTypes | None = None, - serial_number: int | None = None, - not_valid_before: datetime.datetime | None = None, - not_valid_after: datetime.datetime | None = None, - extensions: list[Extension[ExtensionType]] = [], - ) -> None: - self._version = Version.v3 - self._issuer_name = issuer_name - self._subject_name = subject_name - self._public_key = public_key - self._serial_number = serial_number - self._not_valid_before = not_valid_before - self._not_valid_after = not_valid_after - self._extensions = extensions - - def issuer_name(self, name: Name) -> CertificateBuilder: - """ - Sets the CA's distinguished name. - """ - if not isinstance(name, Name): - raise TypeError("Expecting x509.Name object.") - if self._issuer_name is not None: - raise ValueError("The issuer name may only be set once.") - return CertificateBuilder( - name, - self._subject_name, - self._public_key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def subject_name(self, name: Name) -> CertificateBuilder: - """ - Sets the requestor's distinguished name. - """ - if not isinstance(name, Name): - raise TypeError("Expecting x509.Name object.") - if self._subject_name is not None: - raise ValueError("The subject name may only be set once.") - return CertificateBuilder( - self._issuer_name, - name, - self._public_key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def public_key( - self, - key: CertificatePublicKeyTypes, - ) -> CertificateBuilder: - """ - Sets the requestor's public key (as found in the signing request). - """ - if not isinstance( - key, - ( - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - x25519.X25519PublicKey, - x448.X448PublicKey, - ), - ): - raise TypeError( - "Expecting one of DSAPublicKey, RSAPublicKey," - " EllipticCurvePublicKey, Ed25519PublicKey," - " Ed448PublicKey, X25519PublicKey, or " - "X448PublicKey." - ) - if self._public_key is not None: - raise ValueError("The public key may only be set once.") - return CertificateBuilder( - self._issuer_name, - self._subject_name, - key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def serial_number(self, number: int) -> CertificateBuilder: - """ - Sets the certificate serial number. 
- """ - if not isinstance(number, int): - raise TypeError("Serial number must be of integral type.") - if self._serial_number is not None: - raise ValueError("The serial number may only be set once.") - if number <= 0: - raise ValueError("The serial number should be positive.") - - # ASN.1 integers are always signed, so most significant bit must be - # zero. - if number.bit_length() >= 160: # As defined in RFC 5280 - raise ValueError( - "The serial number should not be more than 159 bits." - ) - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder: - """ - Sets the certificate activation time. - """ - if not isinstance(time, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._not_valid_before is not None: - raise ValueError("The not valid before may only be set once.") - time = _convert_to_naive_utc_time(time) - if time < _EARLIEST_UTC_TIME: - raise ValueError( - "The not valid before date must be on or after" - " 1950 January 1)." - ) - if self._not_valid_after is not None and time > self._not_valid_after: - raise ValueError( - "The not valid before date must be before the not valid after " - "date." - ) - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - self._serial_number, - time, - self._not_valid_after, - self._extensions, - ) - - def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder: - """ - Sets the certificate expiration time. - """ - if not isinstance(time, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._not_valid_after is not None: - raise ValueError("The not valid after may only be set once.") - time = _convert_to_naive_utc_time(time) - if time < _EARLIEST_UTC_TIME: - raise ValueError( - "The not valid after date must be on or after 1950 January 1." - ) - if ( - self._not_valid_before is not None - and time < self._not_valid_before - ): - raise ValueError( - "The not valid after date must be after the not valid before " - "date." - ) - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - self._serial_number, - self._not_valid_before, - time, - self._extensions, - ) - - def add_extension( - self, extval: ExtensionType, critical: bool - ) -> CertificateBuilder: - """ - Adds an X.509 extension to the certificate. - """ - if not isinstance(extval, ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - [*self._extensions, extension], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: _AllowedHashTypes | None, - backend: typing.Any = None, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ecdsa_deterministic: bool | None = None, - ) -> Certificate: - """ - Signs the certificate using the CA's private key. 
- """ - if self._subject_name is None: - raise ValueError("A certificate must have a subject name") - - if self._issuer_name is None: - raise ValueError("A certificate must have an issuer name") - - if self._serial_number is None: - raise ValueError("A certificate must have a serial number") - - if self._not_valid_before is None: - raise ValueError("A certificate must have a not valid before time") - - if self._not_valid_after is None: - raise ValueError("A certificate must have a not valid after time") - - if self._public_key is None: - raise ValueError("A certificate must have a public key") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - if ecdsa_deterministic is not None: - if not isinstance(private_key, ec.EllipticCurvePrivateKey): - raise TypeError( - "Deterministic ECDSA is only supported for EC keys" - ) - - return rust_x509.create_x509_certificate( - self, - private_key, - algorithm, - rsa_padding, - ecdsa_deterministic, - ) - - -class CertificateRevocationListBuilder: - _extensions: list[Extension[ExtensionType]] - _revoked_certificates: list[RevokedCertificate] - - def __init__( - self, - issuer_name: Name | None = None, - last_update: datetime.datetime | None = None, - next_update: datetime.datetime | None = None, - extensions: list[Extension[ExtensionType]] = [], - revoked_certificates: list[RevokedCertificate] = [], - ): - self._issuer_name = issuer_name - self._last_update = last_update - self._next_update = next_update - self._extensions = extensions - self._revoked_certificates = revoked_certificates - - def issuer_name( - self, issuer_name: Name - ) -> CertificateRevocationListBuilder: - if not isinstance(issuer_name, Name): - raise TypeError("Expecting x509.Name object.") - if self._issuer_name is not None: - raise ValueError("The issuer name may only be set once.") - return CertificateRevocationListBuilder( - issuer_name, - self._last_update, - self._next_update, - self._extensions, - self._revoked_certificates, - ) - - def last_update( - self, last_update: datetime.datetime - ) -> CertificateRevocationListBuilder: - if not isinstance(last_update, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._last_update is not None: - raise ValueError("Last update may only be set once.") - last_update = _convert_to_naive_utc_time(last_update) - if last_update < _EARLIEST_UTC_TIME: - raise ValueError( - "The last update date must be on or after 1950 January 1." - ) - if self._next_update is not None and last_update > self._next_update: - raise ValueError( - "The last update date must be before the next update date." - ) - return CertificateRevocationListBuilder( - self._issuer_name, - last_update, - self._next_update, - self._extensions, - self._revoked_certificates, - ) - - def next_update( - self, next_update: datetime.datetime - ) -> CertificateRevocationListBuilder: - if not isinstance(next_update, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._next_update is not None: - raise ValueError("Last update may only be set once.") - next_update = _convert_to_naive_utc_time(next_update) - if next_update < _EARLIEST_UTC_TIME: - raise ValueError( - "The last update date must be on or after 1950 January 1." 
- ) - if self._last_update is not None and next_update < self._last_update: - raise ValueError( - "The next update date must be after the last update date." - ) - return CertificateRevocationListBuilder( - self._issuer_name, - self._last_update, - next_update, - self._extensions, - self._revoked_certificates, - ) - - def add_extension( - self, extval: ExtensionType, critical: bool - ) -> CertificateRevocationListBuilder: - """ - Adds an X.509 extension to the certificate revocation list. - """ - if not isinstance(extval, ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - return CertificateRevocationListBuilder( - self._issuer_name, - self._last_update, - self._next_update, - [*self._extensions, extension], - self._revoked_certificates, - ) - - def add_revoked_certificate( - self, revoked_certificate: RevokedCertificate - ) -> CertificateRevocationListBuilder: - """ - Adds a revoked certificate to the CRL. - """ - if not isinstance(revoked_certificate, RevokedCertificate): - raise TypeError("Must be an instance of RevokedCertificate") - - return CertificateRevocationListBuilder( - self._issuer_name, - self._last_update, - self._next_update, - self._extensions, - [*self._revoked_certificates, revoked_certificate], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: _AllowedHashTypes | None, - backend: typing.Any = None, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ecdsa_deterministic: bool | None = None, - ) -> CertificateRevocationList: - if self._issuer_name is None: - raise ValueError("A CRL must have an issuer name") - - if self._last_update is None: - raise ValueError("A CRL must have a last update time") - - if self._next_update is None: - raise ValueError("A CRL must have a next update time") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - if ecdsa_deterministic is not None: - if not isinstance(private_key, ec.EllipticCurvePrivateKey): - raise TypeError( - "Deterministic ECDSA is only supported for EC keys" - ) - - return rust_x509.create_x509_crl( - self, - private_key, - algorithm, - rsa_padding, - ecdsa_deterministic, - ) - - -class RevokedCertificateBuilder: - def __init__( - self, - serial_number: int | None = None, - revocation_date: datetime.datetime | None = None, - extensions: list[Extension[ExtensionType]] = [], - ): - self._serial_number = serial_number - self._revocation_date = revocation_date - self._extensions = extensions - - def serial_number(self, number: int) -> RevokedCertificateBuilder: - if not isinstance(number, int): - raise TypeError("Serial number must be of integral type.") - if self._serial_number is not None: - raise ValueError("The serial number may only be set once.") - if number <= 0: - raise ValueError("The serial number should be positive") - - # ASN.1 integers are always signed, so most significant bit must be - # zero. - if number.bit_length() >= 160: # As defined in RFC 5280 - raise ValueError( - "The serial number should not be more than 159 bits." 
- ) - return RevokedCertificateBuilder( - number, self._revocation_date, self._extensions - ) - - def revocation_date( - self, time: datetime.datetime - ) -> RevokedCertificateBuilder: - if not isinstance(time, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._revocation_date is not None: - raise ValueError("The revocation date may only be set once.") - time = _convert_to_naive_utc_time(time) - if time < _EARLIEST_UTC_TIME: - raise ValueError( - "The revocation date must be on or after 1950 January 1." - ) - return RevokedCertificateBuilder( - self._serial_number, time, self._extensions - ) - - def add_extension( - self, extval: ExtensionType, critical: bool - ) -> RevokedCertificateBuilder: - if not isinstance(extval, ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - return RevokedCertificateBuilder( - self._serial_number, - self._revocation_date, - [*self._extensions, extension], - ) - - def build(self, backend: typing.Any = None) -> RevokedCertificate: - if self._serial_number is None: - raise ValueError("A revoked certificate must have a serial number") - if self._revocation_date is None: - raise ValueError( - "A revoked certificate must have a revocation date" - ) - return _RawRevokedCertificate( - self._serial_number, - self._revocation_date, - Extensions(self._extensions), - ) - - -def random_serial_number() -> int: - return int.from_bytes(os.urandom(20), "big") >> 1 diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/certificate_transparency.py b/venv/lib/python3.10/site-packages/cryptography/x509/certificate_transparency.py deleted file mode 100644 index fb66cc6..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/x509/certificate_transparency.py +++ /dev/null @@ -1,35 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography import utils -from cryptography.hazmat.bindings._rust import x509 as rust_x509 - - -class LogEntryType(utils.Enum): - X509_CERTIFICATE = 0 - PRE_CERTIFICATE = 1 - - -class Version(utils.Enum): - v1 = 0 - - -class SignatureAlgorithm(utils.Enum): - """ - Signature algorithms that are valid for SCTs. - - These are exactly the same as SignatureAlgorithm in RFC 5246 (TLS 1.2). - - See: - """ - - ANONYMOUS = 0 - RSA = 1 - DSA = 2 - ECDSA = 3 - - -SignedCertificateTimestamp = rust_x509.Sct diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/extensions.py b/venv/lib/python3.10/site-packages/cryptography/x509/extensions.py deleted file mode 100644 index dfa472d..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/x509/extensions.py +++ /dev/null @@ -1,2528 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
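[Reference note, not part of the patch: the x509/base.py module deleted above provides the builder classes (CertificateBuilder, CertificateSigningRequestBuilder, CRL builders) and random_serial_number(). A minimal self-signed certificate built with that public API via the pip-installed package; the key size, validity window and subject name below are illustrative choices:]

    # Sketch only: self-signed certificate using the builder API removed above.
    import datetime
    from cryptography import x509
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example.local")])
    now = datetime.datetime.now(datetime.timezone.utc)

    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)                       # self-signed: issuer == subject
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(now)
        .not_valid_after(now + datetime.timedelta(days=365))
        .add_extension(x509.BasicConstraints(ca=True, path_length=None), critical=True)
        .sign(key, hashes.SHA256())
    )

    pem = cert.public_bytes(serialization.Encoding.PEM)
    reloaded = x509.load_pem_x509_certificate(pem)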
- -from __future__ import annotations - -import abc -import datetime -import hashlib -import ipaddress -import typing -from collections.abc import Iterable, Iterator - -from cryptography import utils -from cryptography.hazmat.bindings._rust import asn1 -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.hazmat.primitives import constant_time, serialization -from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey -from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey -from cryptography.hazmat.primitives.asymmetric.types import ( - CertificateIssuerPublicKeyTypes, - CertificatePublicKeyTypes, -) -from cryptography.x509.certificate_transparency import ( - SignedCertificateTimestamp, -) -from cryptography.x509.general_name import ( - DirectoryName, - DNSName, - GeneralName, - IPAddress, - OtherName, - RegisteredID, - RFC822Name, - UniformResourceIdentifier, - _IPAddressTypes, -) -from cryptography.x509.name import Name, RelativeDistinguishedName -from cryptography.x509.oid import ( - CRLEntryExtensionOID, - ExtensionOID, - ObjectIdentifier, - OCSPExtensionOID, -) - -ExtensionTypeVar = typing.TypeVar( - "ExtensionTypeVar", bound="ExtensionType", covariant=True -) - - -def _key_identifier_from_public_key( - public_key: CertificatePublicKeyTypes, -) -> bytes: - if isinstance(public_key, RSAPublicKey): - data = public_key.public_bytes( - serialization.Encoding.DER, - serialization.PublicFormat.PKCS1, - ) - elif isinstance(public_key, EllipticCurvePublicKey): - data = public_key.public_bytes( - serialization.Encoding.X962, - serialization.PublicFormat.UncompressedPoint, - ) - else: - # This is a very slow way to do this. - serialized = public_key.public_bytes( - serialization.Encoding.DER, - serialization.PublicFormat.SubjectPublicKeyInfo, - ) - data = asn1.parse_spki_for_data(serialized) - - return hashlib.sha1(data).digest() - - -def _make_sequence_methods(field_name: str): - def len_method(self) -> int: - return len(getattr(self, field_name)) - - def iter_method(self): - return iter(getattr(self, field_name)) - - def getitem_method(self, idx): - return getattr(self, field_name)[idx] - - return len_method, iter_method, getitem_method - - -class DuplicateExtension(Exception): - def __init__(self, msg: str, oid: ObjectIdentifier) -> None: - super().__init__(msg) - self.oid = oid - - -class ExtensionNotFound(Exception): - def __init__(self, msg: str, oid: ObjectIdentifier) -> None: - super().__init__(msg) - self.oid = oid - - -class ExtensionType(metaclass=abc.ABCMeta): - oid: typing.ClassVar[ObjectIdentifier] - - def public_bytes(self) -> bytes: - """ - Serializes the extension type to DER. - """ - raise NotImplementedError( - f"public_bytes is not implemented for extension type {self!r}" - ) - - -class Extensions: - def __init__(self, extensions: Iterable[Extension[ExtensionType]]) -> None: - self._extensions = list(extensions) - - def get_extension_for_oid( - self, oid: ObjectIdentifier - ) -> Extension[ExtensionType]: - for ext in self: - if ext.oid == oid: - return ext - - raise ExtensionNotFound(f"No {oid} extension was found", oid) - - def get_extension_for_class( - self, extclass: type[ExtensionTypeVar] - ) -> Extension[ExtensionTypeVar]: - if extclass is UnrecognizedExtension: - raise TypeError( - "UnrecognizedExtension can't be used with " - "get_extension_for_class because more than one instance of the" - " class may be present." 
- ) - - for ext in self: - if isinstance(ext.value, extclass): - return ext - - raise ExtensionNotFound( - f"No {extclass} extension was found", extclass.oid - ) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions") - - def __repr__(self) -> str: - return f"" - - -class CRLNumber(ExtensionType): - oid = ExtensionOID.CRL_NUMBER - - def __init__(self, crl_number: int) -> None: - if not isinstance(crl_number, int): - raise TypeError("crl_number must be an integer") - - self._crl_number = crl_number - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CRLNumber): - return NotImplemented - - return self.crl_number == other.crl_number - - def __hash__(self) -> int: - return hash(self.crl_number) - - def __repr__(self) -> str: - return f"" - - @property - def crl_number(self) -> int: - return self._crl_number - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class AuthorityKeyIdentifier(ExtensionType): - oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER - - def __init__( - self, - key_identifier: bytes | None, - authority_cert_issuer: Iterable[GeneralName] | None, - authority_cert_serial_number: int | None, - ) -> None: - if (authority_cert_issuer is None) != ( - authority_cert_serial_number is None - ): - raise ValueError( - "authority_cert_issuer and authority_cert_serial_number " - "must both be present or both None" - ) - - if authority_cert_issuer is not None: - authority_cert_issuer = list(authority_cert_issuer) - if not all( - isinstance(x, GeneralName) for x in authority_cert_issuer - ): - raise TypeError( - "authority_cert_issuer must be a list of GeneralName " - "objects" - ) - - if authority_cert_serial_number is not None and not isinstance( - authority_cert_serial_number, int - ): - raise TypeError("authority_cert_serial_number must be an integer") - - self._key_identifier = key_identifier - self._authority_cert_issuer = authority_cert_issuer - self._authority_cert_serial_number = authority_cert_serial_number - - # This takes a subset of CertificatePublicKeyTypes because an issuer - # cannot have an X25519/X448 key. This introduces some unfortunate - # asymmetry that requires typing users to explicitly - # narrow their type, but we should make this accurate and not just - # convenient. 
- @classmethod - def from_issuer_public_key( - cls, public_key: CertificateIssuerPublicKeyTypes - ) -> AuthorityKeyIdentifier: - digest = _key_identifier_from_public_key(public_key) - return cls( - key_identifier=digest, - authority_cert_issuer=None, - authority_cert_serial_number=None, - ) - - @classmethod - def from_issuer_subject_key_identifier( - cls, ski: SubjectKeyIdentifier - ) -> AuthorityKeyIdentifier: - return cls( - key_identifier=ski.digest, - authority_cert_issuer=None, - authority_cert_serial_number=None, - ) - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, AuthorityKeyIdentifier): - return NotImplemented - - return ( - self.key_identifier == other.key_identifier - and self.authority_cert_issuer == other.authority_cert_issuer - and self.authority_cert_serial_number - == other.authority_cert_serial_number - ) - - def __hash__(self) -> int: - if self.authority_cert_issuer is None: - aci = None - else: - aci = tuple(self.authority_cert_issuer) - return hash( - (self.key_identifier, aci, self.authority_cert_serial_number) - ) - - @property - def key_identifier(self) -> bytes | None: - return self._key_identifier - - @property - def authority_cert_issuer( - self, - ) -> list[GeneralName] | None: - return self._authority_cert_issuer - - @property - def authority_cert_serial_number(self) -> int | None: - return self._authority_cert_serial_number - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class SubjectKeyIdentifier(ExtensionType): - oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER - - def __init__(self, digest: bytes) -> None: - self._digest = digest - - @classmethod - def from_public_key( - cls, public_key: CertificatePublicKeyTypes - ) -> SubjectKeyIdentifier: - return cls(_key_identifier_from_public_key(public_key)) - - @property - def digest(self) -> bytes: - return self._digest - - @property - def key_identifier(self) -> bytes: - return self._digest - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, SubjectKeyIdentifier): - return NotImplemented - - return constant_time.bytes_eq(self.digest, other.digest) - - def __hash__(self) -> int: - return hash(self.digest) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class AuthorityInformationAccess(ExtensionType): - oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS - - def __init__(self, descriptions: Iterable[AccessDescription]) -> None: - descriptions = list(descriptions) - if not all(isinstance(x, AccessDescription) for x in descriptions): - raise TypeError( - "Every item in the descriptions list must be an " - "AccessDescription" - ) - - self._descriptions = descriptions - - __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, AuthorityInformationAccess): - return NotImplemented - - return self._descriptions == other._descriptions - - def __hash__(self) -> int: - return hash(tuple(self._descriptions)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class SubjectInformationAccess(ExtensionType): - oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS - - def __init__(self, descriptions: Iterable[AccessDescription]) -> None: - descriptions = list(descriptions) - if not all(isinstance(x, AccessDescription) for x in descriptions): - raise TypeError( - 
"Every item in the descriptions list must be an " - "AccessDescription" - ) - - self._descriptions = descriptions - - __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, SubjectInformationAccess): - return NotImplemented - - return self._descriptions == other._descriptions - - def __hash__(self) -> int: - return hash(tuple(self._descriptions)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class AccessDescription: - def __init__( - self, access_method: ObjectIdentifier, access_location: GeneralName - ) -> None: - if not isinstance(access_method, ObjectIdentifier): - raise TypeError("access_method must be an ObjectIdentifier") - - if not isinstance(access_location, GeneralName): - raise TypeError("access_location must be a GeneralName") - - self._access_method = access_method - self._access_location = access_location - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, AccessDescription): - return NotImplemented - - return ( - self.access_method == other.access_method - and self.access_location == other.access_location - ) - - def __hash__(self) -> int: - return hash((self.access_method, self.access_location)) - - @property - def access_method(self) -> ObjectIdentifier: - return self._access_method - - @property - def access_location(self) -> GeneralName: - return self._access_location - - -class BasicConstraints(ExtensionType): - oid = ExtensionOID.BASIC_CONSTRAINTS - - def __init__(self, ca: bool, path_length: int | None) -> None: - if not isinstance(ca, bool): - raise TypeError("ca must be a boolean value") - - if path_length is not None and not ca: - raise ValueError("path_length must be None when ca is False") - - if path_length is not None and ( - not isinstance(path_length, int) or path_length < 0 - ): - raise TypeError( - "path_length must be a non-negative integer or None" - ) - - self._ca = ca - self._path_length = path_length - - @property - def ca(self) -> bool: - return self._ca - - @property - def path_length(self) -> int | None: - return self._path_length - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, BasicConstraints): - return NotImplemented - - return self.ca == other.ca and self.path_length == other.path_length - - def __hash__(self) -> int: - return hash((self.ca, self.path_length)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class DeltaCRLIndicator(ExtensionType): - oid = ExtensionOID.DELTA_CRL_INDICATOR - - def __init__(self, crl_number: int) -> None: - if not isinstance(crl_number, int): - raise TypeError("crl_number must be an integer") - - self._crl_number = crl_number - - @property - def crl_number(self) -> int: - return self._crl_number - - def __eq__(self, other: object) -> bool: - if not isinstance(other, DeltaCRLIndicator): - return NotImplemented - - return self.crl_number == other.crl_number - - def __hash__(self) -> int: - return hash(self.crl_number) - - def __repr__(self) -> str: - return f"" - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class CRLDistributionPoints(ExtensionType): - oid = ExtensionOID.CRL_DISTRIBUTION_POINTS - - def __init__( - self, distribution_points: Iterable[DistributionPoint] - ) -> None: - distribution_points = 
list(distribution_points) - if not all( - isinstance(x, DistributionPoint) for x in distribution_points - ): - raise TypeError( - "distribution_points must be a list of DistributionPoint " - "objects" - ) - - self._distribution_points = distribution_points - - __len__, __iter__, __getitem__ = _make_sequence_methods( - "_distribution_points" - ) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CRLDistributionPoints): - return NotImplemented - - return self._distribution_points == other._distribution_points - - def __hash__(self) -> int: - return hash(tuple(self._distribution_points)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class FreshestCRL(ExtensionType): - oid = ExtensionOID.FRESHEST_CRL - - def __init__( - self, distribution_points: Iterable[DistributionPoint] - ) -> None: - distribution_points = list(distribution_points) - if not all( - isinstance(x, DistributionPoint) for x in distribution_points - ): - raise TypeError( - "distribution_points must be a list of DistributionPoint " - "objects" - ) - - self._distribution_points = distribution_points - - __len__, __iter__, __getitem__ = _make_sequence_methods( - "_distribution_points" - ) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, FreshestCRL): - return NotImplemented - - return self._distribution_points == other._distribution_points - - def __hash__(self) -> int: - return hash(tuple(self._distribution_points)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class DistributionPoint: - def __init__( - self, - full_name: Iterable[GeneralName] | None, - relative_name: RelativeDistinguishedName | None, - reasons: frozenset[ReasonFlags] | None, - crl_issuer: Iterable[GeneralName] | None, - ) -> None: - if full_name and relative_name: - raise ValueError( - "You cannot provide both full_name and relative_name, at " - "least one must be None." - ) - if not full_name and not relative_name and not crl_issuer: - raise ValueError( - "Either full_name, relative_name or crl_issuer must be " - "provided." 
- ) - - if full_name is not None: - full_name = list(full_name) - if not all(isinstance(x, GeneralName) for x in full_name): - raise TypeError( - "full_name must be a list of GeneralName objects" - ) - - if relative_name: - if not isinstance(relative_name, RelativeDistinguishedName): - raise TypeError( - "relative_name must be a RelativeDistinguishedName" - ) - - if crl_issuer is not None: - crl_issuer = list(crl_issuer) - if not all(isinstance(x, GeneralName) for x in crl_issuer): - raise TypeError( - "crl_issuer must be None or a list of general names" - ) - - if reasons and ( - not isinstance(reasons, frozenset) - or not all(isinstance(x, ReasonFlags) for x in reasons) - ): - raise TypeError("reasons must be None or frozenset of ReasonFlags") - - if reasons and ( - ReasonFlags.unspecified in reasons - or ReasonFlags.remove_from_crl in reasons - ): - raise ValueError( - "unspecified and remove_from_crl are not valid reasons in a " - "DistributionPoint" - ) - - self._full_name = full_name - self._relative_name = relative_name - self._reasons = reasons - self._crl_issuer = crl_issuer - - def __repr__(self) -> str: - return ( - "".format(self) - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, DistributionPoint): - return NotImplemented - - return ( - self.full_name == other.full_name - and self.relative_name == other.relative_name - and self.reasons == other.reasons - and self.crl_issuer == other.crl_issuer - ) - - def __hash__(self) -> int: - if self.full_name is not None: - fn: tuple[GeneralName, ...] | None = tuple(self.full_name) - else: - fn = None - - if self.crl_issuer is not None: - crl_issuer: tuple[GeneralName, ...] | None = tuple(self.crl_issuer) - else: - crl_issuer = None - - return hash((fn, self.relative_name, self.reasons, crl_issuer)) - - @property - def full_name(self) -> list[GeneralName] | None: - return self._full_name - - @property - def relative_name(self) -> RelativeDistinguishedName | None: - return self._relative_name - - @property - def reasons(self) -> frozenset[ReasonFlags] | None: - return self._reasons - - @property - def crl_issuer(self) -> list[GeneralName] | None: - return self._crl_issuer - - -class ReasonFlags(utils.Enum): - unspecified = "unspecified" - key_compromise = "keyCompromise" - ca_compromise = "cACompromise" - affiliation_changed = "affiliationChanged" - superseded = "superseded" - cessation_of_operation = "cessationOfOperation" - certificate_hold = "certificateHold" - privilege_withdrawn = "privilegeWithdrawn" - aa_compromise = "aACompromise" - remove_from_crl = "removeFromCRL" - - -# These are distribution point bit string mappings. Not to be confused with -# CRLReason reason flags bit string mappings. 
-# ReasonFlags ::= BIT STRING { -# unused (0), -# keyCompromise (1), -# cACompromise (2), -# affiliationChanged (3), -# superseded (4), -# cessationOfOperation (5), -# certificateHold (6), -# privilegeWithdrawn (7), -# aACompromise (8) } -_REASON_BIT_MAPPING = { - 1: ReasonFlags.key_compromise, - 2: ReasonFlags.ca_compromise, - 3: ReasonFlags.affiliation_changed, - 4: ReasonFlags.superseded, - 5: ReasonFlags.cessation_of_operation, - 6: ReasonFlags.certificate_hold, - 7: ReasonFlags.privilege_withdrawn, - 8: ReasonFlags.aa_compromise, -} - -_CRLREASONFLAGS = { - ReasonFlags.key_compromise: 1, - ReasonFlags.ca_compromise: 2, - ReasonFlags.affiliation_changed: 3, - ReasonFlags.superseded: 4, - ReasonFlags.cessation_of_operation: 5, - ReasonFlags.certificate_hold: 6, - ReasonFlags.privilege_withdrawn: 7, - ReasonFlags.aa_compromise: 8, -} - -# CRLReason ::= ENUMERATED { -# unspecified (0), -# keyCompromise (1), -# cACompromise (2), -# affiliationChanged (3), -# superseded (4), -# cessationOfOperation (5), -# certificateHold (6), -# -- value 7 is not used -# removeFromCRL (8), -# privilegeWithdrawn (9), -# aACompromise (10) } -_CRL_ENTRY_REASON_ENUM_TO_CODE = { - ReasonFlags.unspecified: 0, - ReasonFlags.key_compromise: 1, - ReasonFlags.ca_compromise: 2, - ReasonFlags.affiliation_changed: 3, - ReasonFlags.superseded: 4, - ReasonFlags.cessation_of_operation: 5, - ReasonFlags.certificate_hold: 6, - ReasonFlags.remove_from_crl: 8, - ReasonFlags.privilege_withdrawn: 9, - ReasonFlags.aa_compromise: 10, -} - - -class PolicyConstraints(ExtensionType): - oid = ExtensionOID.POLICY_CONSTRAINTS - - def __init__( - self, - require_explicit_policy: int | None, - inhibit_policy_mapping: int | None, - ) -> None: - if require_explicit_policy is not None and not isinstance( - require_explicit_policy, int - ): - raise TypeError( - "require_explicit_policy must be a non-negative integer or " - "None" - ) - - if inhibit_policy_mapping is not None and not isinstance( - inhibit_policy_mapping, int - ): - raise TypeError( - "inhibit_policy_mapping must be a non-negative integer or None" - ) - - if inhibit_policy_mapping is None and require_explicit_policy is None: - raise ValueError( - "At least one of require_explicit_policy and " - "inhibit_policy_mapping must not be None" - ) - - self._require_explicit_policy = require_explicit_policy - self._inhibit_policy_mapping = inhibit_policy_mapping - - def __repr__(self) -> str: - return ( - "".format(self) - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PolicyConstraints): - return NotImplemented - - return ( - self.require_explicit_policy == other.require_explicit_policy - and self.inhibit_policy_mapping == other.inhibit_policy_mapping - ) - - def __hash__(self) -> int: - return hash( - (self.require_explicit_policy, self.inhibit_policy_mapping) - ) - - @property - def require_explicit_policy(self) -> int | None: - return self._require_explicit_policy - - @property - def inhibit_policy_mapping(self) -> int | None: - return self._inhibit_policy_mapping - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class CertificatePolicies(ExtensionType): - oid = ExtensionOID.CERTIFICATE_POLICIES - - def __init__(self, policies: Iterable[PolicyInformation]) -> None: - policies = list(policies) - if not all(isinstance(x, PolicyInformation) for x in policies): - raise TypeError( - "Every item in the policies list must be a PolicyInformation" - ) - - self._policies = policies - - __len__, __iter__, __getitem__ = 
_make_sequence_methods("_policies") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CertificatePolicies): - return NotImplemented - - return self._policies == other._policies - - def __hash__(self) -> int: - return hash(tuple(self._policies)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class PolicyInformation: - def __init__( - self, - policy_identifier: ObjectIdentifier, - policy_qualifiers: Iterable[str | UserNotice] | None, - ) -> None: - if not isinstance(policy_identifier, ObjectIdentifier): - raise TypeError("policy_identifier must be an ObjectIdentifier") - - self._policy_identifier = policy_identifier - - if policy_qualifiers is not None: - policy_qualifiers = list(policy_qualifiers) - if not all( - isinstance(x, (str, UserNotice)) for x in policy_qualifiers - ): - raise TypeError( - "policy_qualifiers must be a list of strings and/or " - "UserNotice objects or None" - ) - - self._policy_qualifiers = policy_qualifiers - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PolicyInformation): - return NotImplemented - - return ( - self.policy_identifier == other.policy_identifier - and self.policy_qualifiers == other.policy_qualifiers - ) - - def __hash__(self) -> int: - if self.policy_qualifiers is not None: - pq = tuple(self.policy_qualifiers) - else: - pq = None - - return hash((self.policy_identifier, pq)) - - @property - def policy_identifier(self) -> ObjectIdentifier: - return self._policy_identifier - - @property - def policy_qualifiers( - self, - ) -> list[str | UserNotice] | None: - return self._policy_qualifiers - - -class UserNotice: - def __init__( - self, - notice_reference: NoticeReference | None, - explicit_text: str | None, - ) -> None: - if notice_reference and not isinstance( - notice_reference, NoticeReference - ): - raise TypeError( - "notice_reference must be None or a NoticeReference" - ) - - self._notice_reference = notice_reference - self._explicit_text = explicit_text - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, UserNotice): - return NotImplemented - - return ( - self.notice_reference == other.notice_reference - and self.explicit_text == other.explicit_text - ) - - def __hash__(self) -> int: - return hash((self.notice_reference, self.explicit_text)) - - @property - def notice_reference(self) -> NoticeReference | None: - return self._notice_reference - - @property - def explicit_text(self) -> str | None: - return self._explicit_text - - -class NoticeReference: - def __init__( - self, - organization: str | None, - notice_numbers: Iterable[int], - ) -> None: - self._organization = organization - notice_numbers = list(notice_numbers) - if not all(isinstance(x, int) for x in notice_numbers): - raise TypeError("notice_numbers must be a list of integers") - - self._notice_numbers = notice_numbers - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, NoticeReference): - return NotImplemented - - return ( - self.organization == other.organization - and self.notice_numbers == other.notice_numbers - ) - - def __hash__(self) -> int: - return hash((self.organization, tuple(self.notice_numbers))) - - @property - def organization(self) -> str | None: - return self._organization - - @property - def notice_numbers(self) -> list[int]: - return 
self._notice_numbers - - -class ExtendedKeyUsage(ExtensionType): - oid = ExtensionOID.EXTENDED_KEY_USAGE - - def __init__(self, usages: Iterable[ObjectIdentifier]) -> None: - usages = list(usages) - if not all(isinstance(x, ObjectIdentifier) for x in usages): - raise TypeError( - "Every item in the usages list must be an ObjectIdentifier" - ) - - self._usages = usages - - __len__, __iter__, __getitem__ = _make_sequence_methods("_usages") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, ExtendedKeyUsage): - return NotImplemented - - return self._usages == other._usages - - def __hash__(self) -> int: - return hash(tuple(self._usages)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class OCSPNoCheck(ExtensionType): - oid = ExtensionOID.OCSP_NO_CHECK - - def __eq__(self, other: object) -> bool: - if not isinstance(other, OCSPNoCheck): - return NotImplemented - - return True - - def __hash__(self) -> int: - return hash(OCSPNoCheck) - - def __repr__(self) -> str: - return "" - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class PrecertPoison(ExtensionType): - oid = ExtensionOID.PRECERT_POISON - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PrecertPoison): - return NotImplemented - - return True - - def __hash__(self) -> int: - return hash(PrecertPoison) - - def __repr__(self) -> str: - return "" - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class TLSFeature(ExtensionType): - oid = ExtensionOID.TLS_FEATURE - - def __init__(self, features: Iterable[TLSFeatureType]) -> None: - features = list(features) - if ( - not all(isinstance(x, TLSFeatureType) for x in features) - or len(features) == 0 - ): - raise TypeError( - "features must be a list of elements from the TLSFeatureType " - "enum" - ) - - self._features = features - - __len__, __iter__, __getitem__ = _make_sequence_methods("_features") - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, TLSFeature): - return NotImplemented - - return self._features == other._features - - def __hash__(self) -> int: - return hash(tuple(self._features)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class TLSFeatureType(utils.Enum): - # status_request is defined in RFC 6066 and is used for what is commonly - # called OCSP Must-Staple when present in the TLS Feature extension in an - # X.509 certificate. - status_request = 5 - # status_request_v2 is defined in RFC 6961 and allows multiple OCSP - # responses to be provided. It is not currently in use by clients or - # servers. 
- status_request_v2 = 17 - - -_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType} - - -class InhibitAnyPolicy(ExtensionType): - oid = ExtensionOID.INHIBIT_ANY_POLICY - - def __init__(self, skip_certs: int) -> None: - if not isinstance(skip_certs, int): - raise TypeError("skip_certs must be an integer") - - if skip_certs < 0: - raise ValueError("skip_certs must be a non-negative integer") - - self._skip_certs = skip_certs - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, InhibitAnyPolicy): - return NotImplemented - - return self.skip_certs == other.skip_certs - - def __hash__(self) -> int: - return hash(self.skip_certs) - - @property - def skip_certs(self) -> int: - return self._skip_certs - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class KeyUsage(ExtensionType): - oid = ExtensionOID.KEY_USAGE - - def __init__( - self, - digital_signature: bool, - content_commitment: bool, - key_encipherment: bool, - data_encipherment: bool, - key_agreement: bool, - key_cert_sign: bool, - crl_sign: bool, - encipher_only: bool, - decipher_only: bool, - ) -> None: - if not key_agreement and (encipher_only or decipher_only): - raise ValueError( - "encipher_only and decipher_only can only be true when " - "key_agreement is true" - ) - - self._digital_signature = digital_signature - self._content_commitment = content_commitment - self._key_encipherment = key_encipherment - self._data_encipherment = data_encipherment - self._key_agreement = key_agreement - self._key_cert_sign = key_cert_sign - self._crl_sign = crl_sign - self._encipher_only = encipher_only - self._decipher_only = decipher_only - - @property - def digital_signature(self) -> bool: - return self._digital_signature - - @property - def content_commitment(self) -> bool: - return self._content_commitment - - @property - def key_encipherment(self) -> bool: - return self._key_encipherment - - @property - def data_encipherment(self) -> bool: - return self._data_encipherment - - @property - def key_agreement(self) -> bool: - return self._key_agreement - - @property - def key_cert_sign(self) -> bool: - return self._key_cert_sign - - @property - def crl_sign(self) -> bool: - return self._crl_sign - - @property - def encipher_only(self) -> bool: - if not self.key_agreement: - raise ValueError( - "encipher_only is undefined unless key_agreement is true" - ) - else: - return self._encipher_only - - @property - def decipher_only(self) -> bool: - if not self.key_agreement: - raise ValueError( - "decipher_only is undefined unless key_agreement is true" - ) - else: - return self._decipher_only - - def __repr__(self) -> str: - try: - encipher_only = self.encipher_only - decipher_only = self.decipher_only - except ValueError: - # Users found None confusing because even though encipher/decipher - # have no meaning unless key_agreement is true, to construct an - # instance of the class you still need to pass False. 
- encipher_only = False - decipher_only = False - - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, KeyUsage): - return NotImplemented - - return ( - self.digital_signature == other.digital_signature - and self.content_commitment == other.content_commitment - and self.key_encipherment == other.key_encipherment - and self.data_encipherment == other.data_encipherment - and self.key_agreement == other.key_agreement - and self.key_cert_sign == other.key_cert_sign - and self.crl_sign == other.crl_sign - and self._encipher_only == other._encipher_only - and self._decipher_only == other._decipher_only - ) - - def __hash__(self) -> int: - return hash( - ( - self.digital_signature, - self.content_commitment, - self.key_encipherment, - self.data_encipherment, - self.key_agreement, - self.key_cert_sign, - self.crl_sign, - self._encipher_only, - self._decipher_only, - ) - ) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class PrivateKeyUsagePeriod(ExtensionType): - oid = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD - - def __init__( - self, - not_before: datetime.datetime | None, - not_after: datetime.datetime | None, - ) -> None: - if ( - not isinstance(not_before, datetime.datetime) - and not_before is not None - ): - raise TypeError("not_before must be a datetime.datetime or None") - - if ( - not isinstance(not_after, datetime.datetime) - and not_after is not None - ): - raise TypeError("not_after must be a datetime.datetime or None") - - if not_before is None and not_after is None: - raise ValueError( - "At least one of not_before and not_after must not be None" - ) - - if ( - not_before is not None - and not_after is not None - and not_before > not_after - ): - raise ValueError("not_before must be before not_after") - - self._not_before = not_before - self._not_after = not_after - - @property - def not_before(self) -> datetime.datetime | None: - return self._not_before - - @property - def not_after(self) -> datetime.datetime | None: - return self._not_after - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PrivateKeyUsagePeriod): - return NotImplemented - - return ( - self.not_before == other.not_before - and self.not_after == other.not_after - ) - - def __hash__(self) -> int: - return hash((self.not_before, self.not_after)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class NameConstraints(ExtensionType): - oid = ExtensionOID.NAME_CONSTRAINTS - - def __init__( - self, - permitted_subtrees: Iterable[GeneralName] | None, - excluded_subtrees: Iterable[GeneralName] | None, - ) -> None: - if permitted_subtrees is not None: - permitted_subtrees = list(permitted_subtrees) - if not permitted_subtrees: - raise ValueError( - "permitted_subtrees must be a non-empty list or None" - ) - if not all(isinstance(x, GeneralName) for x in permitted_subtrees): - raise TypeError( - "permitted_subtrees must be a list of GeneralName objects " - "or None" - ) - - self._validate_tree(permitted_subtrees) - - if excluded_subtrees is not None: - excluded_subtrees = list(excluded_subtrees) - if not excluded_subtrees: - raise ValueError( - "excluded_subtrees must be a non-empty list or None" - ) - if not all(isinstance(x, GeneralName) for x in excluded_subtrees): - raise TypeError( - "excluded_subtrees must be a list of GeneralName objects " - "or None" - ) - - self._validate_tree(excluded_subtrees) - - if permitted_subtrees is 
None and excluded_subtrees is None: - raise ValueError( - "At least one of permitted_subtrees and excluded_subtrees " - "must not be None" - ) - - self._permitted_subtrees = permitted_subtrees - self._excluded_subtrees = excluded_subtrees - - def __eq__(self, other: object) -> bool: - if not isinstance(other, NameConstraints): - return NotImplemented - - return ( - self.excluded_subtrees == other.excluded_subtrees - and self.permitted_subtrees == other.permitted_subtrees - ) - - def _validate_tree(self, tree: Iterable[GeneralName]) -> None: - self._validate_ip_name(tree) - self._validate_dns_name(tree) - - def _validate_ip_name(self, tree: Iterable[GeneralName]) -> None: - if any( - isinstance(name, IPAddress) - and not isinstance( - name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network) - ) - for name in tree - ): - raise TypeError( - "IPAddress name constraints must be an IPv4Network or" - " IPv6Network object" - ) - - def _validate_dns_name(self, tree: Iterable[GeneralName]) -> None: - if any( - isinstance(name, DNSName) and "*" in name.value for name in tree - ): - raise ValueError( - "DNSName name constraints must not contain the '*' wildcard" - " character" - ) - - def __repr__(self) -> str: - return ( - f"" - ) - - def __hash__(self) -> int: - if self.permitted_subtrees is not None: - ps: tuple[GeneralName, ...] | None = tuple(self.permitted_subtrees) - else: - ps = None - - if self.excluded_subtrees is not None: - es: tuple[GeneralName, ...] | None = tuple(self.excluded_subtrees) - else: - es = None - - return hash((ps, es)) - - @property - def permitted_subtrees( - self, - ) -> list[GeneralName] | None: - return self._permitted_subtrees - - @property - def excluded_subtrees( - self, - ) -> list[GeneralName] | None: - return self._excluded_subtrees - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class Extension(typing.Generic[ExtensionTypeVar]): - def __init__( - self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar - ) -> None: - if not isinstance(oid, ObjectIdentifier): - raise TypeError( - "oid argument must be an ObjectIdentifier instance." - ) - - if not isinstance(critical, bool): - raise TypeError("critical must be a boolean value") - - self._oid = oid - self._critical = critical - self._value = value - - @property - def oid(self) -> ObjectIdentifier: - return self._oid - - @property - def critical(self) -> bool: - return self._critical - - @property - def value(self) -> ExtensionTypeVar: - return self._value - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Extension): - return NotImplemented - - return ( - self.oid == other.oid - and self.critical == other.critical - and self.value == other.value - ) - - def __hash__(self) -> int: - return hash((self.oid, self.critical, self.value)) - - -class GeneralNames: - def __init__(self, general_names: Iterable[GeneralName]) -> None: - general_names = list(general_names) - if not all(isinstance(x, GeneralName) for x in general_names): - raise TypeError( - "Every item in the general_names list must be an " - "object conforming to the GeneralName interface" - ) - - self._general_names = general_names - - __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") - - @typing.overload - def get_values_for_type( - self, - type: type[DNSName] - | type[UniformResourceIdentifier] - | type[RFC822Name], - ) -> list[str]: ... 
- - @typing.overload - def get_values_for_type( - self, - type: type[DirectoryName], - ) -> list[Name]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[RegisteredID], - ) -> list[ObjectIdentifier]: ... - - @typing.overload - def get_values_for_type( - self, type: type[IPAddress] - ) -> list[_IPAddressTypes]: ... - - @typing.overload - def get_values_for_type( - self, type: type[OtherName] - ) -> list[OtherName]: ... - - def get_values_for_type( - self, - type: type[DNSName] - | type[DirectoryName] - | type[IPAddress] - | type[OtherName] - | type[RFC822Name] - | type[RegisteredID] - | type[UniformResourceIdentifier], - ) -> ( - list[_IPAddressTypes] - | list[str] - | list[OtherName] - | list[Name] - | list[ObjectIdentifier] - ): - # Return the value of each GeneralName, except for OtherName instances - # which we return directly because it has two important properties not - # just one value. - objs = (i for i in self if isinstance(i, type)) - if type != OtherName: - return [i.value for i in objs] - return list(objs) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, GeneralNames): - return NotImplemented - - return self._general_names == other._general_names - - def __hash__(self) -> int: - return hash(tuple(self._general_names)) - - -class SubjectAlternativeName(ExtensionType): - oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME - - def __init__(self, general_names: Iterable[GeneralName]) -> None: - self._general_names = GeneralNames(general_names) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") - - @typing.overload - def get_values_for_type( - self, - type: type[DNSName] - | type[UniformResourceIdentifier] - | type[RFC822Name], - ) -> list[str]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[DirectoryName], - ) -> list[Name]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[RegisteredID], - ) -> list[ObjectIdentifier]: ... - - @typing.overload - def get_values_for_type( - self, type: type[IPAddress] - ) -> list[_IPAddressTypes]: ... - - @typing.overload - def get_values_for_type( - self, type: type[OtherName] - ) -> list[OtherName]: ... - - def get_values_for_type( - self, - type: type[DNSName] - | type[DirectoryName] - | type[IPAddress] - | type[OtherName] - | type[RFC822Name] - | type[RegisteredID] - | type[UniformResourceIdentifier], - ) -> ( - list[_IPAddressTypes] - | list[str] - | list[OtherName] - | list[Name] - | list[ObjectIdentifier] - ): - return self._general_names.get_values_for_type(type) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, SubjectAlternativeName): - return NotImplemented - - return self._general_names == other._general_names - - def __hash__(self) -> int: - return hash(self._general_names) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class IssuerAlternativeName(ExtensionType): - oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME - - def __init__(self, general_names: Iterable[GeneralName]) -> None: - self._general_names = GeneralNames(general_names) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") - - @typing.overload - def get_values_for_type( - self, - type: type[DNSName] - | type[UniformResourceIdentifier] - | type[RFC822Name], - ) -> list[str]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[DirectoryName], - ) -> list[Name]: ... 
- - @typing.overload - def get_values_for_type( - self, - type: type[RegisteredID], - ) -> list[ObjectIdentifier]: ... - - @typing.overload - def get_values_for_type( - self, type: type[IPAddress] - ) -> list[_IPAddressTypes]: ... - - @typing.overload - def get_values_for_type( - self, type: type[OtherName] - ) -> list[OtherName]: ... - - def get_values_for_type( - self, - type: type[DNSName] - | type[DirectoryName] - | type[IPAddress] - | type[OtherName] - | type[RFC822Name] - | type[RegisteredID] - | type[UniformResourceIdentifier], - ) -> ( - list[_IPAddressTypes] - | list[str] - | list[OtherName] - | list[Name] - | list[ObjectIdentifier] - ): - return self._general_names.get_values_for_type(type) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, IssuerAlternativeName): - return NotImplemented - - return self._general_names == other._general_names - - def __hash__(self) -> int: - return hash(self._general_names) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class CertificateIssuer(ExtensionType): - oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER - - def __init__(self, general_names: Iterable[GeneralName]) -> None: - self._general_names = GeneralNames(general_names) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") - - @typing.overload - def get_values_for_type( - self, - type: type[DNSName] - | type[UniformResourceIdentifier] - | type[RFC822Name], - ) -> list[str]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[DirectoryName], - ) -> list[Name]: ... - - @typing.overload - def get_values_for_type( - self, - type: type[RegisteredID], - ) -> list[ObjectIdentifier]: ... - - @typing.overload - def get_values_for_type( - self, type: type[IPAddress] - ) -> list[_IPAddressTypes]: ... - - @typing.overload - def get_values_for_type( - self, type: type[OtherName] - ) -> list[OtherName]: ... 
- - def get_values_for_type( - self, - type: type[DNSName] - | type[DirectoryName] - | type[IPAddress] - | type[OtherName] - | type[RFC822Name] - | type[RegisteredID] - | type[UniformResourceIdentifier], - ) -> ( - list[_IPAddressTypes] - | list[str] - | list[OtherName] - | list[Name] - | list[ObjectIdentifier] - ): - return self._general_names.get_values_for_type(type) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CertificateIssuer): - return NotImplemented - - return self._general_names == other._general_names - - def __hash__(self) -> int: - return hash(self._general_names) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class CRLReason(ExtensionType): - oid = CRLEntryExtensionOID.CRL_REASON - - def __init__(self, reason: ReasonFlags) -> None: - if not isinstance(reason, ReasonFlags): - raise TypeError("reason must be an element from ReasonFlags") - - self._reason = reason - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CRLReason): - return NotImplemented - - return self.reason == other.reason - - def __hash__(self) -> int: - return hash(self.reason) - - @property - def reason(self) -> ReasonFlags: - return self._reason - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class InvalidityDate(ExtensionType): - oid = CRLEntryExtensionOID.INVALIDITY_DATE - - def __init__(self, invalidity_date: datetime.datetime) -> None: - if not isinstance(invalidity_date, datetime.datetime): - raise TypeError("invalidity_date must be a datetime.datetime") - - self._invalidity_date = invalidity_date - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, InvalidityDate): - return NotImplemented - - return self.invalidity_date == other.invalidity_date - - def __hash__(self) -> int: - return hash(self.invalidity_date) - - @property - def invalidity_date(self) -> datetime.datetime: - return self._invalidity_date - - @property - def invalidity_date_utc(self) -> datetime.datetime: - if self._invalidity_date.tzinfo is None: - return self._invalidity_date.replace(tzinfo=datetime.timezone.utc) - else: - return self._invalidity_date.astimezone(tz=datetime.timezone.utc) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class PrecertificateSignedCertificateTimestamps(ExtensionType): - oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS - - def __init__( - self, - signed_certificate_timestamps: Iterable[SignedCertificateTimestamp], - ) -> None: - signed_certificate_timestamps = list(signed_certificate_timestamps) - if not all( - isinstance(sct, SignedCertificateTimestamp) - for sct in signed_certificate_timestamps - ): - raise TypeError( - "Every item in the signed_certificate_timestamps list must be " - "a SignedCertificateTimestamp" - ) - self._signed_certificate_timestamps = signed_certificate_timestamps - - __len__, __iter__, __getitem__ = _make_sequence_methods( - "_signed_certificate_timestamps" - ) - - def __repr__(self) -> str: - return f"" - - def __hash__(self) -> int: - return hash(tuple(self._signed_certificate_timestamps)) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PrecertificateSignedCertificateTimestamps): - return NotImplemented - - return ( - self._signed_certificate_timestamps - == other._signed_certificate_timestamps - ) - - def public_bytes(self) -> 
bytes: - return rust_x509.encode_extension_value(self) - - -class SignedCertificateTimestamps(ExtensionType): - oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS - - def __init__( - self, - signed_certificate_timestamps: Iterable[SignedCertificateTimestamp], - ) -> None: - signed_certificate_timestamps = list(signed_certificate_timestamps) - if not all( - isinstance(sct, SignedCertificateTimestamp) - for sct in signed_certificate_timestamps - ): - raise TypeError( - "Every item in the signed_certificate_timestamps list must be " - "a SignedCertificateTimestamp" - ) - self._signed_certificate_timestamps = signed_certificate_timestamps - - __len__, __iter__, __getitem__ = _make_sequence_methods( - "_signed_certificate_timestamps" - ) - - def __repr__(self) -> str: - return f"" - - def __hash__(self) -> int: - return hash(tuple(self._signed_certificate_timestamps)) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, SignedCertificateTimestamps): - return NotImplemented - - return ( - self._signed_certificate_timestamps - == other._signed_certificate_timestamps - ) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class OCSPNonce(ExtensionType): - oid = OCSPExtensionOID.NONCE - - def __init__(self, nonce: bytes) -> None: - if not isinstance(nonce, bytes): - raise TypeError("nonce must be bytes") - - self._nonce = nonce - - def __eq__(self, other: object) -> bool: - if not isinstance(other, OCSPNonce): - return NotImplemented - - return self.nonce == other.nonce - - def __hash__(self) -> int: - return hash(self.nonce) - - def __repr__(self) -> str: - return f"" - - @property - def nonce(self) -> bytes: - return self._nonce - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class OCSPAcceptableResponses(ExtensionType): - oid = OCSPExtensionOID.ACCEPTABLE_RESPONSES - - def __init__(self, responses: Iterable[ObjectIdentifier]) -> None: - responses = list(responses) - if any(not isinstance(r, ObjectIdentifier) for r in responses): - raise TypeError("All responses must be ObjectIdentifiers") - - self._responses = responses - - def __eq__(self, other: object) -> bool: - if not isinstance(other, OCSPAcceptableResponses): - return NotImplemented - - return self._responses == other._responses - - def __hash__(self) -> int: - return hash(tuple(self._responses)) - - def __repr__(self) -> str: - return f"" - - def __iter__(self) -> Iterator[ObjectIdentifier]: - return iter(self._responses) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class IssuingDistributionPoint(ExtensionType): - oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT - - def __init__( - self, - full_name: Iterable[GeneralName] | None, - relative_name: RelativeDistinguishedName | None, - only_contains_user_certs: bool, - only_contains_ca_certs: bool, - only_some_reasons: frozenset[ReasonFlags] | None, - indirect_crl: bool, - only_contains_attribute_certs: bool, - ) -> None: - if full_name is not None: - full_name = list(full_name) - - if only_some_reasons and ( - not isinstance(only_some_reasons, frozenset) - or not all(isinstance(x, ReasonFlags) for x in only_some_reasons) - ): - raise TypeError( - "only_some_reasons must be None or frozenset of ReasonFlags" - ) - - if only_some_reasons and ( - ReasonFlags.unspecified in only_some_reasons - or ReasonFlags.remove_from_crl in only_some_reasons - ): - raise ValueError( - "unspecified and remove_from_crl are not valid reasons in an " - 
"IssuingDistributionPoint" - ) - - if not ( - isinstance(only_contains_user_certs, bool) - and isinstance(only_contains_ca_certs, bool) - and isinstance(indirect_crl, bool) - and isinstance(only_contains_attribute_certs, bool) - ): - raise TypeError( - "only_contains_user_certs, only_contains_ca_certs, " - "indirect_crl and only_contains_attribute_certs " - "must all be boolean." - ) - - # Per RFC5280 Section 5.2.5, the Issuing Distribution Point extension - # in a CRL can have only one of onlyContainsUserCerts, - # onlyContainsCACerts, onlyContainsAttributeCerts set to TRUE. - crl_constraints = [ - only_contains_user_certs, - only_contains_ca_certs, - only_contains_attribute_certs, - ] - - if len([x for x in crl_constraints if x]) > 1: - raise ValueError( - "Only one of the following can be set to True: " - "only_contains_user_certs, only_contains_ca_certs, " - "only_contains_attribute_certs" - ) - - if not any( - [ - only_contains_user_certs, - only_contains_ca_certs, - indirect_crl, - only_contains_attribute_certs, - full_name, - relative_name, - only_some_reasons, - ] - ): - raise ValueError( - "Cannot create empty extension: " - "if only_contains_user_certs, only_contains_ca_certs, " - "indirect_crl, and only_contains_attribute_certs are all False" - ", then either full_name, relative_name, or only_some_reasons " - "must have a value." - ) - - self._only_contains_user_certs = only_contains_user_certs - self._only_contains_ca_certs = only_contains_ca_certs - self._indirect_crl = indirect_crl - self._only_contains_attribute_certs = only_contains_attribute_certs - self._only_some_reasons = only_some_reasons - self._full_name = full_name - self._relative_name = relative_name - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, IssuingDistributionPoint): - return NotImplemented - - return ( - self.full_name == other.full_name - and self.relative_name == other.relative_name - and self.only_contains_user_certs == other.only_contains_user_certs - and self.only_contains_ca_certs == other.only_contains_ca_certs - and self.only_some_reasons == other.only_some_reasons - and self.indirect_crl == other.indirect_crl - and self.only_contains_attribute_certs - == other.only_contains_attribute_certs - ) - - def __hash__(self) -> int: - return hash( - ( - self.full_name, - self.relative_name, - self.only_contains_user_certs, - self.only_contains_ca_certs, - self.only_some_reasons, - self.indirect_crl, - self.only_contains_attribute_certs, - ) - ) - - @property - def full_name(self) -> list[GeneralName] | None: - return self._full_name - - @property - def relative_name(self) -> RelativeDistinguishedName | None: - return self._relative_name - - @property - def only_contains_user_certs(self) -> bool: - return self._only_contains_user_certs - - @property - def only_contains_ca_certs(self) -> bool: - return self._only_contains_ca_certs - - @property - def only_some_reasons( - self, - ) -> frozenset[ReasonFlags] | None: - return self._only_some_reasons - - @property - def indirect_crl(self) -> bool: - return self._indirect_crl - - @property - def only_contains_attribute_certs(self) -> bool: - return self._only_contains_attribute_certs - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class MSCertificateTemplate(ExtensionType): - oid = ExtensionOID.MS_CERTIFICATE_TEMPLATE - - def __init__( - self, - template_id: ObjectIdentifier, - major_version: int | None, - minor_version: int | None, - ) -> None: - 
if not isinstance(template_id, ObjectIdentifier): - raise TypeError("oid must be an ObjectIdentifier") - self._template_id = template_id - if ( - major_version is not None and not isinstance(major_version, int) - ) or ( - minor_version is not None and not isinstance(minor_version, int) - ): - raise TypeError( - "major_version and minor_version must be integers or None" - ) - self._major_version = major_version - self._minor_version = minor_version - - @property - def template_id(self) -> ObjectIdentifier: - return self._template_id - - @property - def major_version(self) -> int | None: - return self._major_version - - @property - def minor_version(self) -> int | None: - return self._minor_version - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, MSCertificateTemplate): - return NotImplemented - - return ( - self.template_id == other.template_id - and self.major_version == other.major_version - and self.minor_version == other.minor_version - ) - - def __hash__(self) -> int: - return hash((self.template_id, self.major_version, self.minor_version)) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class NamingAuthority: - def __init__( - self, - id: ObjectIdentifier | None, - url: str | None, - text: str | None, - ) -> None: - if id is not None and not isinstance(id, ObjectIdentifier): - raise TypeError("id must be an ObjectIdentifier") - - if url is not None and not isinstance(url, str): - raise TypeError("url must be a str") - - if text is not None and not isinstance(text, str): - raise TypeError("text must be a str") - - self._id = id - self._url = url - self._text = text - - @property - def id(self) -> ObjectIdentifier | None: - return self._id - - @property - def url(self) -> str | None: - return self._url - - @property - def text(self) -> str | None: - return self._text - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, NamingAuthority): - return NotImplemented - - return ( - self.id == other.id - and self.url == other.url - and self.text == other.text - ) - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.url, - self.text, - ) - ) - - -class ProfessionInfo: - def __init__( - self, - naming_authority: NamingAuthority | None, - profession_items: Iterable[str], - profession_oids: Iterable[ObjectIdentifier] | None, - registration_number: str | None, - add_profession_info: bytes | None, - ) -> None: - if naming_authority is not None and not isinstance( - naming_authority, NamingAuthority - ): - raise TypeError("naming_authority must be a NamingAuthority") - - profession_items = list(profession_items) - if not all(isinstance(item, str) for item in profession_items): - raise TypeError( - "Every item in the profession_items list must be a str" - ) - - if profession_oids is not None: - profession_oids = list(profession_oids) - if not all( - isinstance(oid, ObjectIdentifier) for oid in profession_oids - ): - raise TypeError( - "Every item in the profession_oids list must be an " - "ObjectIdentifier" - ) - - if registration_number is not None and not isinstance( - registration_number, str - ): - raise TypeError("registration_number must be a str") - - if add_profession_info is not None and not isinstance( - add_profession_info, bytes - ): - raise TypeError("add_profession_info must be bytes") - - self._naming_authority = naming_authority - self._profession_items = profession_items - 
self._profession_oids = profession_oids - self._registration_number = registration_number - self._add_profession_info = add_profession_info - - @property - def naming_authority(self) -> NamingAuthority | None: - return self._naming_authority - - @property - def profession_items(self) -> list[str]: - return self._profession_items - - @property - def profession_oids(self) -> list[ObjectIdentifier] | None: - return self._profession_oids - - @property - def registration_number(self) -> str | None: - return self._registration_number - - @property - def add_profession_info(self) -> bytes | None: - return self._add_profession_info - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, ProfessionInfo): - return NotImplemented - - return ( - self.naming_authority == other.naming_authority - and self.profession_items == other.profession_items - and self.profession_oids == other.profession_oids - and self.registration_number == other.registration_number - and self.add_profession_info == other.add_profession_info - ) - - def __hash__(self) -> int: - if self.profession_oids is not None: - profession_oids = tuple(self.profession_oids) - else: - profession_oids = None - return hash( - ( - self.naming_authority, - tuple(self.profession_items), - profession_oids, - self.registration_number, - self.add_profession_info, - ) - ) - - -class Admission: - def __init__( - self, - admission_authority: GeneralName | None, - naming_authority: NamingAuthority | None, - profession_infos: Iterable[ProfessionInfo], - ) -> None: - if admission_authority is not None and not isinstance( - admission_authority, GeneralName - ): - raise TypeError("admission_authority must be a GeneralName") - - if naming_authority is not None and not isinstance( - naming_authority, NamingAuthority - ): - raise TypeError("naming_authority must be a NamingAuthority") - - profession_infos = list(profession_infos) - if not all( - isinstance(info, ProfessionInfo) for info in profession_infos - ): - raise TypeError( - "Every item in the profession_infos list must be a " - "ProfessionInfo" - ) - - self._admission_authority = admission_authority - self._naming_authority = naming_authority - self._profession_infos = profession_infos - - @property - def admission_authority(self) -> GeneralName | None: - return self._admission_authority - - @property - def naming_authority(self) -> NamingAuthority | None: - return self._naming_authority - - @property - def profession_infos(self) -> list[ProfessionInfo]: - return self._profession_infos - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Admission): - return NotImplemented - - return ( - self.admission_authority == other.admission_authority - and self.naming_authority == other.naming_authority - and self.profession_infos == other.profession_infos - ) - - def __hash__(self) -> int: - return hash( - ( - self.admission_authority, - self.naming_authority, - tuple(self.profession_infos), - ) - ) - - -class Admissions(ExtensionType): - oid = ExtensionOID.ADMISSIONS - - def __init__( - self, - authority: GeneralName | None, - admissions: Iterable[Admission], - ) -> None: - if authority is not None and not isinstance(authority, GeneralName): - raise TypeError("authority must be a GeneralName") - - admissions = list(admissions) - if not all( - isinstance(admission, Admission) for admission in admissions - ): - raise TypeError( - "Every item in the contents_of_admissions list 
must be an " - "Admission" - ) - - self._authority = authority - self._admissions = admissions - - __len__, __iter__, __getitem__ = _make_sequence_methods("_admissions") - - @property - def authority(self) -> GeneralName | None: - return self._authority - - def __repr__(self) -> str: - return ( - f"" - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Admissions): - return NotImplemented - - return ( - self.authority == other.authority - and self._admissions == other._admissions - ) - - def __hash__(self) -> int: - return hash((self.authority, tuple(self._admissions))) - - def public_bytes(self) -> bytes: - return rust_x509.encode_extension_value(self) - - -class UnrecognizedExtension(ExtensionType): - def __init__(self, oid: ObjectIdentifier, value: bytes) -> None: - if not isinstance(oid, ObjectIdentifier): - raise TypeError("oid must be an ObjectIdentifier") - self._oid = oid - self._value = value - - @property - def oid(self) -> ObjectIdentifier: # type: ignore[override] - return self._oid - - @property - def value(self) -> bytes: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, UnrecognizedExtension): - return NotImplemented - - return self.oid == other.oid and self.value == other.value - - def __hash__(self) -> int: - return hash((self.oid, self.value)) - - def public_bytes(self) -> bytes: - return self.value diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/general_name.py b/venv/lib/python3.10/site-packages/cryptography/x509/general_name.py deleted file mode 100644 index 672f287..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/x509/general_name.py +++ /dev/null @@ -1,281 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import ipaddress -import typing -from email.utils import parseaddr - -from cryptography.x509.name import Name -from cryptography.x509.oid import ObjectIdentifier - -_IPAddressTypes = typing.Union[ - ipaddress.IPv4Address, - ipaddress.IPv6Address, - ipaddress.IPv4Network, - ipaddress.IPv6Network, -] - - -class UnsupportedGeneralNameType(Exception): - pass - - -class GeneralName(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def value(self) -> typing.Any: - """ - Return the value of the object - """ - - -class RFC822Name(GeneralName): - def __init__(self, value: str) -> None: - if isinstance(value, str): - try: - value.encode("ascii") - except UnicodeEncodeError: - raise ValueError( - "RFC822Name values should be passed as an A-label string. " - "This means unicode characters should be encoded via " - "a library like idna." - ) - else: - raise TypeError("value must be string") - - name, address = parseaddr(value) - if name or not address: - # parseaddr has found a name (e.g. Name ) or the entire - # value is an empty string. 
- raise ValueError("Invalid rfc822name value") - - self._value = value - - @property - def value(self) -> str: - return self._value - - @classmethod - def _init_without_validation(cls, value: str) -> RFC822Name: - instance = cls.__new__(cls) - instance._value = value - return instance - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, RFC822Name): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class DNSName(GeneralName): - def __init__(self, value: str) -> None: - if isinstance(value, str): - try: - value.encode("ascii") - except UnicodeEncodeError: - raise ValueError( - "DNSName values should be passed as an A-label string. " - "This means unicode characters should be encoded via " - "a library like idna." - ) - else: - raise TypeError("value must be string") - - self._value = value - - @property - def value(self) -> str: - return self._value - - @classmethod - def _init_without_validation(cls, value: str) -> DNSName: - instance = cls.__new__(cls) - instance._value = value - return instance - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, DNSName): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class UniformResourceIdentifier(GeneralName): - def __init__(self, value: str) -> None: - if isinstance(value, str): - try: - value.encode("ascii") - except UnicodeEncodeError: - raise ValueError( - "URI values should be passed as an A-label string. " - "This means unicode characters should be encoded via " - "a library like idna." - ) - else: - raise TypeError("value must be string") - - self._value = value - - @property - def value(self) -> str: - return self._value - - @classmethod - def _init_without_validation(cls, value: str) -> UniformResourceIdentifier: - instance = cls.__new__(cls) - instance._value = value - return instance - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, UniformResourceIdentifier): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class DirectoryName(GeneralName): - def __init__(self, value: Name) -> None: - if not isinstance(value, Name): - raise TypeError("value must be a Name") - - self._value = value - - @property - def value(self) -> Name: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, DirectoryName): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class RegisteredID(GeneralName): - def __init__(self, value: ObjectIdentifier) -> None: - if not isinstance(value, ObjectIdentifier): - raise TypeError("value must be an ObjectIdentifier") - - self._value = value - - @property - def value(self) -> ObjectIdentifier: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, RegisteredID): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class IPAddress(GeneralName): - def __init__(self, value: _IPAddressTypes) -> None: - if not isinstance( - value, - ( - ipaddress.IPv4Address, - ipaddress.IPv6Address, - ipaddress.IPv4Network, - 
ipaddress.IPv6Network, - ), - ): - raise TypeError( - "value must be an instance of ipaddress.IPv4Address, " - "ipaddress.IPv6Address, ipaddress.IPv4Network, or " - "ipaddress.IPv6Network" - ) - - self._value = value - - @property - def value(self) -> _IPAddressTypes: - return self._value - - def _packed(self) -> bytes: - if isinstance( - self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address) - ): - return self.value.packed - else: - return ( - self.value.network_address.packed + self.value.netmask.packed - ) - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, IPAddress): - return NotImplemented - - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - -class OtherName(GeneralName): - def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None: - if not isinstance(type_id, ObjectIdentifier): - raise TypeError("type_id must be an ObjectIdentifier") - if not isinstance(value, bytes): - raise TypeError("value must be a binary string") - - self._type_id = type_id - self._value = value - - @property - def type_id(self) -> ObjectIdentifier: - return self._type_id - - @property - def value(self) -> bytes: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, OtherName): - return NotImplemented - - return self.type_id == other.type_id and self.value == other.value - - def __hash__(self) -> int: - return hash((self.type_id, self.value)) diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/name.py b/venv/lib/python3.10/site-packages/cryptography/x509/name.py deleted file mode 100644 index 5f82781..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/x509/name.py +++ /dev/null @@ -1,477 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
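For reference, a short sketch of how the GeneralName subclasses deleted above (DNSName, RFC822Name, IPAddress, UniformResourceIdentifier, OtherName) are instantiated and then queried through SubjectAlternativeName.get_values_for_type; it assumes the public cryptography.x509 namespace, and the host names, address, OID, and bytes are placeholders.

import ipaddress

from cryptography import x509
from cryptography.x509.oid import ObjectIdentifier

names = [
    x509.DNSName("example.com"),
    x509.RFC822Name("user@example.com"),
    x509.IPAddress(ipaddress.IPv4Address("192.0.2.1")),
    x509.UniformResourceIdentifier("https://example.com"),
    # OtherName carries an opaque DER-encoded value; placeholder bytes here.
    x509.OtherName(ObjectIdentifier("1.3.6.1.4.1.99999.1"), b"\x0c\x04test"),
]

san = x509.SubjectAlternativeName(names)
san.get_values_for_type(x509.DNSName)    # ["example.com"]
san.get_values_for_type(x509.OtherName)  # [OtherName(...)] -- returned whole, not just .value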
- -from __future__ import annotations - -import binascii -import re -import sys -import typing -import warnings -from collections.abc import Iterable, Iterator - -from cryptography import utils -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.x509.oid import NameOID, ObjectIdentifier - - -class _ASN1Type(utils.Enum): - BitString = 3 - OctetString = 4 - UTF8String = 12 - NumericString = 18 - PrintableString = 19 - T61String = 20 - IA5String = 22 - UTCTime = 23 - GeneralizedTime = 24 - VisibleString = 26 - UniversalString = 28 - BMPString = 30 - - -_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type} -_NAMEOID_DEFAULT_TYPE: dict[ObjectIdentifier, _ASN1Type] = { - NameOID.COUNTRY_NAME: _ASN1Type.PrintableString, - NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString, - NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString, - NameOID.DN_QUALIFIER: _ASN1Type.PrintableString, - NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String, - NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String, -} - -# Type alias -_OidNameMap = typing.Mapping[ObjectIdentifier, str] -_NameOidMap = typing.Mapping[str, ObjectIdentifier] - -#: Short attribute names from RFC 4514: -#: https://tools.ietf.org/html/rfc4514#page-7 -_NAMEOID_TO_NAME: _OidNameMap = { - NameOID.COMMON_NAME: "CN", - NameOID.LOCALITY_NAME: "L", - NameOID.STATE_OR_PROVINCE_NAME: "ST", - NameOID.ORGANIZATION_NAME: "O", - NameOID.ORGANIZATIONAL_UNIT_NAME: "OU", - NameOID.COUNTRY_NAME: "C", - NameOID.STREET_ADDRESS: "STREET", - NameOID.DOMAIN_COMPONENT: "DC", - NameOID.USER_ID: "UID", -} -_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()} - -_NAMEOID_LENGTH_LIMIT = { - NameOID.COUNTRY_NAME: (2, 2), - NameOID.JURISDICTION_COUNTRY_NAME: (2, 2), - NameOID.COMMON_NAME: (1, 64), -} - - -def _escape_dn_value(val: str | bytes) -> str: - """Escape special characters in RFC4514 Distinguished Name value.""" - - if not val: - return "" - - # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character - # followed by the hexadecimal encoding of the octets. - if isinstance(val, bytes): - return "#" + binascii.hexlify(val).decode("utf8") - - # See https://tools.ietf.org/html/rfc4514#section-2.4 - val = val.replace("\\", "\\\\") - val = val.replace('"', '\\"') - val = val.replace("+", "\\+") - val = val.replace(",", "\\,") - val = val.replace(";", "\\;") - val = val.replace("<", "\\<") - val = val.replace(">", "\\>") - val = val.replace("\0", "\\00") - - if val[0] in ("#", " "): - val = "\\" + val - if val[-1] == " ": - val = val[:-1] + "\\ " - - return val - - -def _unescape_dn_value(val: str) -> str: - if not val: - return "" - - # See https://tools.ietf.org/html/rfc4514#section-3 - - # special = escaped / SPACE / SHARP / EQUALS - # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE - def sub(m): - val = m.group(1) - # Regular escape - if len(val) == 1: - return val - # Hex-value scape - return chr(int(val, 16)) - - return _RFC4514NameParser._PAIR_RE.sub(sub, val) - - -NameAttributeValueType = typing.TypeVar( - "NameAttributeValueType", - typing.Union[str, bytes], - str, - bytes, - covariant=True, -) - - -class NameAttribute(typing.Generic[NameAttributeValueType]): - def __init__( - self, - oid: ObjectIdentifier, - value: NameAttributeValueType, - _type: _ASN1Type | None = None, - *, - _validate: bool = True, - ) -> None: - if not isinstance(oid, ObjectIdentifier): - raise TypeError( - "oid argument must be an ObjectIdentifier instance." 
- ) - if _type == _ASN1Type.BitString: - if oid != NameOID.X500_UNIQUE_IDENTIFIER: - raise TypeError( - "oid must be X500_UNIQUE_IDENTIFIER for BitString type." - ) - if not isinstance(value, bytes): - raise TypeError("value must be bytes for BitString") - else: - if not isinstance(value, str): - raise TypeError("value argument must be a str") - - length_limits = _NAMEOID_LENGTH_LIMIT.get(oid) - if length_limits is not None: - min_length, max_length = length_limits - assert isinstance(value, str) - c_len = len(value.encode("utf8")) - if c_len < min_length or c_len > max_length: - msg = ( - f"Attribute's length must be >= {min_length} and " - f"<= {max_length}, but it was {c_len}" - ) - if _validate is True: - raise ValueError(msg) - else: - warnings.warn(msg, stacklevel=2) - - # The appropriate ASN1 string type varies by OID and is defined across - # multiple RFCs including 2459, 3280, and 5280. In general UTF8String - # is preferred (2459), but 3280 and 5280 specify several OIDs with - # alternate types. This means when we see the sentinel value we need - # to look up whether the OID has a non-UTF8 type. If it does, set it - # to that. Otherwise, UTF8! - if _type is None: - _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String) - - if not isinstance(_type, _ASN1Type): - raise TypeError("_type must be from the _ASN1Type enum") - - self._oid = oid - self._value: NameAttributeValueType = value - self._type: _ASN1Type = _type - - @property - def oid(self) -> ObjectIdentifier: - return self._oid - - @property - def value(self) -> NameAttributeValueType: - return self._value - - @property - def rfc4514_attribute_name(self) -> str: - """ - The short attribute name (for example "CN") if available, - otherwise the OID dotted string. - """ - return _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string) - - def rfc4514_string( - self, attr_name_overrides: _OidNameMap | None = None - ) -> str: - """ - Format as RFC4514 Distinguished Name string. - - Use short attribute name if available, otherwise fall back to OID - dotted string. - """ - attr_name = ( - attr_name_overrides.get(self.oid) if attr_name_overrides else None - ) - if attr_name is None: - attr_name = self.rfc4514_attribute_name - - return f"{attr_name}={_escape_dn_value(self.value)}" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, NameAttribute): - return NotImplemented - - return self.oid == other.oid and self.value == other.value - - def __hash__(self) -> int: - return hash((self.oid, self.value)) - - def __repr__(self) -> str: - return f"" - - -class RelativeDistinguishedName: - def __init__(self, attributes: Iterable[NameAttribute]): - attributes = list(attributes) - if not attributes: - raise ValueError("a relative distinguished name cannot be empty") - if not all(isinstance(x, NameAttribute) for x in attributes): - raise TypeError("attributes must be an iterable of NameAttribute") - - # Keep list and frozenset to preserve attribute order where it matters - self._attributes = attributes - self._attribute_set = frozenset(attributes) - - if len(self._attribute_set) != len(attributes): - raise ValueError("duplicate attributes are not allowed") - - def get_attributes_for_oid( - self, - oid: ObjectIdentifier, - ) -> list[NameAttribute[str | bytes]]: - return [i for i in self if i.oid == oid] - - def rfc4514_string( - self, attr_name_overrides: _OidNameMap | None = None - ) -> str: - """ - Format as RFC4514 Distinguished Name string. 
- - Within each RDN, attributes are joined by '+', although that is rarely - used in certificates. - """ - return "+".join( - attr.rfc4514_string(attr_name_overrides) - for attr in self._attributes - ) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, RelativeDistinguishedName): - return NotImplemented - - return self._attribute_set == other._attribute_set - - def __hash__(self) -> int: - return hash(self._attribute_set) - - def __iter__(self) -> Iterator[NameAttribute]: - return iter(self._attributes) - - def __len__(self) -> int: - return len(self._attributes) - - def __repr__(self) -> str: - return f"" - - -class Name: - @typing.overload - def __init__(self, attributes: Iterable[NameAttribute]) -> None: ... - - @typing.overload - def __init__( - self, attributes: Iterable[RelativeDistinguishedName] - ) -> None: ... - - def __init__( - self, - attributes: Iterable[NameAttribute | RelativeDistinguishedName], - ) -> None: - attributes = list(attributes) - if all(isinstance(x, NameAttribute) for x in attributes): - self._attributes = [ - RelativeDistinguishedName([typing.cast(NameAttribute, x)]) - for x in attributes - ] - elif all(isinstance(x, RelativeDistinguishedName) for x in attributes): - self._attributes = typing.cast( - typing.List[RelativeDistinguishedName], attributes - ) - else: - raise TypeError( - "attributes must be a list of NameAttribute" - " or a list RelativeDistinguishedName" - ) - - @classmethod - def from_rfc4514_string( - cls, - data: str, - attr_name_overrides: _NameOidMap | None = None, - ) -> Name: - return _RFC4514NameParser(data, attr_name_overrides or {}).parse() - - def rfc4514_string( - self, attr_name_overrides: _OidNameMap | None = None - ) -> str: - """ - Format as RFC4514 Distinguished Name string. - For example 'CN=foobar.com,O=Foo Corp,C=US' - - An X.509 name is a two-level structure: a list of sets of attributes. - Each list element is separated by ',' and within each list element, set - elements are separated by '+'. The latter is almost never used in - real world certificates. According to RFC4514 section 2.1 the - RDNSequence must be reversed when converting to string representation. - """ - return ",".join( - attr.rfc4514_string(attr_name_overrides) - for attr in reversed(self._attributes) - ) - - def get_attributes_for_oid( - self, - oid: ObjectIdentifier, - ) -> list[NameAttribute[str | bytes]]: - return [i for i in self if i.oid == oid] - - @property - def rdns(self) -> list[RelativeDistinguishedName]: - return self._attributes - - def public_bytes(self, backend: typing.Any = None) -> bytes: - return rust_x509.encode_name_bytes(self) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Name): - return NotImplemented - - return self._attributes == other._attributes - - def __hash__(self) -> int: - # TODO: this is relatively expensive, if this looks like a bottleneck - # for you, consider optimizing! 
- return hash(tuple(self._attributes)) - - def __iter__(self) -> Iterator[NameAttribute]: - for rdn in self._attributes: - yield from rdn - - def __len__(self) -> int: - return sum(len(rdn) for rdn in self._attributes) - - def __repr__(self) -> str: - rdns = ",".join(attr.rfc4514_string() for attr in self._attributes) - return f"" - - -class _RFC4514NameParser: - _OID_RE = re.compile(r"(0|([1-9]\d*))(\.(0|([1-9]\d*)))+") - _DESCR_RE = re.compile(r"[a-zA-Z][a-zA-Z\d-]*") - - _PAIR = r"\\([\\ #=\"\+,;<>]|[\da-zA-Z]{2})" - _PAIR_RE = re.compile(_PAIR) - _LUTF1 = r"[\x01-\x1f\x21\x24-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" - _SUTF1 = r"[\x01-\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" - _TUTF1 = r"[\x01-\x1F\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" - _UTFMB = rf"[\x80-{chr(sys.maxunicode)}]" - _LEADCHAR = rf"{_LUTF1}|{_UTFMB}" - _STRINGCHAR = rf"{_SUTF1}|{_UTFMB}" - _TRAILCHAR = rf"{_TUTF1}|{_UTFMB}" - _STRING_RE = re.compile( - rf""" - ( - ({_LEADCHAR}|{_PAIR}) - ( - ({_STRINGCHAR}|{_PAIR})* - ({_TRAILCHAR}|{_PAIR}) - )? - )? - """, - re.VERBOSE, - ) - _HEXSTRING_RE = re.compile(r"#([\da-zA-Z]{2})+") - - def __init__(self, data: str, attr_name_overrides: _NameOidMap) -> None: - self._data = data - self._idx = 0 - - self._attr_name_overrides = attr_name_overrides - - def _has_data(self) -> bool: - return self._idx < len(self._data) - - def _peek(self) -> str | None: - if self._has_data(): - return self._data[self._idx] - return None - - def _read_char(self, ch: str) -> None: - if self._peek() != ch: - raise ValueError - self._idx += 1 - - def _read_re(self, pat) -> str: - match = pat.match(self._data, pos=self._idx) - if match is None: - raise ValueError - val = match.group() - self._idx += len(val) - return val - - def parse(self) -> Name: - """ - Parses the `data` string and converts it to a Name. - - According to RFC4514 section 2.1 the RDNSequence must be - reversed when converting to string representation. So, when - we parse it, we need to reverse again to get the RDNs on the - correct order. - """ - - if not self._has_data(): - return Name([]) - - rdns = [self._parse_rdn()] - - while self._has_data(): - self._read_char(",") - rdns.append(self._parse_rdn()) - - return Name(reversed(rdns)) - - def _parse_rdn(self) -> RelativeDistinguishedName: - nas = [self._parse_na()] - while self._peek() == "+": - self._read_char("+") - nas.append(self._parse_na()) - - return RelativeDistinguishedName(nas) - - def _parse_na(self) -> NameAttribute: - try: - oid_value = self._read_re(self._OID_RE) - except ValueError: - name = self._read_re(self._DESCR_RE) - oid = self._attr_name_overrides.get( - name, _NAME_TO_NAMEOID.get(name) - ) - if oid is None: - raise ValueError - else: - oid = ObjectIdentifier(oid_value) - - self._read_char("=") - if self._peek() == "#": - value = self._read_re(self._HEXSTRING_RE) - value = binascii.unhexlify(value[1:]).decode() - else: - raw_value = self._read_re(self._STRING_RE) - value = _unescape_dn_value(raw_value) - - return NameAttribute(oid, value) diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/ocsp.py b/venv/lib/python3.10/site-packages/cryptography/x509/ocsp.py deleted file mode 100644 index f61ed80..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/x509/ocsp.py +++ /dev/null @@ -1,379 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import datetime -from collections.abc import Iterable - -from cryptography import utils, x509 -from cryptography.hazmat.bindings._rust import ocsp -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric.types import ( - CertificateIssuerPrivateKeyTypes, -) -from cryptography.x509.base import _reject_duplicate_extension - - -class OCSPResponderEncoding(utils.Enum): - HASH = "By Hash" - NAME = "By Name" - - -class OCSPResponseStatus(utils.Enum): - SUCCESSFUL = 0 - MALFORMED_REQUEST = 1 - INTERNAL_ERROR = 2 - TRY_LATER = 3 - SIG_REQUIRED = 5 - UNAUTHORIZED = 6 - - -_ALLOWED_HASHES = ( - hashes.SHA1, - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, -) - - -def _verify_algorithm(algorithm: hashes.HashAlgorithm) -> None: - if not isinstance(algorithm, _ALLOWED_HASHES): - raise ValueError( - "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512" - ) - - -class OCSPCertStatus(utils.Enum): - GOOD = 0 - REVOKED = 1 - UNKNOWN = 2 - - -class _SingleResponse: - def __init__( - self, - resp: tuple[x509.Certificate, x509.Certificate] | None, - resp_hash: tuple[bytes, bytes, int] | None, - algorithm: hashes.HashAlgorithm, - cert_status: OCSPCertStatus, - this_update: datetime.datetime, - next_update: datetime.datetime | None, - revocation_time: datetime.datetime | None, - revocation_reason: x509.ReasonFlags | None, - ): - _verify_algorithm(algorithm) - if not isinstance(this_update, datetime.datetime): - raise TypeError("this_update must be a datetime object") - if next_update is not None and not isinstance( - next_update, datetime.datetime - ): - raise TypeError("next_update must be a datetime object or None") - - self._resp = resp - self._resp_hash = resp_hash - self._algorithm = algorithm - self._this_update = this_update - self._next_update = next_update - - if not isinstance(cert_status, OCSPCertStatus): - raise TypeError( - "cert_status must be an item from the OCSPCertStatus enum" - ) - if cert_status is not OCSPCertStatus.REVOKED: - if revocation_time is not None: - raise ValueError( - "revocation_time can only be provided if the certificate " - "is revoked" - ) - if revocation_reason is not None: - raise ValueError( - "revocation_reason can only be provided if the certificate" - " is revoked" - ) - else: - if not isinstance(revocation_time, datetime.datetime): - raise TypeError("revocation_time must be a datetime object") - - if revocation_reason is not None and not isinstance( - revocation_reason, x509.ReasonFlags - ): - raise TypeError( - "revocation_reason must be an item from the ReasonFlags " - "enum or None" - ) - - self._cert_status = cert_status - self._revocation_time = revocation_time - self._revocation_reason = revocation_reason - - -OCSPRequest = ocsp.OCSPRequest -OCSPResponse = ocsp.OCSPResponse -OCSPSingleResponse = ocsp.OCSPSingleResponse - - -class OCSPRequestBuilder: - def __init__( - self, - request: tuple[ - x509.Certificate, x509.Certificate, hashes.HashAlgorithm - ] - | None = None, - request_hash: tuple[bytes, bytes, int, hashes.HashAlgorithm] - | None = None, - extensions: list[x509.Extension[x509.ExtensionType]] = [], - ) -> None: - self._request = request - self._request_hash = request_hash - self._extensions = extensions - - def add_certificate( - self, - cert: x509.Certificate, - issuer: x509.Certificate, - algorithm: hashes.HashAlgorithm, - ) -> OCSPRequestBuilder: - if self._request is not None or self._request_hash is not None: - raise ValueError("Only one 
certificate can be added to a request") - - _verify_algorithm(algorithm) - if not isinstance(cert, x509.Certificate) or not isinstance( - issuer, x509.Certificate - ): - raise TypeError("cert and issuer must be a Certificate") - - return OCSPRequestBuilder( - (cert, issuer, algorithm), self._request_hash, self._extensions - ) - - def add_certificate_by_hash( - self, - issuer_name_hash: bytes, - issuer_key_hash: bytes, - serial_number: int, - algorithm: hashes.HashAlgorithm, - ) -> OCSPRequestBuilder: - if self._request is not None or self._request_hash is not None: - raise ValueError("Only one certificate can be added to a request") - - if not isinstance(serial_number, int): - raise TypeError("serial_number must be an integer") - - _verify_algorithm(algorithm) - utils._check_bytes("issuer_name_hash", issuer_name_hash) - utils._check_bytes("issuer_key_hash", issuer_key_hash) - if algorithm.digest_size != len( - issuer_name_hash - ) or algorithm.digest_size != len(issuer_key_hash): - raise ValueError( - "issuer_name_hash and issuer_key_hash must be the same length " - "as the digest size of the algorithm" - ) - - return OCSPRequestBuilder( - self._request, - (issuer_name_hash, issuer_key_hash, serial_number, algorithm), - self._extensions, - ) - - def add_extension( - self, extval: x509.ExtensionType, critical: bool - ) -> OCSPRequestBuilder: - if not isinstance(extval, x509.ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = x509.Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return OCSPRequestBuilder( - self._request, self._request_hash, [*self._extensions, extension] - ) - - def build(self) -> OCSPRequest: - if self._request is None and self._request_hash is None: - raise ValueError("You must add a certificate before building") - - return ocsp.create_ocsp_request(self) - - -class OCSPResponseBuilder: - def __init__( - self, - response: _SingleResponse | None = None, - responder_id: tuple[x509.Certificate, OCSPResponderEncoding] - | None = None, - certs: list[x509.Certificate] | None = None, - extensions: list[x509.Extension[x509.ExtensionType]] = [], - ): - self._response = response - self._responder_id = responder_id - self._certs = certs - self._extensions = extensions - - def add_response( - self, - cert: x509.Certificate, - issuer: x509.Certificate, - algorithm: hashes.HashAlgorithm, - cert_status: OCSPCertStatus, - this_update: datetime.datetime, - next_update: datetime.datetime | None, - revocation_time: datetime.datetime | None, - revocation_reason: x509.ReasonFlags | None, - ) -> OCSPResponseBuilder: - if self._response is not None: - raise ValueError("Only one response per OCSPResponse.") - - if not isinstance(cert, x509.Certificate) or not isinstance( - issuer, x509.Certificate - ): - raise TypeError("cert and issuer must be a Certificate") - - singleresp = _SingleResponse( - (cert, issuer), - None, - algorithm, - cert_status, - this_update, - next_update, - revocation_time, - revocation_reason, - ) - return OCSPResponseBuilder( - singleresp, - self._responder_id, - self._certs, - self._extensions, - ) - - def add_response_by_hash( - self, - issuer_name_hash: bytes, - issuer_key_hash: bytes, - serial_number: int, - algorithm: hashes.HashAlgorithm, - cert_status: OCSPCertStatus, - this_update: datetime.datetime, - next_update: datetime.datetime | None, - revocation_time: datetime.datetime | None, - revocation_reason: x509.ReasonFlags | None, - ) -> OCSPResponseBuilder: - if self._response 
is not None: - raise ValueError("Only one response per OCSPResponse.") - - if not isinstance(serial_number, int): - raise TypeError("serial_number must be an integer") - - utils._check_bytes("issuer_name_hash", issuer_name_hash) - utils._check_bytes("issuer_key_hash", issuer_key_hash) - _verify_algorithm(algorithm) - if algorithm.digest_size != len( - issuer_name_hash - ) or algorithm.digest_size != len(issuer_key_hash): - raise ValueError( - "issuer_name_hash and issuer_key_hash must be the same length " - "as the digest size of the algorithm" - ) - - singleresp = _SingleResponse( - None, - (issuer_name_hash, issuer_key_hash, serial_number), - algorithm, - cert_status, - this_update, - next_update, - revocation_time, - revocation_reason, - ) - return OCSPResponseBuilder( - singleresp, - self._responder_id, - self._certs, - self._extensions, - ) - - def responder_id( - self, encoding: OCSPResponderEncoding, responder_cert: x509.Certificate - ) -> OCSPResponseBuilder: - if self._responder_id is not None: - raise ValueError("responder_id can only be set once") - if not isinstance(responder_cert, x509.Certificate): - raise TypeError("responder_cert must be a Certificate") - if not isinstance(encoding, OCSPResponderEncoding): - raise TypeError( - "encoding must be an element from OCSPResponderEncoding" - ) - - return OCSPResponseBuilder( - self._response, - (responder_cert, encoding), - self._certs, - self._extensions, - ) - - def certificates( - self, certs: Iterable[x509.Certificate] - ) -> OCSPResponseBuilder: - if self._certs is not None: - raise ValueError("certificates may only be set once") - certs = list(certs) - if len(certs) == 0: - raise ValueError("certs must not be an empty list") - if not all(isinstance(x, x509.Certificate) for x in certs): - raise TypeError("certs must be a list of Certificates") - return OCSPResponseBuilder( - self._response, - self._responder_id, - certs, - self._extensions, - ) - - def add_extension( - self, extval: x509.ExtensionType, critical: bool - ) -> OCSPResponseBuilder: - if not isinstance(extval, x509.ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = x509.Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return OCSPResponseBuilder( - self._response, - self._responder_id, - self._certs, - [*self._extensions, extension], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: hashes.HashAlgorithm | None, - ) -> OCSPResponse: - if self._response is None: - raise ValueError("You must add a response before signing") - if self._responder_id is None: - raise ValueError("You must add a responder_id before signing") - - return ocsp.create_ocsp_response( - OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm - ) - - @classmethod - def build_unsuccessful( - cls, response_status: OCSPResponseStatus - ) -> OCSPResponse: - if not isinstance(response_status, OCSPResponseStatus): - raise TypeError( - "response_status must be an item from OCSPResponseStatus" - ) - if response_status is OCSPResponseStatus.SUCCESSFUL: - raise ValueError("response_status cannot be SUCCESSFUL") - - return ocsp.create_ocsp_response(response_status, None, None, None) - - -load_der_ocsp_request = ocsp.load_der_ocsp_request -load_der_ocsp_response = ocsp.load_der_ocsp_response diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/oid.py b/venv/lib/python3.10/site-packages/cryptography/x509/oid.py deleted file mode 100644 index 520fc7a..0000000 --- 
a/venv/lib/python3.10/site-packages/cryptography/x509/oid.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat._oid import ( - AttributeOID, - AuthorityInformationAccessOID, - CertificatePoliciesOID, - CRLEntryExtensionOID, - ExtendedKeyUsageOID, - ExtensionOID, - NameOID, - ObjectIdentifier, - OCSPExtensionOID, - OtherNameFormOID, - PublicKeyAlgorithmOID, - SignatureAlgorithmOID, - SubjectInformationAccessOID, -) - -__all__ = [ - "AttributeOID", - "AuthorityInformationAccessOID", - "CRLEntryExtensionOID", - "CertificatePoliciesOID", - "ExtendedKeyUsageOID", - "ExtensionOID", - "NameOID", - "OCSPExtensionOID", - "ObjectIdentifier", - "OtherNameFormOID", - "PublicKeyAlgorithmOID", - "SignatureAlgorithmOID", - "SubjectInformationAccessOID", -] diff --git a/venv/lib/python3.10/site-packages/cryptography/x509/verification.py b/venv/lib/python3.10/site-packages/cryptography/x509/verification.py deleted file mode 100644 index 2db4324..0000000 --- a/venv/lib/python3.10/site-packages/cryptography/x509/verification.py +++ /dev/null @@ -1,34 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing - -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.x509.general_name import DNSName, IPAddress - -__all__ = [ - "ClientVerifier", - "Criticality", - "ExtensionPolicy", - "Policy", - "PolicyBuilder", - "ServerVerifier", - "Store", - "Subject", - "VerificationError", - "VerifiedClient", -] - -Store = rust_x509.Store -Subject = typing.Union[DNSName, IPAddress] -VerifiedClient = rust_x509.VerifiedClient -ClientVerifier = rust_x509.ClientVerifier -ServerVerifier = rust_x509.ServerVerifier -PolicyBuilder = rust_x509.PolicyBuilder -Policy = rust_x509.Policy -ExtensionPolicy = rust_x509.ExtensionPolicy -Criticality = rust_x509.Criticality -VerificationError = rust_x509.VerificationError diff --git a/venv/lib/python3.10/site-packages/distutils-precedence.pth b/venv/lib/python3.10/site-packages/distutils-precedence.pth deleted file mode 100644 index 6de4198..0000000 --- a/venv/lib/python3.10/site-packages/distutils-precedence.pth +++ /dev/null @@ -1 +0,0 @@ -import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/METADATA deleted file mode 100644 index 42ce213..0000000 --- a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/METADATA +++ /dev/null @@ -1,89 +0,0 @@ -Metadata-Version: 2.4 -Name: Flask -Version: 3.1.1 -Summary: A simple framework for building complex web applications. 
-Maintainer-email: Pallets -Requires-Python: >=3.9 -Description-Content-Type: text/markdown -License-Expression: BSD-3-Clause -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Framework :: Flask -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Classifier: Typing :: Typed -License-File: LICENSE.txt -Requires-Dist: blinker>=1.9.0 -Requires-Dist: click>=8.1.3 -Requires-Dist: importlib-metadata>=3.6.0; python_version < '3.10' -Requires-Dist: itsdangerous>=2.2.0 -Requires-Dist: jinja2>=3.1.2 -Requires-Dist: markupsafe>=2.1.1 -Requires-Dist: werkzeug>=3.1.0 -Requires-Dist: asgiref>=3.2 ; extra == "async" -Requires-Dist: python-dotenv ; extra == "dotenv" -Project-URL: Changes, https://flask.palletsprojects.com/page/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://flask.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/flask/ -Provides-Extra: async -Provides-Extra: dotenv - -# Flask - -Flask is a lightweight [WSGI] web application framework. It is designed -to make getting started quick and easy, with the ability to scale up to -complex applications. It began as a simple wrapper around [Werkzeug] -and [Jinja], and has become one of the most popular Python web -application frameworks. - -Flask offers suggestions, but doesn't enforce any dependencies or -project layout. It is up to the developer to choose the tools and -libraries they want to use. There are many extensions provided by the -community that make adding new functionality easy. - -[WSGI]: https://wsgi.readthedocs.io/ -[Werkzeug]: https://werkzeug.palletsprojects.com/ -[Jinja]: https://jinja.palletsprojects.com/ - -## A Simple Example - -```python -# save this as app.py -from flask import Flask - -app = Flask(__name__) - -@app.route("/") -def hello(): - return "Hello, World!" -``` - -``` -$ flask run - * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) -``` - -## Donate - -The Pallets organization develops and supports Flask and the libraries -it uses. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today]. - -[please donate today]: https://palletsprojects.com/donate - -## Contributing - -See our [detailed contributing documentation][contrib] for many ways to -contribute, including reporting issues, requesting features, asking or answering -questions, and making PRs. 
- -[contrib]: https://palletsprojects.com/contributing/ - diff --git a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/RECORD deleted file mode 100644 index 2c1f5e9..0000000 --- a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/RECORD +++ /dev/null @@ -1,58 +0,0 @@ -../../../bin/flask,sha256=7g67WvJDpoXU2vQuIagZVxeKhcQLmOa54r9T5hxVMYw,235 -flask-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask-3.1.1.dist-info/METADATA,sha256=EsnOyVfBXjw1BkGn-9lrI5mcv1NwYyB4_CfdW2FSDZQ,3014 -flask-3.1.1.dist-info/RECORD,, -flask-3.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask-3.1.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 -flask-3.1.1.dist-info/entry_points.txt,sha256=bBP7hTOS5fz9zLtC7sPofBZAlMkEvBxu7KqS6l5lvc4,40 -flask-3.1.1.dist-info/licenses/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -flask/__init__.py,sha256=mHvJN9Swtl1RDtjCqCIYyIniK_SZ_l_hqUynOzgpJ9o,2701 -flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 -flask/__pycache__/__init__.cpython-310.pyc,, -flask/__pycache__/__main__.cpython-310.pyc,, -flask/__pycache__/app.cpython-310.pyc,, -flask/__pycache__/blueprints.cpython-310.pyc,, -flask/__pycache__/cli.cpython-310.pyc,, -flask/__pycache__/config.cpython-310.pyc,, -flask/__pycache__/ctx.cpython-310.pyc,, -flask/__pycache__/debughelpers.cpython-310.pyc,, -flask/__pycache__/globals.cpython-310.pyc,, -flask/__pycache__/helpers.cpython-310.pyc,, -flask/__pycache__/logging.cpython-310.pyc,, -flask/__pycache__/sessions.cpython-310.pyc,, -flask/__pycache__/signals.cpython-310.pyc,, -flask/__pycache__/templating.cpython-310.pyc,, -flask/__pycache__/testing.cpython-310.pyc,, -flask/__pycache__/typing.cpython-310.pyc,, -flask/__pycache__/views.cpython-310.pyc,, -flask/__pycache__/wrappers.cpython-310.pyc,, -flask/app.py,sha256=XGqgFRsLgBhzIoB2HSftoMTIM3hjDiH6rdV7c3g3IKc,61744 -flask/blueprints.py,sha256=p5QE2lY18GItbdr_RKRpZ8Do17g0PvQGIgZkSUDhX2k,4541 -flask/cli.py,sha256=Pfh72-BxlvoH0QHCDOc1HvXG7Kq5Xetf3zzNz2kNSHk,37184 -flask/config.py,sha256=PiqF0DPam6HW0FH4CH1hpXTBe30NSzjPEOwrz1b6kt0,13219 -flask/ctx.py,sha256=4atDhJJ_cpV1VMq4qsfU4E_61M1oN93jlS2H9gjrl58,15120 -flask/debughelpers.py,sha256=PGIDhStW_efRjpaa3zHIpo-htStJOR41Ip3OJWPYBwo,6080 -flask/globals.py,sha256=XdQZmStBmPIs8t93tjx6pO7Bm3gobAaONWkFcUHaGas,1713 -flask/helpers.py,sha256=7njmzkFJvrPSQudsgONsgQzaGrGppeBINevKgWescPk,23521 -flask/json/__init__.py,sha256=hLNR898paqoefdeAhraa5wyJy-bmRB2k2dV4EgVy2Z8,5602 -flask/json/__pycache__/__init__.cpython-310.pyc,, -flask/json/__pycache__/provider.cpython-310.pyc,, -flask/json/__pycache__/tag.cpython-310.pyc,, -flask/json/provider.py,sha256=5imEzY5HjV2HoUVrQbJLqXCzMNpZXfD0Y1XqdLV2XBA,7672 -flask/json/tag.py,sha256=DhaNwuIOhdt2R74oOC9Y4Z8ZprxFYiRb5dUP5byyINw,9281 -flask/logging.py,sha256=8sM3WMTubi1cBb2c_lPkWpN0J8dMAqrgKRYLLi1dCVI,2377 -flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask/sansio/README.md,sha256=-0X1tECnilmz1cogx-YhNw5d7guK7GKrq_DEV2OzlU0,228 -flask/sansio/__pycache__/app.cpython-310.pyc,, -flask/sansio/__pycache__/blueprints.cpython-310.pyc,, -flask/sansio/__pycache__/scaffold.cpython-310.pyc,, -flask/sansio/app.py,sha256=Wj9NVGtiR1jvkZ9gSFd91usUlM8H0g06aPVz2sMh4bw,38116 -flask/sansio/blueprints.py,sha256=Tqe-7EkZ-tbWchm8iDoCfD848f0_3nLv6NNjeIPvHwM,24637 -flask/sansio/scaffold.py,sha256=q6wM4Y4aYMGGN_Litsj3PYKpBS3Zvut0xhDmpBEHFdo,30387 
-flask/sessions.py,sha256=ED_OV3Jl1emsy7Zntb7aFWxyoynt-PzNY0eFUH-Syo0,15495 -flask/signals.py,sha256=V7lMUww7CqgJ2ThUBn1PiatZtQanOyt7OZpu2GZI-34,750 -flask/templating.py,sha256=2TcXLT85Asflm2W9WOSFxKCmYn5e49w_Jkg9-NaaJWo,7537 -flask/testing.py,sha256=JT9fi_-_qsAXIpC1NSWcp1VrO-QSXgmdq95extfCwEw,10136 -flask/typing.py,sha256=L-L5t2jKgS0aOmVhioQ_ylqcgiVFnA6yxO-RLNhq-GU,3293 -flask/views.py,sha256=xzJx6oJqGElThtEghZN7ZQGMw5TDFyuRxUkecwRuAoA,6962 -flask/wrappers.py,sha256=jUkv4mVek2Iq4hwxd4RvqrIMb69Bv0PElDgWLmd5ORo,9406 diff --git a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/REQUESTED b/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/WHEEL deleted file mode 100644 index d8b9936..0000000 --- a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.12.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/entry_points.txt deleted file mode 100644 index eec6733..0000000 --- a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -flask=flask.cli:main - diff --git a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/licenses/LICENSE.txt b/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/licenses/LICENSE.txt deleted file mode 100644 index 9d227a0..0000000 --- a/venv/lib/python3.10/site-packages/flask-3.1.1.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/venv/lib/python3.10/site-packages/flask/__init__.py b/venv/lib/python3.10/site-packages/flask/__init__.py deleted file mode 100644 index 1fdc50c..0000000 --- a/venv/lib/python3.10/site-packages/flask/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -from __future__ import annotations - -import typing as t - -from . import json as json -from .app import Flask as Flask -from .blueprints import Blueprint as Blueprint -from .config import Config as Config -from .ctx import after_this_request as after_this_request -from .ctx import copy_current_request_context as copy_current_request_context -from .ctx import has_app_context as has_app_context -from .ctx import has_request_context as has_request_context -from .globals import current_app as current_app -from .globals import g as g -from .globals import request as request -from .globals import session as session -from .helpers import abort as abort -from .helpers import flash as flash -from .helpers import get_flashed_messages as get_flashed_messages -from .helpers import get_template_attribute as get_template_attribute -from .helpers import make_response as make_response -from .helpers import redirect as redirect -from .helpers import send_file as send_file -from .helpers import send_from_directory as send_from_directory -from .helpers import stream_with_context as stream_with_context -from .helpers import url_for as url_for -from .json import jsonify as jsonify -from .signals import appcontext_popped as appcontext_popped -from .signals import appcontext_pushed as appcontext_pushed -from .signals import appcontext_tearing_down as appcontext_tearing_down -from .signals import before_render_template as before_render_template -from .signals import got_request_exception as got_request_exception -from .signals import message_flashed as message_flashed -from .signals import request_finished as request_finished -from .signals import request_started as request_started -from .signals import request_tearing_down as request_tearing_down -from .signals import template_rendered as template_rendered -from .templating import render_template as render_template -from .templating import render_template_string as render_template_string -from .templating import stream_template as stream_template -from .templating import stream_template_string as stream_template_string -from .wrappers import Request as Request -from .wrappers import Response as Response - -if not t.TYPE_CHECKING: - - def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Flask 3.2. 
Use feature detection or" - " 'importlib.metadata.version(\"flask\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("flask") - - raise AttributeError(name) diff --git a/venv/lib/python3.10/site-packages/flask/__main__.py b/venv/lib/python3.10/site-packages/flask/__main__.py deleted file mode 100644 index 4e28416..0000000 --- a/venv/lib/python3.10/site-packages/flask/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .cli import main - -main() diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 56aa524..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/__main__.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/__main__.cpython-310.pyc deleted file mode 100644 index 5e09e7f..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/__main__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/app.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/app.cpython-310.pyc deleted file mode 100644 index a9a8716..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/app.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/blueprints.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/blueprints.cpython-310.pyc deleted file mode 100644 index f22f28f..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/blueprints.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/cli.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/cli.cpython-310.pyc deleted file mode 100644 index 5219c28..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/cli.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/config.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/config.cpython-310.pyc deleted file mode 100644 index 3c2446a..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/config.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/ctx.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/ctx.cpython-310.pyc deleted file mode 100644 index 2f2ffba..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/ctx.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/debughelpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/debughelpers.cpython-310.pyc deleted file mode 100644 index bd49cfd..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/debughelpers.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/globals.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/globals.cpython-310.pyc deleted file mode 100644 index b832fd1..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/globals.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/helpers.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/flask/__pycache__/helpers.cpython-310.pyc deleted file mode 100644 index 39a3f14..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/helpers.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/logging.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/logging.cpython-310.pyc deleted file mode 100644 index 5bea6fe..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/logging.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/sessions.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/sessions.cpython-310.pyc deleted file mode 100644 index 0dc18c6..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/sessions.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/signals.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/signals.cpython-310.pyc deleted file mode 100644 index 6c30edf..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/signals.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/templating.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/templating.cpython-310.pyc deleted file mode 100644 index 5ab7d6f..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/templating.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/testing.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/testing.cpython-310.pyc deleted file mode 100644 index b76a352..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/testing.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/typing.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/typing.cpython-310.pyc deleted file mode 100644 index a3e4ba6..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/typing.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/views.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/views.cpython-310.pyc deleted file mode 100644 index 5452d33..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/views.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/__pycache__/wrappers.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/__pycache__/wrappers.cpython-310.pyc deleted file mode 100644 index 7605344..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/__pycache__/wrappers.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/app.py b/venv/lib/python3.10/site-packages/flask/app.py deleted file mode 100644 index 1232b03..0000000 --- a/venv/lib/python3.10/site-packages/flask/app.py +++ /dev/null @@ -1,1536 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import sys -import typing as t -import weakref -from datetime import timedelta -from inspect import iscoroutinefunction -from itertools import chain -from types import TracebackType -from urllib.parse import quote as _url_quote - -import click -from werkzeug.datastructures import Headers -from werkzeug.datastructures import ImmutableDict -from werkzeug.exceptions 
import BadRequestKeyError -from werkzeug.exceptions import HTTPException -from werkzeug.exceptions import InternalServerError -from werkzeug.routing import BuildError -from werkzeug.routing import MapAdapter -from werkzeug.routing import RequestRedirect -from werkzeug.routing import RoutingException -from werkzeug.routing import Rule -from werkzeug.serving import is_running_from_reloader -from werkzeug.wrappers import Response as BaseResponse -from werkzeug.wsgi import get_host - -from . import cli -from . import typing as ft -from .ctx import AppContext -from .ctx import RequestContext -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import g -from .globals import request -from .globals import request_ctx -from .globals import session -from .helpers import get_debug_flag -from .helpers import get_flashed_messages -from .helpers import get_load_dotenv -from .helpers import send_from_directory -from .sansio.app import App -from .sansio.scaffold import _sentinel -from .sessions import SecureCookieSessionInterface -from .sessions import SessionInterface -from .signals import appcontext_tearing_down -from .signals import got_request_exception -from .signals import request_finished -from .signals import request_started -from .signals import request_tearing_down -from .templating import Environment -from .wrappers import Request -from .wrappers import Response - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIEnvironment - - from .testing import FlaskClient - from .testing import FlaskCliRunner - from .typing import HeadersValue - -T_shell_context_processor = t.TypeVar( - "T_shell_context_processor", bound=ft.ShellContextProcessorCallable -) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) - - -def _make_timedelta(value: timedelta | int | None) -> timedelta | None: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class Flask(App): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. 
If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. (For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. - This should only be set manually when it can't be detected - automatically, such as for namespace packages. 
- """ - - default_config = ImmutableDict( - { - "DEBUG": None, - "TESTING": False, - "PROPAGATE_EXCEPTIONS": None, - "SECRET_KEY": None, - "SECRET_KEY_FALLBACKS": None, - "PERMANENT_SESSION_LIFETIME": timedelta(days=31), - "USE_X_SENDFILE": False, - "TRUSTED_HOSTS": None, - "SERVER_NAME": None, - "APPLICATION_ROOT": "/", - "SESSION_COOKIE_NAME": "session", - "SESSION_COOKIE_DOMAIN": None, - "SESSION_COOKIE_PATH": None, - "SESSION_COOKIE_HTTPONLY": True, - "SESSION_COOKIE_SECURE": False, - "SESSION_COOKIE_PARTITIONED": False, - "SESSION_COOKIE_SAMESITE": None, - "SESSION_REFRESH_EACH_REQUEST": True, - "MAX_CONTENT_LENGTH": None, - "MAX_FORM_MEMORY_SIZE": 500_000, - "MAX_FORM_PARTS": 1_000, - "SEND_FILE_MAX_AGE_DEFAULT": None, - "TRAP_BAD_REQUEST_ERRORS": None, - "TRAP_HTTP_EXCEPTIONS": False, - "EXPLAIN_TEMPLATE_LOADING": False, - "PREFERRED_URL_SCHEME": "http", - "TEMPLATES_AUTO_RELOAD": None, - "MAX_COOKIE_SIZE": 4093, - "PROVIDE_AUTOMATIC_OPTIONS": True, - } - ) - - #: The class that is used for request objects. See :class:`~flask.Request` - #: for more information. - request_class: type[Request] = Request - - #: The class that is used for response objects. See - #: :class:`~flask.Response` for more information. - response_class: type[Response] = Response - - #: the session interface to use. By default an instance of - #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. - #: - #: .. versionadded:: 0.8 - session_interface: SessionInterface = SecureCookieSessionInterface() - - def __init__( - self, - import_name: str, - static_url_path: str | None = None, - static_folder: str | os.PathLike[str] | None = "static", - static_host: str | None = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: str | os.PathLike[str] | None = "templates", - instance_path: str | None = None, - instance_relative_config: bool = False, - root_path: str | None = None, - ): - super().__init__( - import_name=import_name, - static_url_path=static_url_path, - static_folder=static_folder, - static_host=static_host, - host_matching=host_matching, - subdomain_matching=subdomain_matching, - template_folder=template_folder, - instance_path=instance_path, - instance_relative_config=instance_relative_config, - root_path=root_path, - ) - - #: The Click command group for registering CLI commands for this - #: object. The commands are available from the ``flask`` command - #: once the application has been discovered and blueprints have - #: been registered. - self.cli = cli.AppGroup() - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - # Add a static route using the provided static_url_path, static_host, - # and static_folder if there is a configured static_folder. - # Note we do this without checking if static_folder exists. - # For one, it might be created while the server is running (e.g. during - # development). Also, Google App Engine stores static files somewhere - if self.has_static_folder: - assert bool(static_host) == host_matching, ( - "Invalid static_host/host_matching combination" - ) - # Use a weakref to avoid creating a reference cycle between the app - # and the view function (see #3761). 
- self_ref = weakref.ref(self) - self.add_url_rule( - f"{self.static_url_path}/", - endpoint="static", - host=static_host, - view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 - ) - - def get_send_file_max_age(self, filename: str | None) -> int | None: - """Used by :func:`send_file` to determine the ``max_age`` cache - value for a given file path if it wasn't passed. - - By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from - the configuration of :data:`~flask.current_app`. This defaults - to ``None``, which tells the browser to use conditional requests - instead of a timed cache, which is usually preferable. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionchanged:: 2.0 - The default configuration is ``None`` instead of 12 hours. - - .. versionadded:: 0.9 - """ - value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] - - if value is None: - return None - - if isinstance(value, timedelta): - return int(value.total_seconds()) - - return value # type: ignore[no-any-return] - - def send_static_file(self, filename: str) -> Response: - """The view function used to serve files from - :attr:`static_folder`. A route is automatically registered for - this view at :attr:`static_url_path` if :attr:`static_folder` is - set. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionadded:: 0.5 - - """ - if not self.has_static_folder: - raise RuntimeError("'static_folder' must be set to serve static_files.") - - # send_file only knows to call get_send_file_max_age on the app, - # call it here so it works for blueprints too. - max_age = self.get_send_file_max_age(filename) - return send_from_directory( - t.cast(str, self.static_folder), filename, max_age=max_age - ) - - def open_resource( - self, resource: str, mode: str = "rb", encoding: str | None = None - ) -> t.IO[t.AnyStr]: - """Open a resource file relative to :attr:`root_path` for reading. - - For example, if the file ``schema.sql`` is next to the file - ``app.py`` where the ``Flask`` app is defined, it can be opened - with: - - .. code-block:: python - - with app.open_resource("schema.sql") as f: - conn.executescript(f.read()) - - :param resource: Path to the resource relative to :attr:`root_path`. - :param mode: Open the file in this mode. Only reading is supported, - valid values are ``"r"`` (or ``"rt"``) and ``"rb"``. - :param encoding: Open the file with this encoding when opening in text - mode. This is ignored when opening in binary mode. - - .. versionchanged:: 3.1 - Added the ``encoding`` parameter. - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading.") - - path = os.path.join(self.root_path, resource) - - if mode == "rb": - return open(path, mode) # pyright: ignore - - return open(path, mode, encoding=encoding) - - def open_instance_resource( - self, resource: str, mode: str = "rb", encoding: str | None = "utf-8" - ) -> t.IO[t.AnyStr]: - """Open a resource file relative to the application's instance folder - :attr:`instance_path`. Unlike :meth:`open_resource`, files in the - instance folder can be opened for writing. - - :param resource: Path to the resource relative to :attr:`instance_path`. - :param mode: Open the file in this mode. - :param encoding: Open the file with this encoding when opening in text - mode. This is ignored when opening in binary mode. - - .. versionchanged:: 3.1 - Added the ``encoding`` parameter. 
- """ - path = os.path.join(self.instance_path, resource) - - if "b" in mode: - return open(path, mode) - - return open(path, mode, encoding=encoding) - - def create_jinja_environment(self) -> Environment: - """Create the Jinja environment based on :attr:`jinja_options` - and the various Jinja-related methods of the app. Changing - :attr:`jinja_options` after this will have no effect. Also adds - Flask-related globals and filters to the environment. - - .. versionchanged:: 0.11 - ``Environment.auto_reload`` set in accordance with - ``TEMPLATES_AUTO_RELOAD`` configuration option. - - .. versionadded:: 0.5 - """ - options = dict(self.jinja_options) - - if "autoescape" not in options: - options["autoescape"] = self.select_jinja_autoescape - - if "auto_reload" not in options: - auto_reload = self.config["TEMPLATES_AUTO_RELOAD"] - - if auto_reload is None: - auto_reload = self.debug - - options["auto_reload"] = auto_reload - - rv = self.jinja_environment(self, **options) - rv.globals.update( - url_for=self.url_for, - get_flashed_messages=get_flashed_messages, - config=self.config, - # request, session and g are normally added with the - # context processor for efficiency reasons but for imported - # templates we also want the proxies in there. - request=request, - session=session, - g=g, - ) - rv.policies["json.dumps_function"] = self.json.dumps - return rv - - def create_url_adapter(self, request: Request | None) -> MapAdapter | None: - """Creates a URL adapter for the given request. The URL adapter - is created at a point where the request context is not yet set - up so the request is passed explicitly. - - .. versionchanged:: 3.1 - If :data:`SERVER_NAME` is set, it does not restrict requests to - only that domain, for both ``subdomain_matching`` and - ``host_matching``. - - .. versionchanged:: 1.0 - :data:`SERVER_NAME` no longer implicitly enables subdomain - matching. Use :attr:`subdomain_matching` instead. - - .. versionchanged:: 0.9 - This can be called outside a request when the URL adapter is created - for an application context. - - .. versionadded:: 0.6 - """ - if request is not None: - if (trusted_hosts := self.config["TRUSTED_HOSTS"]) is not None: - request.trusted_hosts = trusted_hosts - - # Check trusted_hosts here until bind_to_environ does. - request.host = get_host(request.environ, request.trusted_hosts) # pyright: ignore - subdomain = None - server_name = self.config["SERVER_NAME"] - - if self.url_map.host_matching: - # Don't pass SERVER_NAME, otherwise it's used and the actual - # host is ignored, which breaks host matching. - server_name = None - elif not self.subdomain_matching: - # Werkzeug doesn't implement subdomain matching yet. Until then, - # disable it by forcing the current subdomain to the default, or - # the empty string. - subdomain = self.url_map.default_subdomain or "" - - return self.url_map.bind_to_environ( - request.environ, server_name=server_name, subdomain=subdomain - ) - - # Need at least SERVER_NAME to match/build outside a request. - if self.config["SERVER_NAME"] is not None: - return self.url_map.bind( - self.config["SERVER_NAME"], - script_name=self.config["APPLICATION_ROOT"], - url_scheme=self.config["PREFERRED_URL_SCHEME"], - ) - - return None - - def raise_routing_exception(self, request: Request) -> t.NoReturn: - """Intercept routing exceptions and possibly do something else. - - In debug mode, intercept a routing redirect and replace it with - an error if the body will be discarded. 
- - With modern Werkzeug this shouldn't occur, since it now uses a - 308 status which tells the browser to resend the method and - body. - - .. versionchanged:: 2.1 - Don't intercept 307 and 308 redirects. - - :meta private: - :internal: - """ - if ( - not self.debug - or not isinstance(request.routing_exception, RequestRedirect) - or request.routing_exception.code in {307, 308} - or request.method in {"GET", "HEAD", "OPTIONS"} - ): - raise request.routing_exception # type: ignore[misc] - - from .debughelpers import FormDataRoutingRedirect - - raise FormDataRoutingRedirect(request) - - def update_template_context(self, context: dict[str, t.Any]) -> None: - """Update the template context with some commonly used variables. - This injects request, session, config and g into the template - context as well as everything template context processors want - to inject. Note that the as of Flask 0.6, the original values - in the context will not be overridden if a context processor - decides to return a value with the same key. - - :param context: the context as a dictionary that is updated in place - to add extra variables. - """ - names: t.Iterable[str | None] = (None,) - - # A template may be rendered outside a request context. - if request: - names = chain(names, reversed(request.blueprints)) - - # The values passed to render_template take precedence. Keep a - # copy to re-apply after all context functions. - orig_ctx = context.copy() - - for name in names: - if name in self.template_context_processors: - for func in self.template_context_processors[name]: - context.update(self.ensure_sync(func)()) - - context.update(orig_ctx) - - def make_shell_context(self) -> dict[str, t.Any]: - """Returns the shell context for an interactive shell for this - application. This runs all the registered shell context - processors. - - .. versionadded:: 0.11 - """ - rv = {"app": self, "g": g} - for processor in self.shell_context_processors: - rv.update(processor()) - return rv - - def run( - self, - host: str | None = None, - port: int | None = None, - debug: bool | None = None, - load_dotenv: bool = True, - **options: t.Any, - ) -> None: - """Runs the application on a local development server. - - Do not use ``run()`` in a production setting. It is not intended to - meet security and performance requirements for a production server. - Instead, see :doc:`/deploying/index` for WSGI server recommendations. - - If the :attr:`debug` flag is set the server will automatically reload - for code changes and show a debugger in case an exception happened. - - If you want to run the application in debug mode, but disable the - code execution on the interactive debugger, you can pass - ``use_evalex=False`` as parameter. This will keep the debugger's - traceback screen active, but disable code execution. - - It is not recommended to use this function for development with - automatic reloading as this is badly supported. Instead you should - be using the :command:`flask` command line script's ``run`` support. - - .. admonition:: Keep in Mind - - Flask will suppress any server error with a generic error page - unless it is in debug mode. As such to enable just the - interactive debugger without the code reloading, you have to - invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. - Setting ``use_debugger`` to ``True`` without being in debug mode - won't catch any exceptions because there won't be any to - catch. - - :param host: the hostname to listen on. 
Set this to ``'0.0.0.0'`` to - have the server available externally as well. Defaults to - ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable - if present. - :param port: the port of the webserver. Defaults to ``5000`` or the - port defined in the ``SERVER_NAME`` config variable if present. - :param debug: if given, enable or disable debug mode. See - :attr:`debug`. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param options: the options to be forwarded to the underlying Werkzeug - server. See :func:`werkzeug.serving.run_simple` for more - information. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment - variables from :file:`.env` and :file:`.flaskenv` files. - - The :envvar:`FLASK_DEBUG` environment variable will override :attr:`debug`. - - Threaded mode is enabled by default. - - .. versionchanged:: 0.10 - The default port is now picked from the ``SERVER_NAME`` - variable. - """ - # Ignore this call so that it doesn't start another server if - # the 'flask run' command is used. - if os.environ.get("FLASK_RUN_FROM_CLI") == "true": - if not is_running_from_reloader(): - click.secho( - " * Ignoring a call to 'app.run()' that would block" - " the current 'flask' CLI command.\n" - " Only call 'app.run()' in an 'if __name__ ==" - ' "__main__"\' guard.', - fg="red", - ) - - return - - if get_load_dotenv(load_dotenv): - cli.load_dotenv() - - # if set, env var overrides existing value - if "FLASK_DEBUG" in os.environ: - self.debug = get_debug_flag() - - # debug passed to method overrides all other sources - if debug is not None: - self.debug = bool(debug) - - server_name = self.config.get("SERVER_NAME") - sn_host = sn_port = None - - if server_name: - sn_host, _, sn_port = server_name.partition(":") - - if not host: - if sn_host: - host = sn_host - else: - host = "127.0.0.1" - - if port or port == 0: - port = int(port) - elif sn_port: - port = int(sn_port) - else: - port = 5000 - - options.setdefault("use_reloader", self.debug) - options.setdefault("use_debugger", self.debug) - options.setdefault("threaded", True) - - cli.show_server_banner(self.debug, self.name) - - from werkzeug.serving import run_simple - - try: - run_simple(t.cast(str, host), port, self, **options) - finally: - # reset the first request information if the development server - # reset normally. This makes it possible to restart the server - # without reloader and that stuff from an interactive shell. - self._got_first_request = False - - def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> FlaskClient: - """Creates a test client for this application. For information - about unit testing head over to :doc:`/testing`. - - Note that if you are testing for assertions or exceptions in your - application code, you must set ``app.testing = True`` in order for the - exceptions to propagate to the test client. Otherwise, the exception - will be handled by the application (not visible to the test client) and - the only indication of an AssertionError or other exception will be a - 500 status code response to the test client. See the :attr:`testing` - attribute. For example:: - - app.testing = True - client = app.test_client() - - The test client can be used in a ``with`` block to defer the closing down - of the context until the end of the ``with`` block. 
This is useful if - you want to access the context locals for testing:: - - with app.test_client() as c: - rv = c.get('/?vodka=42') - assert request.args['vodka'] == '42' - - Additionally, you may pass optional keyword arguments that will then - be passed to the application's :attr:`test_client_class` constructor. - For example:: - - from flask.testing import FlaskClient - - class CustomClient(FlaskClient): - def __init__(self, *args, **kwargs): - self._authentication = kwargs.pop("authentication") - super(CustomClient,self).__init__( *args, **kwargs) - - app.test_client_class = CustomClient - client = app.test_client(authentication='Basic ....') - - See :class:`~flask.testing.FlaskClient` for more information. - - .. versionchanged:: 0.4 - added support for ``with`` block usage for the client. - - .. versionadded:: 0.7 - The `use_cookies` parameter was added as well as the ability - to override the client to be used by setting the - :attr:`test_client_class` attribute. - - .. versionchanged:: 0.11 - Added `**kwargs` to support passing additional keyword arguments to - the constructor of :attr:`test_client_class`. - """ - cls = self.test_client_class - if cls is None: - from .testing import FlaskClient as cls - return cls( # type: ignore - self, self.response_class, use_cookies=use_cookies, **kwargs - ) - - def test_cli_runner(self, **kwargs: t.Any) -> FlaskCliRunner: - """Create a CLI runner for testing CLI commands. - See :ref:`testing-cli`. - - Returns an instance of :attr:`test_cli_runner_class`, by default - :class:`~flask.testing.FlaskCliRunner`. The Flask app object is - passed as the first argument. - - .. versionadded:: 1.0 - """ - cls = self.test_cli_runner_class - - if cls is None: - from .testing import FlaskCliRunner as cls - - return cls(self, **kwargs) # type: ignore - - def handle_http_exception( - self, e: HTTPException - ) -> HTTPException | ft.ResponseReturnValue: - """Handles an HTTP exception. By default this will invoke the - registered error handlers and fall back to returning the - exception as response. - - .. versionchanged:: 1.0.3 - ``RoutingException``, used internally for actions such as - slash redirects during routing, is not passed to error - handlers. - - .. versionchanged:: 1.0 - Exceptions are looked up by code *and* by MRO, so - ``HTTPException`` subclasses can be handled with a catch-all - handler for the base ``HTTPException``. - - .. versionadded:: 0.3 - """ - # Proxy exceptions don't have error codes. We want to always return - # those unchanged as errors - if e.code is None: - return e - - # RoutingExceptions are used internally to trigger routing - # actions, such as slash redirects raising RequestRedirect. They - # are not raised or handled in user code. - if isinstance(e, RoutingException): - return e - - handler = self._find_error_handler(e, request.blueprints) - if handler is None: - return e - return self.ensure_sync(handler)(e) # type: ignore[no-any-return] - - def handle_user_exception( - self, e: Exception - ) -> HTTPException | ft.ResponseReturnValue: - """This method is called whenever an exception occurs that - should be handled. A special case is :class:`~werkzeug - .exceptions.HTTPException` which is forwarded to the - :meth:`handle_http_exception` method. This function will either - return a response value or reraise the exception with the same - traceback. - - .. versionchanged:: 1.0 - Key errors raised from request data like ``form`` show the - bad key in debug mode rather than a generic bad request - message. - - .. 
versionadded:: 0.7 - """ - if isinstance(e, BadRequestKeyError) and ( - self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] - ): - e.show_exception = True - - if isinstance(e, HTTPException) and not self.trap_http_exception(e): - return self.handle_http_exception(e) - - handler = self._find_error_handler(e, request.blueprints) - - if handler is None: - raise - - return self.ensure_sync(handler)(e) # type: ignore[no-any-return] - - def handle_exception(self, e: Exception) -> Response: - """Handle an exception that did not have an error handler - associated with it, or that was raised from an error handler. - This always causes a 500 ``InternalServerError``. - - Always sends the :data:`got_request_exception` signal. - - If :data:`PROPAGATE_EXCEPTIONS` is ``True``, such as in debug - mode, the error will be re-raised so that the debugger can - display it. Otherwise, the original exception is logged, and - an :exc:`~werkzeug.exceptions.InternalServerError` is returned. - - If an error handler is registered for ``InternalServerError`` or - ``500``, it will be used. For consistency, the handler will - always receive the ``InternalServerError``. The original - unhandled exception is available as ``e.original_exception``. - - .. versionchanged:: 1.1.0 - Always passes the ``InternalServerError`` instance to the - handler, setting ``original_exception`` to the unhandled - error. - - .. versionchanged:: 1.1.0 - ``after_request`` functions and other finalization is done - even for the default 500 response when there is no handler. - - .. versionadded:: 0.3 - """ - exc_info = sys.exc_info() - got_request_exception.send(self, _async_wrapper=self.ensure_sync, exception=e) - propagate = self.config["PROPAGATE_EXCEPTIONS"] - - if propagate is None: - propagate = self.testing or self.debug - - if propagate: - # Re-raise if called with an active exception, otherwise - # raise the passed in exception. - if exc_info[1] is e: - raise - - raise e - - self.log_exception(exc_info) - server_error: InternalServerError | ft.ResponseReturnValue - server_error = InternalServerError(original_exception=e) - handler = self._find_error_handler(server_error, request.blueprints) - - if handler is not None: - server_error = self.ensure_sync(handler)(server_error) - - return self.finalize_request(server_error, from_error_handler=True) - - def log_exception( - self, - exc_info: (tuple[type, BaseException, TracebackType] | tuple[None, None, None]), - ) -> None: - """Logs an exception. This is called by :meth:`handle_exception` - if debugging is disabled and right before the handler is called. - The default implementation logs the exception as error on the - :attr:`logger`. - - .. versionadded:: 0.8 - """ - self.logger.error( - f"Exception on {request.path} [{request.method}]", exc_info=exc_info - ) - - def dispatch_request(self) -> ft.ResponseReturnValue: - """Does the request dispatching. Matches the URL and returns the - return value of the view or error handler. This does not have to - be a response object. In order to convert the return value to a - proper response object, call :func:`make_response`. - - .. versionchanged:: 0.7 - This no longer does the exception handling, this code was - moved to the new :meth:`full_dispatch_request`. 
- """ - req = request_ctx.request - if req.routing_exception is not None: - self.raise_routing_exception(req) - rule: Rule = req.url_rule # type: ignore[assignment] - # if we provide automatic options for this URL and the - # request came with the OPTIONS method, reply automatically - if ( - getattr(rule, "provide_automatic_options", False) - and req.method == "OPTIONS" - ): - return self.make_default_options_response() - # otherwise dispatch to the handler for that endpoint - view_args: dict[str, t.Any] = req.view_args # type: ignore[assignment] - return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) # type: ignore[no-any-return] - - def full_dispatch_request(self) -> Response: - """Dispatches the request and on top of that performs request - pre and postprocessing as well as HTTP exception catching and - error handling. - - .. versionadded:: 0.7 - """ - self._got_first_request = True - - try: - request_started.send(self, _async_wrapper=self.ensure_sync) - rv = self.preprocess_request() - if rv is None: - rv = self.dispatch_request() - except Exception as e: - rv = self.handle_user_exception(e) - return self.finalize_request(rv) - - def finalize_request( - self, - rv: ft.ResponseReturnValue | HTTPException, - from_error_handler: bool = False, - ) -> Response: - """Given the return value from a view function this finalizes - the request by converting it into a response and invoking the - postprocessing functions. This is invoked for both normal - request dispatching as well as error handlers. - - Because this means that it might be called as a result of a - failure a special safe mode is available which can be enabled - with the `from_error_handler` flag. If enabled, failures in - response processing will be logged and otherwise ignored. - - :internal: - """ - response = self.make_response(rv) - try: - response = self.process_response(response) - request_finished.send( - self, _async_wrapper=self.ensure_sync, response=response - ) - except Exception: - if not from_error_handler: - raise - self.logger.exception( - "Request finalizing failed with an error while handling an error" - ) - return response - - def make_default_options_response(self) -> Response: - """This method is called to create the default ``OPTIONS`` response. - This can be changed through subclassing to change the default - behavior of ``OPTIONS`` responses. - - .. versionadded:: 0.7 - """ - adapter = request_ctx.url_adapter - methods = adapter.allowed_methods() # type: ignore[union-attr] - rv = self.response_class() - rv.allow.update(methods) - return rv - - def ensure_sync(self, func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Ensure that the function is synchronous for WSGI workers. - Plain ``def`` functions are returned as-is. ``async def`` - functions are wrapped to run and wait for the response. - - Override this method to change how the app runs async views. - - .. versionadded:: 2.0 - """ - if iscoroutinefunction(func): - return self.async_to_sync(func) - - return func - - def async_to_sync( - self, func: t.Callable[..., t.Coroutine[t.Any, t.Any, t.Any]] - ) -> t.Callable[..., t.Any]: - """Return a sync function that will run the coroutine function. - - .. code-block:: python - - result = app.async_to_sync(func)(*args, **kwargs) - - Override this method to change how the app converts async code - to be synchronously callable. - - .. 
versionadded:: 2.0 - """ - try: - from asgiref.sync import async_to_sync as asgiref_async_to_sync - except ImportError: - raise RuntimeError( - "Install Flask with the 'async' extra in order to use async views." - ) from None - - return asgiref_async_to_sync(func) - - def url_for( - self, - /, - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, - ) -> str: - """Generate a URL to the given endpoint with the given values. - - This is called by :func:`flask.url_for`, and can be called - directly as well. - - An *endpoint* is the name of a URL rule, usually added with - :meth:`@app.route() `, and usually the same name as the - view function. A route defined in a :class:`~flask.Blueprint` - will prepend the blueprint's name separated by a ``.`` to the - endpoint. - - In some cases, such as email messages, you want URLs to include - the scheme and domain, like ``https://example.com/hello``. When - not in an active request, URLs will be external by default, but - this requires setting :data:`SERVER_NAME` so Flask knows what - domain to use. :data:`APPLICATION_ROOT` and - :data:`PREFERRED_URL_SCHEME` should also be configured as - needed. This config is only used when not in an active request. - - Functions can be decorated with :meth:`url_defaults` to modify - keyword arguments before the URL is built. - - If building fails for some reason, such as an unknown endpoint - or incorrect values, the app's :meth:`handle_url_build_error` - method is called. If that returns a string, that is returned, - otherwise a :exc:`~werkzeug.routing.BuildError` is raised. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it - is external. - :param _external: If given, prefer the URL to be internal - (False) or require it to be external (True). External URLs - include the scheme and domain. When not in an active - request, URLs are external by default. - :param values: Values to use for the variable parts of the URL - rule. Unknown keys are appended as query string arguments, - like ``?a=b&c=d``. - - .. versionadded:: 2.2 - Moved from ``flask.url_for``, which calls this method. - """ - req_ctx = _cv_request.get(None) - - if req_ctx is not None: - url_adapter = req_ctx.url_adapter - blueprint_name = req_ctx.request.blueprint - - # If the endpoint starts with "." and the request matches a - # blueprint, the endpoint is relative to the blueprint. - if endpoint[:1] == ".": - if blueprint_name is not None: - endpoint = f"{blueprint_name}{endpoint}" - else: - endpoint = endpoint[1:] - - # When in a request, generate a URL without scheme and - # domain by default, unless a scheme is given. - if _external is None: - _external = _scheme is not None - else: - app_ctx = _cv_app.get(None) - - # If called by helpers.url_for, an app context is active, - # use its url_adapter. Otherwise, app.url_for was called - # directly, build an adapter. - if app_ctx is not None: - url_adapter = app_ctx.url_adapter - else: - url_adapter = self.create_url_adapter(None) - - if url_adapter is None: - raise RuntimeError( - "Unable to build URLs outside an active request" - " without 'SERVER_NAME' configured. 
Also configure" - " 'APPLICATION_ROOT' and 'PREFERRED_URL_SCHEME' as" - " needed." - ) - - # When outside a request, generate a URL with scheme and - # domain by default. - if _external is None: - _external = True - - # It is an error to set _scheme when _external=False, in order - # to avoid accidental insecure URLs. - if _scheme is not None and not _external: - raise ValueError("When specifying '_scheme', '_external' must be True.") - - self.inject_url_defaults(endpoint, values) - - try: - rv = url_adapter.build( # type: ignore[union-attr] - endpoint, - values, - method=_method, - url_scheme=_scheme, - force_external=_external, - ) - except BuildError as error: - values.update( - _anchor=_anchor, _method=_method, _scheme=_scheme, _external=_external - ) - return self.handle_url_build_error(error, endpoint, values) - - if _anchor is not None: - _anchor = _url_quote(_anchor, safe="%!#$&'()*+,/:;=?@") - rv = f"{rv}#{_anchor}" - - return rv - - def make_response(self, rv: ft.ResponseReturnValue) -> Response: - """Convert the return value from a view function to an instance of - :attr:`response_class`. - - :param rv: the return value from the view function. The view function - must return a response. Returning ``None``, or the view ending - without returning, is not allowed. The following types are allowed - for ``view_rv``: - - ``str`` - A response object is created with the string encoded to UTF-8 - as the body. - - ``bytes`` - A response object is created with the bytes as the body. - - ``dict`` - A dictionary that will be jsonify'd before being returned. - - ``list`` - A list that will be jsonify'd before being returned. - - ``generator`` or ``iterator`` - A generator that returns ``str`` or ``bytes`` to be - streamed as the response. - - ``tuple`` - Either ``(body, status, headers)``, ``(body, status)``, or - ``(body, headers)``, where ``body`` is any of the other types - allowed here, ``status`` is a string or an integer, and - ``headers`` is a dictionary or a list of ``(key, value)`` - tuples. If ``body`` is a :attr:`response_class` instance, - ``status`` overwrites the exiting value and ``headers`` are - extended. - - :attr:`response_class` - The object is returned unchanged. - - other :class:`~werkzeug.wrappers.Response` class - The object is coerced to :attr:`response_class`. - - :func:`callable` - The function is called as a WSGI application. The result is - used to create a response object. - - .. versionchanged:: 2.2 - A generator will be converted to a streaming response. - A list will be converted to a JSON response. - - .. versionchanged:: 1.1 - A dict will be converted to a JSON response. - - .. versionchanged:: 0.9 - Previously a tuple was interpreted as the arguments for the - response object. - """ - - status: int | None = None - headers: HeadersValue | None = None - - # unpack tuple returns - if isinstance(rv, tuple): - len_rv = len(rv) - - # a 3-tuple is unpacked directly - if len_rv == 3: - rv, status, headers = rv # type: ignore[misc] - # decide if a 2-tuple has status or headers - elif len_rv == 2: - if isinstance(rv[1], (Headers, dict, tuple, list)): - rv, headers = rv # pyright: ignore - else: - rv, status = rv # type: ignore[assignment,misc] - # other sized tuples are not allowed - else: - raise TypeError( - "The view function did not return a valid response tuple." - " The tuple must have the form (body, status, headers)," - " (body, status), or (body, headers)." 
- ) - - # the body must not be None - if rv is None: - raise TypeError( - f"The view function for {request.endpoint!r} did not" - " return a valid response. The function either returned" - " None or ended without a return statement." - ) - - # make sure the body is an instance of the response class - if not isinstance(rv, self.response_class): - if isinstance(rv, (str, bytes, bytearray)) or isinstance(rv, cabc.Iterator): - # let the response class set the status and headers instead of - # waiting to do it manually, so that the class can handle any - # special logic - rv = self.response_class( - rv, # pyright: ignore - status=status, - headers=headers, # type: ignore[arg-type] - ) - status = headers = None - elif isinstance(rv, (dict, list)): - rv = self.json.response(rv) - elif isinstance(rv, BaseResponse) or callable(rv): - # evaluate a WSGI callable, or coerce a different response - # class to the correct type - try: - rv = self.response_class.force_type( - rv, # type: ignore[arg-type] - request.environ, - ) - except TypeError as e: - raise TypeError( - f"{e}\nThe view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it" - f" was a {type(rv).__name__}." - ).with_traceback(sys.exc_info()[2]) from None - else: - raise TypeError( - "The view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it was a" - f" {type(rv).__name__}." - ) - - rv = t.cast(Response, rv) - # prefer the status if it was provided - if status is not None: - if isinstance(status, (str, bytes, bytearray)): - rv.status = status - else: - rv.status_code = status - - # extend existing headers with provided headers - if headers: - rv.headers.update(headers) - - return rv - - def preprocess_request(self) -> ft.ResponseReturnValue | None: - """Called before the request is dispatched. Calls - :attr:`url_value_preprocessors` registered with the app and the - current blueprint (if any). Then calls :attr:`before_request_funcs` - registered with the app and the blueprint. - - If any :meth:`before_request` handler returns a non-None value, the - value is handled as if it was the return value from the view, and - further request handling is stopped. - """ - names = (None, *reversed(request.blueprints)) - - for name in names: - if name in self.url_value_preprocessors: - for url_func in self.url_value_preprocessors[name]: - url_func(request.endpoint, request.view_args) - - for name in names: - if name in self.before_request_funcs: - for before_func in self.before_request_funcs[name]: - rv = self.ensure_sync(before_func)() - - if rv is not None: - return rv # type: ignore[no-any-return] - - return None - - def process_response(self, response: Response) -> Response: - """Can be overridden in order to modify the response object - before it's sent to the WSGI server. By default this will - call all the :meth:`after_request` decorated functions. - - .. versionchanged:: 0.5 - As of Flask 0.5 the functions registered for after request - execution are called in reverse order of registration. - - :param response: a :attr:`response_class` object. - :return: a new response object or the same, has to be an - instance of :attr:`response_class`. 
- """ - ctx = request_ctx._get_current_object() # type: ignore[attr-defined] - - for func in ctx._after_request_functions: - response = self.ensure_sync(func)(response) - - for name in chain(request.blueprints, (None,)): - if name in self.after_request_funcs: - for func in reversed(self.after_request_funcs[name]): - response = self.ensure_sync(func)(response) - - if not self.session_interface.is_null_session(ctx.session): - self.session_interface.save_session(self, ctx.session, response) - - return response - - def do_teardown_request( - self, - exc: BaseException | None = _sentinel, # type: ignore[assignment] - ) -> None: - """Called after the request is dispatched and the response is - returned, right before the request context is popped. - - This calls all functions decorated with - :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` - if a blueprint handled the request. Finally, the - :data:`request_tearing_down` signal is sent. - - This is called by - :meth:`RequestContext.pop() `, - which may be delayed during testing to maintain access to - resources. - - :param exc: An unhandled exception raised while dispatching the - request. Detected from the current exception information if - not passed. Passed to each teardown function. - - .. versionchanged:: 0.9 - Added the ``exc`` argument. - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for name in chain(request.blueprints, (None,)): - if name in self.teardown_request_funcs: - for func in reversed(self.teardown_request_funcs[name]): - self.ensure_sync(func)(exc) - - request_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def do_teardown_appcontext( - self, - exc: BaseException | None = _sentinel, # type: ignore[assignment] - ) -> None: - """Called right before the application context is popped. - - When handling a request, the application context is popped - after the request context. See :meth:`do_teardown_request`. - - This calls all functions decorated with - :meth:`teardown_appcontext`. Then the - :data:`appcontext_tearing_down` signal is sent. - - This is called by - :meth:`AppContext.pop() `. - - .. versionadded:: 0.9 - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for func in reversed(self.teardown_appcontext_funcs): - self.ensure_sync(func)(exc) - - appcontext_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def app_context(self) -> AppContext: - """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` - block to push the context, which will make :data:`current_app` - point at this application. - - An application context is automatically pushed by - :meth:`RequestContext.push() ` - when handling a request, and when running a CLI command. Use - this to manually create a context outside of these situations. - - :: - - with app.app_context(): - init_db() - - See :doc:`/appcontext`. - - .. versionadded:: 0.9 - """ - return AppContext(self) - - def request_context(self, environ: WSGIEnvironment) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` representing a - WSGI environment. Use a ``with`` block to push the context, - which will make :data:`request` point at this request. - - See :doc:`/reqcontext`. - - Typically you should not call this from your own code. A request - context is automatically pushed by the :meth:`wsgi_app` when - handling a request. Use :meth:`test_request_context` to create - an environment and context instead of this method. 
- - :param environ: a WSGI environment - """ - return RequestContext(self, environ) - - def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` for a WSGI - environment created from the given values. This is mostly useful - during testing, where you may want to run a function that uses - request data without dispatching a full request. - - See :doc:`/reqcontext`. - - Use a ``with`` block to push the context, which will make - :data:`request` point at the request for the created - environment. :: - - with app.test_request_context(...): - generate_report() - - When using the shell, it may be easier to push and pop the - context manually to avoid indentation. :: - - ctx = app.test_request_context(...) - ctx.push() - ... - ctx.pop() - - Takes the same arguments as Werkzeug's - :class:`~werkzeug.test.EnvironBuilder`, with some defaults from - the application. See the linked Werkzeug docs for most of the - available arguments. Flask-specific behavior is listed here. - - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to - :data:`SERVER_NAME`. - :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. - :param data: The request body, either as a string or a dict of - form keys and values. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - from .testing import EnvironBuilder - - builder = EnvironBuilder(self, *args, **kwargs) - - try: - return self.request_context(builder.get_environ()) - finally: - builder.close() - - def wsgi_app( - self, environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - """The actual WSGI application. This is not implemented in - :meth:`__call__` so that middlewares can be applied without - losing a reference to the app object. Instead of doing this:: - - app = MyMiddleware(app) - - It's a better idea to do this instead:: - - app.wsgi_app = MyMiddleware(app.wsgi_app) - - Then you still have the original application object around and - can continue to call methods on it. - - .. versionchanged:: 0.7 - Teardown events for the request and app contexts are called - even if an unhandled error occurs. Other events may not be - called depending on when an error occurs during dispatch. - See :ref:`callbacks-and-errors`. - - :param environ: A WSGI environment. - :param start_response: A callable accepting a status code, - a list of headers, and an optional exception context to - start the response. 
- """ - ctx = self.request_context(environ) - error: BaseException | None = None - try: - try: - ctx.push() - response = self.full_dispatch_request() - except Exception as e: - error = e - response = self.handle_exception(e) - except: # noqa: B001 - error = sys.exc_info()[1] - raise - return response(environ, start_response) - finally: - if "werkzeug.debug.preserve_context" in environ: - environ["werkzeug.debug.preserve_context"](_cv_app.get()) - environ["werkzeug.debug.preserve_context"](_cv_request.get()) - - if error is not None and self.should_ignore_error(error): - error = None - - ctx.pop(error) - - def __call__( - self, environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - """The WSGI server calls the Flask application object as the - WSGI application. This calls :meth:`wsgi_app`, which can be - wrapped to apply middleware. - """ - return self.wsgi_app(environ, start_response) diff --git a/venv/lib/python3.10/site-packages/flask/blueprints.py b/venv/lib/python3.10/site-packages/flask/blueprints.py deleted file mode 100644 index b6d4e43..0000000 --- a/venv/lib/python3.10/site-packages/flask/blueprints.py +++ /dev/null @@ -1,128 +0,0 @@ -from __future__ import annotations - -import os -import typing as t -from datetime import timedelta - -from .cli import AppGroup -from .globals import current_app -from .helpers import send_from_directory -from .sansio.blueprints import Blueprint as SansioBlueprint -from .sansio.blueprints import BlueprintSetupState as BlueprintSetupState # noqa -from .sansio.scaffold import _sentinel - -if t.TYPE_CHECKING: # pragma: no cover - from .wrappers import Response - - -class Blueprint(SansioBlueprint): - def __init__( - self, - name: str, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - url_prefix: str | None = None, - subdomain: str | None = None, - url_defaults: dict[str, t.Any] | None = None, - root_path: str | None = None, - cli_group: str | None = _sentinel, # type: ignore - ) -> None: - super().__init__( - name, - import_name, - static_folder, - static_url_path, - template_folder, - url_prefix, - subdomain, - url_defaults, - root_path, - cli_group, - ) - - #: The Click command group for registering CLI commands for this - #: object. The commands are available from the ``flask`` command - #: once the application has been discovered and blueprints have - #: been registered. - self.cli = AppGroup() - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - def get_send_file_max_age(self, filename: str | None) -> int | None: - """Used by :func:`send_file` to determine the ``max_age`` cache - value for a given file path if it wasn't passed. - - By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from - the configuration of :data:`~flask.current_app`. This defaults - to ``None``, which tells the browser to use conditional requests - instead of a timed cache, which is usually preferable. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionchanged:: 2.0 - The default configuration is ``None`` instead of 12 hours. - - .. 
versionadded:: 0.9 - """ - value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] - - if value is None: - return None - - if isinstance(value, timedelta): - return int(value.total_seconds()) - - return value # type: ignore[no-any-return] - - def send_static_file(self, filename: str) -> Response: - """The view function used to serve files from - :attr:`static_folder`. A route is automatically registered for - this view at :attr:`static_url_path` if :attr:`static_folder` is - set. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionadded:: 0.5 - - """ - if not self.has_static_folder: - raise RuntimeError("'static_folder' must be set to serve static_files.") - - # send_file only knows to call get_send_file_max_age on the app, - # call it here so it works for blueprints too. - max_age = self.get_send_file_max_age(filename) - return send_from_directory( - t.cast(str, self.static_folder), filename, max_age=max_age - ) - - def open_resource( - self, resource: str, mode: str = "rb", encoding: str | None = "utf-8" - ) -> t.IO[t.AnyStr]: - """Open a resource file relative to :attr:`root_path` for reading. The - blueprint-relative equivalent of the app's :meth:`~.Flask.open_resource` - method. - - :param resource: Path to the resource relative to :attr:`root_path`. - :param mode: Open the file in this mode. Only reading is supported, - valid values are ``"r"`` (or ``"rt"``) and ``"rb"``. - :param encoding: Open the file with this encoding when opening in text - mode. This is ignored when opening in binary mode. - - .. versionchanged:: 3.1 - Added the ``encoding`` parameter. - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading.") - - path = os.path.join(self.root_path, resource) - - if mode == "rb": - return open(path, mode) # pyright: ignore - - return open(path, mode, encoding=encoding) diff --git a/venv/lib/python3.10/site-packages/flask/cli.py b/venv/lib/python3.10/site-packages/flask/cli.py deleted file mode 100644 index ed11f25..0000000 --- a/venv/lib/python3.10/site-packages/flask/cli.py +++ /dev/null @@ -1,1135 +0,0 @@ -from __future__ import annotations - -import ast -import collections.abc as cabc -import importlib.metadata -import inspect -import os -import platform -import re -import sys -import traceback -import typing as t -from functools import update_wrapper -from operator import itemgetter -from types import ModuleType - -import click -from click.core import ParameterSource -from werkzeug import run_simple -from werkzeug.serving import is_running_from_reloader -from werkzeug.utils import import_string - -from .globals import current_app -from .helpers import get_debug_flag -from .helpers import get_load_dotenv - -if t.TYPE_CHECKING: - import ssl - - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - from .app import Flask - - -class NoAppException(click.UsageError): - """Raised if an application cannot be found or loaded.""" - - -def find_best_app(module: ModuleType) -> Flask: - """Given a module instance this tries to find the best possible - application in the module or raises an exception. - """ - from . import Flask - - # Search for the most common names first. - for attr_name in ("app", "application"): - app = getattr(module, attr_name, None) - - if isinstance(app, Flask): - return app - - # Otherwise find the only object that is a Flask instance. 
- matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] - - if len(matches) == 1: - return matches[0] - elif len(matches) > 1: - raise NoAppException( - "Detected multiple Flask applications in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify the correct one." - ) - - # Search for app factory functions. - for attr_name in ("create_app", "make_app"): - app_factory = getattr(module, attr_name, None) - - if inspect.isfunction(app_factory): - try: - app = app_factory() - - if isinstance(app, Flask): - return app - except TypeError as e: - if not _called_with_wrong_args(app_factory): - raise - - raise NoAppException( - f"Detected factory '{attr_name}' in module '{module.__name__}'," - " but could not call it without arguments. Use" - f" '{module.__name__}:{attr_name}(args)'" - " to specify arguments." - ) from e - - raise NoAppException( - "Failed to find Flask application or factory in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify one." - ) - - -def _called_with_wrong_args(f: t.Callable[..., Flask]) -> bool: - """Check whether calling a function raised a ``TypeError`` because - the call failed or because something in the factory raised the - error. - - :param f: The function that was called. - :return: ``True`` if the call failed. - """ - tb = sys.exc_info()[2] - - try: - while tb is not None: - if tb.tb_frame.f_code is f.__code__: - # In the function, it was called successfully. - return False - - tb = tb.tb_next - - # Didn't reach the function. - return True - finally: - # Delete tb to break a circular reference. - # https://docs.python.org/2/library/sys.html#sys.exc_info - del tb - - -def find_app_by_string(module: ModuleType, app_name: str) -> Flask: - """Check if the given string is a variable name or a function. Call - a function to get the app instance, or return the variable directly. - """ - from . import Flask - - # Parse app_name as a single expression to determine if it's a valid - # attribute name or function call. - try: - expr = ast.parse(app_name.strip(), mode="eval").body - except SyntaxError: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) from None - - if isinstance(expr, ast.Name): - name = expr.id - args = [] - kwargs = {} - elif isinstance(expr, ast.Call): - # Ensure the function name is an attribute name only. - if not isinstance(expr.func, ast.Name): - raise NoAppException( - f"Function reference must be a simple name: {app_name!r}." - ) - - name = expr.func.id - - # Parse the positional and keyword arguments as literals. - try: - args = [ast.literal_eval(arg) for arg in expr.args] - kwargs = { - kw.arg: ast.literal_eval(kw.value) - for kw in expr.keywords - if kw.arg is not None - } - except ValueError: - # literal_eval gives cryptic error messages, show a generic - # message with the full expression instead. - raise NoAppException( - f"Failed to parse arguments as literal values: {app_name!r}." - ) from None - else: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) - - try: - attr = getattr(module, name) - except AttributeError as e: - raise NoAppException( - f"Failed to find attribute {name!r} in {module.__name__!r}." - ) from e - - # If the attribute is a function, call it with any args and kwargs - # to get the real application. 
- if inspect.isfunction(attr): - try: - app = attr(*args, **kwargs) - except TypeError as e: - if not _called_with_wrong_args(attr): - raise - - raise NoAppException( - f"The factory {app_name!r} in module" - f" {module.__name__!r} could not be called with the" - " specified arguments." - ) from e - else: - app = attr - - if isinstance(app, Flask): - return app - - raise NoAppException( - "A valid Flask application was not obtained from" - f" '{module.__name__}:{app_name}'." - ) - - -def prepare_import(path: str) -> str: - """Given a filename this will try to calculate the python path, add it - to the search path and return the actual module name that is expected. - """ - path = os.path.realpath(path) - - fname, ext = os.path.splitext(path) - if ext == ".py": - path = fname - - if os.path.basename(path) == "__init__": - path = os.path.dirname(path) - - module_name = [] - - # move up until outside package structure (no __init__.py) - while True: - path, name = os.path.split(path) - module_name.append(name) - - if not os.path.exists(os.path.join(path, "__init__.py")): - break - - if sys.path[0] != path: - sys.path.insert(0, path) - - return ".".join(module_name[::-1]) - - -@t.overload -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: t.Literal[True] = True -) -> Flask: ... - - -@t.overload -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: t.Literal[False] = ... -) -> Flask | None: ... - - -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: bool = True -) -> Flask | None: - try: - __import__(module_name) - except ImportError: - # Reraise the ImportError if it occurred within the imported module. - # Determine this by checking whether the trace has a depth > 1. - if sys.exc_info()[2].tb_next: # type: ignore[union-attr] - raise NoAppException( - f"While importing {module_name!r}, an ImportError was" - f" raised:\n\n{traceback.format_exc()}" - ) from None - elif raise_if_not_found: - raise NoAppException(f"Could not import {module_name!r}.") from None - else: - return None - - module = sys.modules[module_name] - - if app_name is None: - return find_best_app(module) - else: - return find_app_by_string(module, app_name) - - -def get_version(ctx: click.Context, param: click.Parameter, value: t.Any) -> None: - if not value or ctx.resilient_parsing: - return - - flask_version = importlib.metadata.version("flask") - werkzeug_version = importlib.metadata.version("werkzeug") - - click.echo( - f"Python {platform.python_version()}\n" - f"Flask {flask_version}\n" - f"Werkzeug {werkzeug_version}", - color=ctx.color, - ) - ctx.exit() - - -version_option = click.Option( - ["--version"], - help="Show the Flask version.", - expose_value=False, - callback=get_version, - is_flag=True, - is_eager=True, -) - - -class ScriptInfo: - """Helper object to deal with Flask applications. This is usually not - necessary to interface with as it's used internally in the dispatching - to click. In future versions of Flask this object will most likely play - a bigger role. Typically it's created automatically by the - :class:`FlaskGroup` but you can also manually create it and pass it - onwards as click object. - - .. versionchanged:: 3.1 - Added the ``load_dotenv_defaults`` parameter and attribute. 
- """ - - def __init__( - self, - app_import_path: str | None = None, - create_app: t.Callable[..., Flask] | None = None, - set_debug_flag: bool = True, - load_dotenv_defaults: bool = True, - ) -> None: - #: Optionally the import path for the Flask application. - self.app_import_path = app_import_path - #: Optionally a function that is passed the script info to create - #: the instance of the application. - self.create_app = create_app - #: A dictionary with arbitrary data that can be associated with - #: this script info. - self.data: dict[t.Any, t.Any] = {} - self.set_debug_flag = set_debug_flag - - self.load_dotenv_defaults = get_load_dotenv(load_dotenv_defaults) - """Whether default ``.flaskenv`` and ``.env`` files should be loaded. - - ``ScriptInfo`` doesn't load anything, this is for reference when doing - the load elsewhere during processing. - - .. versionadded:: 3.1 - """ - - self._loaded_app: Flask | None = None - - def load_app(self) -> Flask: - """Loads the Flask app (if not yet loaded) and returns it. Calling - this multiple times will just result in the already loaded app to - be returned. - """ - if self._loaded_app is not None: - return self._loaded_app - app: Flask | None = None - if self.create_app is not None: - app = self.create_app() - else: - if self.app_import_path: - path, name = ( - re.split(r":(?![\\/])", self.app_import_path, maxsplit=1) + [None] - )[:2] - import_name = prepare_import(path) - app = locate_app(import_name, name) - else: - for path in ("wsgi.py", "app.py"): - import_name = prepare_import(path) - app = locate_app(import_name, None, raise_if_not_found=False) - - if app is not None: - break - - if app is None: - raise NoAppException( - "Could not locate a Flask application. Use the" - " 'flask --app' option, 'FLASK_APP' environment" - " variable, or a 'wsgi.py' or 'app.py' file in the" - " current directory." - ) - - if self.set_debug_flag: - # Update the app's debug flag through the descriptor so that - # other values repopulate as well. - app.debug = get_debug_flag() - - self._loaded_app = app - return app - - -pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def with_appcontext(f: F) -> F: - """Wraps a callback so that it's guaranteed to be executed with the - script's application context. - - Custom commands (and their options) registered under ``app.cli`` or - ``blueprint.cli`` will always have an app context available, this - decorator is not required in that case. - - .. versionchanged:: 2.2 - The app context is active for subcommands as well as the - decorated callback. The app context is always available to - ``app.cli`` command and parameter callbacks. - """ - - @click.pass_context - def decorator(ctx: click.Context, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - if not current_app: - app = ctx.ensure_object(ScriptInfo).load_app() - ctx.with_resource(app.app_context()) - - return ctx.invoke(f, *args, **kwargs) - - return update_wrapper(decorator, f) # type: ignore[return-value] - - -class AppGroup(click.Group): - """This works similar to a regular click :class:`~click.Group` but it - changes the behavior of the :meth:`command` decorator so that it - automatically wraps the functions in :func:`with_appcontext`. - - Not to be confused with :class:`FlaskGroup`. 
- """ - - def command( # type: ignore[override] - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], click.Command]: - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` - unless it's disabled by passing ``with_appcontext=False``. - """ - wrap_for_ctx = kwargs.pop("with_appcontext", True) - - def decorator(f: t.Callable[..., t.Any]) -> click.Command: - if wrap_for_ctx: - f = with_appcontext(f) - return super(AppGroup, self).command(*args, **kwargs)(f) # type: ignore[no-any-return] - - return decorator - - def group( # type: ignore[override] - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], click.Group]: - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it defaults the group class to - :class:`AppGroup`. - """ - kwargs.setdefault("cls", AppGroup) - return super().group(*args, **kwargs) # type: ignore[no-any-return] - - -def _set_app(ctx: click.Context, param: click.Option, value: str | None) -> str | None: - if value is None: - return None - - info = ctx.ensure_object(ScriptInfo) - info.app_import_path = value - return value - - -# This option is eager so the app will be available if --help is given. -# --help is also eager, so --app must be before it in the param list. -# no_args_is_help bypasses eager processing, so this option must be -# processed manually in that case to ensure FLASK_APP gets picked up. -_app_option = click.Option( - ["-A", "--app"], - metavar="IMPORT", - help=( - "The Flask application or factory function to load, in the form 'module:name'." - " Module can be a dotted import or file path. Name is not required if it is" - " 'app', 'application', 'create_app', or 'make_app', and can be 'name(args)' to" - " pass arguments." - ), - is_eager=True, - expose_value=False, - callback=_set_app, -) - - -def _set_debug(ctx: click.Context, param: click.Option, value: bool) -> bool | None: - # If the flag isn't provided, it will default to False. Don't use - # that, let debug be set by env in that case. - source = ctx.get_parameter_source(param.name) # type: ignore[arg-type] - - if source is not None and source in ( - ParameterSource.DEFAULT, - ParameterSource.DEFAULT_MAP, - ): - return None - - # Set with env var instead of ScriptInfo.load so that it can be - # accessed early during a factory function. - os.environ["FLASK_DEBUG"] = "1" if value else "0" - return value - - -_debug_option = click.Option( - ["--debug/--no-debug"], - help="Set debug mode.", - expose_value=False, - callback=_set_debug, -) - - -def _env_file_callback( - ctx: click.Context, param: click.Option, value: str | None -) -> str | None: - try: - import dotenv # noqa: F401 - except ImportError: - # Only show an error if a value was passed, otherwise we still want to - # call load_dotenv and show a message without exiting. - if value is not None: - raise click.BadParameter( - "python-dotenv must be installed to load an env file.", - ctx=ctx, - param=param, - ) from None - - # Load if a value was passed, or we want to load default files, or both. - if value is not None or ctx.obj.load_dotenv_defaults: - load_dotenv(value, load_defaults=ctx.obj.load_dotenv_defaults) - - return value - - -# This option is eager so env vars are loaded as early as possible to be -# used by other options. 
-_env_file_option = click.Option( - ["-e", "--env-file"], - type=click.Path(exists=True, dir_okay=False), - help=( - "Load environment variables from this file, taking precedence over" - " those set by '.env' and '.flaskenv'. Variables set directly in the" - " environment take highest precedence. python-dotenv must be installed." - ), - is_eager=True, - expose_value=False, - callback=_env_file_callback, -) - - -class FlaskGroup(AppGroup): - """Special subclass of the :class:`AppGroup` group that supports - loading more commands from the configured Flask app. Normally a - developer does not have to interface with this class but there are - some very advanced use cases for which it makes sense to create an - instance of this. see :ref:`custom-scripts`. - - :param add_default_commands: if this is True then the default run and - shell commands will be added. - :param add_version_option: adds the ``--version`` option. - :param create_app: an optional callback that is passed the script info and - returns the loaded app. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param set_debug_flag: Set the app's debug flag. - - .. versionchanged:: 3.1 - ``-e path`` takes precedence over default ``.env`` and ``.flaskenv`` files. - - .. versionchanged:: 2.2 - Added the ``-A/--app``, ``--debug/--no-debug``, ``-e/--env-file`` options. - - .. versionchanged:: 2.2 - An app context is pushed when running ``app.cli`` commands, so - ``@with_appcontext`` is no longer required for those commands. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment variables - from :file:`.env` and :file:`.flaskenv` files. - """ - - def __init__( - self, - add_default_commands: bool = True, - create_app: t.Callable[..., Flask] | None = None, - add_version_option: bool = True, - load_dotenv: bool = True, - set_debug_flag: bool = True, - **extra: t.Any, - ) -> None: - params: list[click.Parameter] = list(extra.pop("params", None) or ()) - # Processing is done with option callbacks instead of a group - # callback. This allows users to make a custom group callback - # without losing the behavior. --env-file must come first so - # that it is eagerly evaluated before --app. - params.extend((_env_file_option, _app_option, _debug_option)) - - if add_version_option: - params.append(version_option) - - if "context_settings" not in extra: - extra["context_settings"] = {} - - extra["context_settings"].setdefault("auto_envvar_prefix", "FLASK") - - super().__init__(params=params, **extra) - - self.create_app = create_app - self.load_dotenv = load_dotenv - self.set_debug_flag = set_debug_flag - - if add_default_commands: - self.add_command(run_command) - self.add_command(shell_command) - self.add_command(routes_command) - - self._loaded_plugin_commands = False - - def _load_plugin_commands(self) -> None: - if self._loaded_plugin_commands: - return - - if sys.version_info >= (3, 10): - from importlib import metadata - else: - # Use a backport on Python < 3.10. We technically have - # importlib.metadata on 3.8+, but the API changed in 3.10, - # so use the backport for consistency. 
- import importlib_metadata as metadata # pyright: ignore - - for ep in metadata.entry_points(group="flask.commands"): - self.add_command(ep.load(), ep.name) - - self._loaded_plugin_commands = True - - def get_command(self, ctx: click.Context, name: str) -> click.Command | None: - self._load_plugin_commands() - # Look up built-in and plugin commands, which should be - # available even if the app fails to load. - rv = super().get_command(ctx, name) - - if rv is not None: - return rv - - info = ctx.ensure_object(ScriptInfo) - - # Look up commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - app = info.load_app() - except NoAppException as e: - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - return None - - # Push an app context for the loaded app unless it is already - # active somehow. This makes the context available to parameter - # and command callbacks without needing @with_appcontext. - if not current_app or current_app._get_current_object() is not app: # type: ignore[attr-defined] - ctx.with_resource(app.app_context()) - - return app.cli.get_command(ctx, name) - - def list_commands(self, ctx: click.Context) -> list[str]: - self._load_plugin_commands() - # Start with the built-in and plugin commands. - rv = set(super().list_commands(ctx)) - info = ctx.ensure_object(ScriptInfo) - - # Add commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - rv.update(info.load_app().cli.list_commands(ctx)) - except NoAppException as e: - # When an app couldn't be loaded, show the error message - # without the traceback. - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - except Exception: - # When any other errors occurred during loading, show the - # full traceback. - click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") - - return sorted(rv) - - def make_context( - self, - info_name: str | None, - args: list[str], - parent: click.Context | None = None, - **extra: t.Any, - ) -> click.Context: - # Set a flag to tell app.run to become a no-op. If app.run was - # not in a __name__ == __main__ guard, it would start the server - # when importing, blocking whatever command is being called. - os.environ["FLASK_RUN_FROM_CLI"] = "true" - - if "obj" not in extra and "obj" not in self.context_settings: - extra["obj"] = ScriptInfo( - create_app=self.create_app, - set_debug_flag=self.set_debug_flag, - load_dotenv_defaults=self.load_dotenv, - ) - - return super().make_context(info_name, args, parent=parent, **extra) - - def parse_args(self, ctx: click.Context, args: list[str]) -> list[str]: - if (not args and self.no_args_is_help) or ( - len(args) == 1 and args[0] in self.get_help_option_names(ctx) - ): - # Attempt to load --env-file and --app early in case they - # were given as env vars. Otherwise no_args_is_help will not - # see commands from app.cli. - _env_file_option.handle_parse_result(ctx, {}, []) - _app_option.handle_parse_result(ctx, {}, []) - - return super().parse_args(ctx, args) - - -def _path_is_ancestor(path: str, other: str) -> bool: - """Take ``other`` and remove the length of ``path`` from it. Then join it - to ``path``. If it is the original value, ``path`` is an ancestor of - ``other``.""" - return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other - - -def load_dotenv( - path: str | os.PathLike[str] | None = None, load_defaults: bool = True -) -> bool: - """Load "dotenv" files to set environment variables. 
A given path takes - precedence over ``.env``, which takes precedence over ``.flaskenv``. After - loading and combining these files, values are only set if the key is not - already set in ``os.environ``. - - This is a no-op if `python-dotenv`_ is not installed. - - .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme - - :param path: Load the file at this location. - :param load_defaults: Search for and load the default ``.flaskenv`` and - ``.env`` files. - :return: ``True`` if at least one env var was loaded. - - .. versionchanged:: 3.1 - Added the ``load_defaults`` parameter. A given path takes precedence - over default files. - - .. versionchanged:: 2.0 - The current directory is not changed to the location of the - loaded file. - - .. versionchanged:: 2.0 - When loading the env files, set the default encoding to UTF-8. - - .. versionchanged:: 1.1.0 - Returns ``False`` when python-dotenv is not installed, or when - the given path isn't a file. - - .. versionadded:: 1.0 - """ - try: - import dotenv - except ImportError: - if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): - click.secho( - " * Tip: There are .env files present. Install python-dotenv" - " to use them.", - fg="yellow", - err=True, - ) - - return False - - data: dict[str, str | None] = {} - - if load_defaults: - for default_name in (".flaskenv", ".env"): - if not (default_path := dotenv.find_dotenv(default_name, usecwd=True)): - continue - - data |= dotenv.dotenv_values(default_path, encoding="utf-8") - - if path is not None and os.path.isfile(path): - data |= dotenv.dotenv_values(path, encoding="utf-8") - - for key, value in data.items(): - if key in os.environ or value is None: - continue - - os.environ[key] = value - - return bool(data) # True if at least one env var was loaded. - - -def show_server_banner(debug: bool, app_import_path: str | None) -> None: - """Show extra startup messages the first time the server is run, - ignoring the reloader. - """ - if is_running_from_reloader(): - return - - if app_import_path is not None: - click.echo(f" * Serving Flask app '{app_import_path}'") - - if debug is not None: - click.echo(f" * Debug mode: {'on' if debug else 'off'}") - - -class CertParamType(click.ParamType): - """Click option type for the ``--cert`` option. Allows either an - existing file, the string ``'adhoc'``, or an import for a - :class:`~ssl.SSLContext` object. - """ - - name = "path" - - def __init__(self) -> None: - self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) - - def convert( - self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None - ) -> t.Any: - try: - import ssl - except ImportError: - raise click.BadParameter( - 'Using "--cert" requires Python to be compiled with SSL support.', - ctx, - param, - ) from None - - try: - return self.path_type(value, param, ctx) - except click.BadParameter: - value = click.STRING(value, param, ctx).lower() - - if value == "adhoc": - try: - import cryptography # noqa: F401 - except ImportError: - raise click.BadParameter( - "Using ad-hoc certificates requires the cryptography library.", - ctx, - param, - ) from None - - return value - - obj = import_string(value, silent=True) - - if isinstance(obj, ssl.SSLContext): - return obj - - raise - - -def _validate_key(ctx: click.Context, param: click.Parameter, value: t.Any) -> t.Any: - """The ``--key`` option must be specified when ``--cert`` is a file. - Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. 
- """ - cert = ctx.params.get("cert") - is_adhoc = cert == "adhoc" - - try: - import ssl - except ImportError: - is_context = False - else: - is_context = isinstance(cert, ssl.SSLContext) - - if value is not None: - if is_adhoc: - raise click.BadParameter( - 'When "--cert" is "adhoc", "--key" is not used.', ctx, param - ) - - if is_context: - raise click.BadParameter( - 'When "--cert" is an SSLContext object, "--key" is not used.', - ctx, - param, - ) - - if not cert: - raise click.BadParameter('"--cert" must also be specified.', ctx, param) - - ctx.params["cert"] = cert, value - - else: - if cert and not (is_adhoc or is_context): - raise click.BadParameter('Required when using "--cert".', ctx, param) - - return value - - -class SeparatedPathType(click.Path): - """Click option type that accepts a list of values separated by the - OS's path separator (``:``, ``;`` on Windows). Each value is - validated as a :class:`click.Path` type. - """ - - def convert( - self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None - ) -> t.Any: - items = self.split_envvar_value(value) - # can't call no-arg super() inside list comprehension until Python 3.12 - super_convert = super().convert - return [super_convert(item, param, ctx) for item in items] - - -@click.command("run", short_help="Run a development server.") -@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") -@click.option("--port", "-p", default=5000, help="The port to bind to.") -@click.option( - "--cert", - type=CertParamType(), - help="Specify a certificate file to use HTTPS.", - is_eager=True, -) -@click.option( - "--key", - type=click.Path(exists=True, dir_okay=False, resolve_path=True), - callback=_validate_key, - expose_value=False, - help="The key file to use when specifying a certificate.", -) -@click.option( - "--reload/--no-reload", - default=None, - help="Enable or disable the reloader. By default the reloader " - "is active if debug is enabled.", -) -@click.option( - "--debugger/--no-debugger", - default=None, - help="Enable or disable the debugger. By default the debugger " - "is active if debug is enabled.", -) -@click.option( - "--with-threads/--without-threads", - default=True, - help="Enable or disable multithreading.", -) -@click.option( - "--extra-files", - default=None, - type=SeparatedPathType(), - help=( - "Extra files that trigger a reload on change. Multiple paths" - f" are separated by {os.path.pathsep!r}." - ), -) -@click.option( - "--exclude-patterns", - default=None, - type=SeparatedPathType(), - help=( - "Files matching these fnmatch patterns will not trigger a reload" - " on change. Multiple patterns are separated by" - f" {os.path.pathsep!r}." - ), -) -@pass_script_info -def run_command( - info: ScriptInfo, - host: str, - port: int, - reload: bool, - debugger: bool, - with_threads: bool, - cert: ssl.SSLContext | tuple[str, str | None] | t.Literal["adhoc"] | None, - extra_files: list[str] | None, - exclude_patterns: list[str] | None, -) -> None: - """Run a local development server. - - This server is for development purposes only. It does not provide - the stability, security, or performance of production WSGI servers. - - The reloader and debugger are enabled by default with the '--debug' - option. - """ - try: - app: WSGIApplication = info.load_app() # pyright: ignore - except Exception as e: - if is_running_from_reloader(): - # When reloading, print out the error immediately, but raise - # it later so the debugger or server can handle it. 
- traceback.print_exc() - err = e - - def app( - environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - raise err from None - - else: - # When not reloading, raise the error immediately so the - # command fails. - raise e from None - - debug = get_debug_flag() - - if reload is None: - reload = debug - - if debugger is None: - debugger = debug - - show_server_banner(debug, info.app_import_path) - - run_simple( - host, - port, - app, - use_reloader=reload, - use_debugger=debugger, - threaded=with_threads, - ssl_context=cert, - extra_files=extra_files, - exclude_patterns=exclude_patterns, - ) - - -run_command.params.insert(0, _debug_option) - - -@click.command("shell", short_help="Run a shell in the app context.") -@with_appcontext -def shell_command() -> None: - """Run an interactive Python shell in the context of a given - Flask application. The application will populate the default - namespace of this shell according to its configuration. - - This is useful for executing small snippets of management code - without having to manually configure the application. - """ - import code - - banner = ( - f"Python {sys.version} on {sys.platform}\n" - f"App: {current_app.import_name}\n" - f"Instance: {current_app.instance_path}" - ) - ctx: dict[str, t.Any] = {} - - # Support the regular Python interpreter startup script if someone - # is using it. - startup = os.environ.get("PYTHONSTARTUP") - if startup and os.path.isfile(startup): - with open(startup) as f: - eval(compile(f.read(), startup, "exec"), ctx) - - ctx.update(current_app.make_shell_context()) - - # Site, customize, or startup script can set a hook to call when - # entering interactive mode. The default one sets up readline with - # tab and history completion. - interactive_hook = getattr(sys, "__interactivehook__", None) - - if interactive_hook is not None: - try: - import readline - from rlcompleter import Completer - except ImportError: - pass - else: - # rlcompleter uses __main__.__dict__ by default, which is - # flask.__main__. Use the shell context instead. - readline.set_completer(Completer(ctx).complete) - - interactive_hook() - - code.interact(banner=banner, local=ctx) - - -@click.command("routes", short_help="Show the routes for the app.") -@click.option( - "--sort", - "-s", - type=click.Choice(("endpoint", "methods", "domain", "rule", "match")), - default="endpoint", - help=( - "Method to sort routes by. 'match' is the order that Flask will match routes" - " when dispatching a request." 
- ), -) -@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") -@with_appcontext -def routes_command(sort: str, all_methods: bool) -> None: - """Show all registered routes with endpoints and methods.""" - rules = list(current_app.url_map.iter_rules()) - - if not rules: - click.echo("No routes were registered.") - return - - ignored_methods = set() if all_methods else {"HEAD", "OPTIONS"} - host_matching = current_app.url_map.host_matching - has_domain = any(rule.host if host_matching else rule.subdomain for rule in rules) - rows = [] - - for rule in rules: - row = [ - rule.endpoint, - ", ".join(sorted((rule.methods or set()) - ignored_methods)), - ] - - if has_domain: - row.append((rule.host if host_matching else rule.subdomain) or "") - - row.append(rule.rule) - rows.append(row) - - headers = ["Endpoint", "Methods"] - sorts = ["endpoint", "methods"] - - if has_domain: - headers.append("Host" if host_matching else "Subdomain") - sorts.append("domain") - - headers.append("Rule") - sorts.append("rule") - - try: - rows.sort(key=itemgetter(sorts.index(sort))) - except ValueError: - pass - - rows.insert(0, headers) - widths = [max(len(row[i]) for row in rows) for i in range(len(headers))] - rows.insert(1, ["-" * w for w in widths]) - template = " ".join(f"{{{i}:<{w}}}" for i, w in enumerate(widths)) - - for row in rows: - click.echo(template.format(*row)) - - -cli = FlaskGroup( - name="flask", - help="""\ -A general utility script for Flask applications. - -An application to load must be given with the '--app' option, -'FLASK_APP' environment variable, or with a 'wsgi.py' or 'app.py' file -in the current directory. -""", -) - - -def main() -> None: - cli.main() - - -if __name__ == "__main__": - main() diff --git a/venv/lib/python3.10/site-packages/flask/config.py b/venv/lib/python3.10/site-packages/flask/config.py deleted file mode 100644 index 34ef1a5..0000000 --- a/venv/lib/python3.10/site-packages/flask/config.py +++ /dev/null @@ -1,367 +0,0 @@ -from __future__ import annotations - -import errno -import json -import os -import types -import typing as t - -from werkzeug.utils import import_string - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .sansio.app import App - - -T = t.TypeVar("T") - - -class ConfigAttribute(t.Generic[T]): - """Makes an attribute forward to the config""" - - def __init__( - self, name: str, get_converter: t.Callable[[t.Any], T] | None = None - ) -> None: - self.__name__ = name - self.get_converter = get_converter - - @t.overload - def __get__(self, obj: None, owner: None) -> te.Self: ... - - @t.overload - def __get__(self, obj: App, owner: type[App]) -> T: ... - - def __get__(self, obj: App | None, owner: type[App] | None = None) -> T | te.Self: - if obj is None: - return self - - rv = obj.config[self.__name__] - - if self.get_converter is not None: - rv = self.get_converter(rv) - - return rv # type: ignore[no-any-return] - - def __set__(self, obj: App, value: t.Any) -> None: - obj.config[self.__name__] = value - - -class Config(dict): # type: ignore[type-arg] - """Works exactly like a dict but provides ways to fill it from files - or special dictionaries. There are two common patterns to populate the - config. - - Either you can fill the config from a config file:: - - app.config.from_pyfile('yourconfig.cfg') - - Or alternatively you can define the configuration options in the - module that calls :meth:`from_object` or provide an import path to - a module that should be loaded. 
It is also possible to tell it to - use the same module and with that provide the configuration values - just before the call:: - - DEBUG = True - SECRET_KEY = 'development key' - app.config.from_object(__name__) - - In both cases (loading from any Python file or loading from modules), - only uppercase keys are added to the config. This makes it possible to use - lowercase values in the config file for temporary values that are not added - to the config or to define the config keys in the same file that implements - the application. - - Probably the most interesting way to load configurations is from an - environment variable pointing to a file:: - - app.config.from_envvar('YOURAPPLICATION_SETTINGS') - - In this case before launching the application you have to set this - environment variable to the file you want to use. On Linux and OS X - use the export statement:: - - export YOURAPPLICATION_SETTINGS='/path/to/config/file' - - On windows use `set` instead. - - :param root_path: path to which files are read relative from. When the - config object is created by the application, this is - the application's :attr:`~flask.Flask.root_path`. - :param defaults: an optional dictionary of default values - """ - - def __init__( - self, - root_path: str | os.PathLike[str], - defaults: dict[str, t.Any] | None = None, - ) -> None: - super().__init__(defaults or {}) - self.root_path = root_path - - def from_envvar(self, variable_name: str, silent: bool = False) -> bool: - """Loads a configuration from an environment variable pointing to - a configuration file. This is basically just a shortcut with nicer - error messages for this line of code:: - - app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) - - :param variable_name: name of the environment variable - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - """ - rv = os.environ.get(variable_name) - if not rv: - if silent: - return False - raise RuntimeError( - f"The environment variable {variable_name!r} is not set" - " and as such configuration could not be loaded. Set" - " this variable and make it point to a configuration" - " file" - ) - return self.from_pyfile(rv, silent=silent) - - def from_prefixed_env( - self, prefix: str = "FLASK", *, loads: t.Callable[[str], t.Any] = json.loads - ) -> bool: - """Load any environment variables that start with ``FLASK_``, - dropping the prefix from the env key for the config key. Values - are passed through a loading function to attempt to convert them - to more specific types than strings. - - Keys are loaded in :func:`sorted` order. - - The default loading function attempts to parse values as any - valid JSON type, including dicts and lists. - - Specific items in nested dicts can be set by separating the - keys with double underscores (``__``). If an intermediate key - doesn't exist, it will be initialized to an empty dict. - - :param prefix: Load env vars that start with this prefix, - separated with an underscore (``_``). - :param loads: Pass each string value to this function and use - the returned value as the config value. If any error is - raised it is ignored and the value remains a string. The - default is :func:`json.loads`. - - .. 
versionadded:: 2.1 - """ - prefix = f"{prefix}_" - - for key in sorted(os.environ): - if not key.startswith(prefix): - continue - - value = os.environ[key] - key = key.removeprefix(prefix) - - try: - value = loads(value) - except Exception: - # Keep the value as a string if loading failed. - pass - - if "__" not in key: - # A non-nested key, set directly. - self[key] = value - continue - - # Traverse nested dictionaries with keys separated by "__". - current = self - *parts, tail = key.split("__") - - for part in parts: - # If an intermediate dict does not exist, create it. - if part not in current: - current[part] = {} - - current = current[part] - - current[tail] = value - - return True - - def from_pyfile( - self, filename: str | os.PathLike[str], silent: bool = False - ) -> bool: - """Updates the values in the config from a Python file. This function - behaves as if the file was imported as module with the - :meth:`from_object` function. - - :param filename: the filename of the config. This can either be an - absolute filename or a filename relative to the - root path. - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - - .. versionadded:: 0.7 - `silent` parameter. - """ - filename = os.path.join(self.root_path, filename) - d = types.ModuleType("config") - d.__file__ = filename - try: - with open(filename, mode="rb") as config_file: - exec(compile(config_file.read(), filename, "exec"), d.__dict__) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): - return False - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - self.from_object(d) - return True - - def from_object(self, obj: object | str) -> None: - """Updates the values from the given object. An object can be of one - of the following two types: - - - a string: in this case the object with that name will be imported - - an actual object reference: that object is used directly - - Objects are usually either modules or classes. :meth:`from_object` - loads only the uppercase attributes of the module/class. A ``dict`` - object will not work with :meth:`from_object` because the keys of a - ``dict`` are not attributes of the ``dict`` class. - - Example of module-based configuration:: - - app.config.from_object('yourapplication.default_config') - from yourapplication import default_config - app.config.from_object(default_config) - - Nothing is done to the object before loading. If the object is a - class and has ``@property`` attributes, it needs to be - instantiated before being passed to this method. - - You should not use this function to load the actual configuration but - rather configuration defaults. The actual config should be loaded - with :meth:`from_pyfile` and ideally from a location not within the - package because the package might be installed system wide. - - See :ref:`config-dev-prod` for an example of class-based configuration - using :meth:`from_object`. - - :param obj: an import name or object - """ - if isinstance(obj, str): - obj = import_string(obj) - for key in dir(obj): - if key.isupper(): - self[key] = getattr(obj, key) - - def from_file( - self, - filename: str | os.PathLike[str], - load: t.Callable[[t.IO[t.Any]], t.Mapping[str, t.Any]], - silent: bool = False, - text: bool = True, - ) -> bool: - """Update the values in the config from a file that is loaded - using the ``load`` parameter. The loaded data is passed to the - :meth:`from_mapping` method. - - .. 
code-block:: python - - import json - app.config.from_file("config.json", load=json.load) - - import tomllib - app.config.from_file("config.toml", load=tomllib.load, text=False) - - :param filename: The path to the data file. This can be an - absolute path or relative to the config root path. - :param load: A callable that takes a file handle and returns a - mapping of loaded data from the file. - :type load: ``Callable[[Reader], Mapping]`` where ``Reader`` - implements a ``read`` method. - :param silent: Ignore the file if it doesn't exist. - :param text: Open the file in text or binary mode. - :return: ``True`` if the file was loaded successfully. - - .. versionchanged:: 2.3 - The ``text`` parameter was added. - - .. versionadded:: 2.0 - """ - filename = os.path.join(self.root_path, filename) - - try: - with open(filename, "r" if text else "rb") as f: - obj = load(f) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR): - return False - - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - - return self.from_mapping(obj) - - def from_mapping( - self, mapping: t.Mapping[str, t.Any] | None = None, **kwargs: t.Any - ) -> bool: - """Updates the config like :meth:`update` ignoring items with - non-upper keys. - - :return: Always returns ``True``. - - .. versionadded:: 0.11 - """ - mappings: dict[str, t.Any] = {} - if mapping is not None: - mappings.update(mapping) - mappings.update(kwargs) - for key, value in mappings.items(): - if key.isupper(): - self[key] = value - return True - - def get_namespace( - self, namespace: str, lowercase: bool = True, trim_namespace: bool = True - ) -> dict[str, t.Any]: - """Returns a dictionary containing a subset of configuration options - that match the specified namespace/prefix. Example usage:: - - app.config['IMAGE_STORE_TYPE'] = 'fs' - app.config['IMAGE_STORE_PATH'] = '/var/app/images' - app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' - image_store_config = app.config.get_namespace('IMAGE_STORE_') - - The resulting dictionary `image_store_config` would look like:: - - { - 'type': 'fs', - 'path': '/var/app/images', - 'base_url': 'http://img.website.com' - } - - This is often useful when configuration options map directly to - keyword arguments in functions or class constructors. - - :param namespace: a configuration namespace - :param lowercase: a flag indicating if the keys of the resulting - dictionary should be lowercase - :param trim_namespace: a flag indicating if the keys of the resulting - dictionary should not include the namespace - - .. versionadded:: 0.11 - """ - rv = {} - for k, v in self.items(): - if not k.startswith(namespace): - continue - if trim_namespace: - key = k[len(namespace) :] - else: - key = k - if lowercase: - key = key.lower() - rv[key] = v - return rv - - def __repr__(self) -> str: - return f"<{type(self).__name__} {dict.__repr__(self)}>" diff --git a/venv/lib/python3.10/site-packages/flask/ctx.py b/venv/lib/python3.10/site-packages/flask/ctx.py deleted file mode 100644 index 9b164d3..0000000 --- a/venv/lib/python3.10/site-packages/flask/ctx.py +++ /dev/null @@ -1,449 +0,0 @@ -from __future__ import annotations - -import contextvars -import sys -import typing as t -from functools import update_wrapper -from types import TracebackType - -from werkzeug.exceptions import HTTPException - -from . 
import typing as ft -from .globals import _cv_app -from .globals import _cv_request -from .signals import appcontext_popped -from .signals import appcontext_pushed - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIEnvironment - - from .app import Flask - from .sessions import SessionMixin - from .wrappers import Request - - -# a singleton sentinel value for parameter defaults -_sentinel = object() - - -class _AppCtxGlobals: - """A plain object. Used as a namespace for storing data during an - application context. - - Creating an app context automatically creates this object, which is - made available as the :data:`g` proxy. - - .. describe:: 'key' in g - - Check whether an attribute is present. - - .. versionadded:: 0.10 - - .. describe:: iter(g) - - Return an iterator over the attribute names. - - .. versionadded:: 0.10 - """ - - # Define attr methods to let mypy know this is a namespace object - # that has arbitrary attributes. - - def __getattr__(self, name: str) -> t.Any: - try: - return self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def __setattr__(self, name: str, value: t.Any) -> None: - self.__dict__[name] = value - - def __delattr__(self, name: str) -> None: - try: - del self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def get(self, name: str, default: t.Any | None = None) -> t.Any: - """Get an attribute by name, or a default value. Like - :meth:`dict.get`. - - :param name: Name of attribute to get. - :param default: Value to return if the attribute is not present. - - .. versionadded:: 0.10 - """ - return self.__dict__.get(name, default) - - def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: - """Get and remove an attribute by name. Like :meth:`dict.pop`. - - :param name: Name of attribute to pop. - :param default: Value to return if the attribute is not present, - instead of raising a ``KeyError``. - - .. versionadded:: 0.11 - """ - if default is _sentinel: - return self.__dict__.pop(name) - else: - return self.__dict__.pop(name, default) - - def setdefault(self, name: str, default: t.Any = None) -> t.Any: - """Get the value of an attribute if it is present, otherwise - set and return a default value. Like :meth:`dict.setdefault`. - - :param name: Name of attribute to get. - :param default: Value to set and return if the attribute is not - present. - - .. versionadded:: 0.11 - """ - return self.__dict__.setdefault(name, default) - - def __contains__(self, item: str) -> bool: - return item in self.__dict__ - - def __iter__(self) -> t.Iterator[str]: - return iter(self.__dict__) - - def __repr__(self) -> str: - ctx = _cv_app.get(None) - if ctx is not None: - return f"" - return object.__repr__(self) - - -def after_this_request( - f: ft.AfterRequestCallable[t.Any], -) -> ft.AfterRequestCallable[t.Any]: - """Executes a function after this request. This is useful to modify - response objects. The function is passed the response object and has - to return the same or a new one. - - Example:: - - @app.route('/') - def index(): - @after_this_request - def add_header(response): - response.headers['X-Foo'] = 'Parachute' - return response - return 'Hello World!' - - This is more useful if a function other than the view function wants to - modify a response. For instance think of a decorator that wants to add - some headers without converting the return value into a response object. - - .. 
versionadded:: 0.9 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'after_this_request' can only be used when a request" - " context is active, such as in a view function." - ) - - ctx._after_request_functions.append(f) - return f - - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def copy_current_request_context(f: F) -> F: - """A helper function that decorates a function to retain the current - request context. This is useful when working with greenlets. The moment - the function is decorated a copy of the request context is created and - then pushed when the function is called. The current session is also - included in the copied request context. - - Example:: - - import gevent - from flask import copy_current_request_context - - @app.route('/') - def index(): - @copy_current_request_context - def do_some_work(): - # do some work here, it can access flask.request or - # flask.session like you would otherwise in the view function. - ... - gevent.spawn(do_some_work) - return 'Regular response' - - .. versionadded:: 0.10 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'copy_current_request_context' can only be used when a" - " request context is active, such as in a view function." - ) - - ctx = ctx.copy() - - def wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any: - with ctx: # type: ignore[union-attr] - return ctx.app.ensure_sync(f)(*args, **kwargs) # type: ignore[union-attr] - - return update_wrapper(wrapper, f) # type: ignore[return-value] - - -def has_request_context() -> bool: - """If you have code that wants to test if a request context is there or - not this function can be used. For instance, you may want to take advantage - of request information if the request object is available, but fail - silently if it is unavailable. - - :: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and has_request_context(): - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - Alternatively you can also just test any of the context bound objects - (such as :class:`request` or :class:`g`) for truthness:: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and request: - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - .. versionadded:: 0.7 - """ - return _cv_request.get(None) is not None - - -def has_app_context() -> bool: - """Works like :func:`has_request_context` but for the application - context. You can also just do a boolean check on the - :data:`current_app` object instead. - - .. versionadded:: 0.9 - """ - return _cv_app.get(None) is not None - - -class AppContext: - """The app context contains application-specific information. An app - context is created and pushed at the beginning of each request if - one is not already active. An app context is also pushed when - running CLI commands. 
- """ - - def __init__(self, app: Flask) -> None: - self.app = app - self.url_adapter = app.create_url_adapter(None) - self.g: _AppCtxGlobals = app.app_ctx_globals_class() - self._cv_tokens: list[contextvars.Token[AppContext]] = [] - - def push(self) -> None: - """Binds the app context to the current context.""" - self._cv_tokens.append(_cv_app.set(self)) - appcontext_pushed.send(self.app, _async_wrapper=self.app.ensure_sync) - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the app context.""" - try: - if len(self._cv_tokens) == 1: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_appcontext(exc) - finally: - ctx = _cv_app.get() - _cv_app.reset(self._cv_tokens.pop()) - - if ctx is not self: - raise AssertionError( - f"Popped wrong app context. ({ctx!r} instead of {self!r})" - ) - - appcontext_popped.send(self.app, _async_wrapper=self.app.ensure_sync) - - def __enter__(self) -> AppContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - -class RequestContext: - """The request context contains per-request information. The Flask - app creates and pushes it at the beginning of the request, then pops - it at the end of the request. It will create the URL adapter and - request object for the WSGI environment provided. - - Do not attempt to use this class directly, instead use - :meth:`~flask.Flask.test_request_context` and - :meth:`~flask.Flask.request_context` to create this object. - - When the request context is popped, it will evaluate all the - functions registered on the application for teardown execution - (:meth:`~flask.Flask.teardown_request`). - - The request context is automatically popped at the end of the - request. When using the interactive debugger, the context will be - restored so ``request`` is still accessible. Similarly, the test - client can preserve the context after the request ends. However, - teardown functions may already have closed some resources such as - database connections. - """ - - def __init__( - self, - app: Flask, - environ: WSGIEnvironment, - request: Request | None = None, - session: SessionMixin | None = None, - ) -> None: - self.app = app - if request is None: - request = app.request_class(environ) - request.json_module = app.json - self.request: Request = request - self.url_adapter = None - try: - self.url_adapter = app.create_url_adapter(self.request) - except HTTPException as e: - self.request.routing_exception = e - self.flashes: list[tuple[str, str]] | None = None - self.session: SessionMixin | None = session - # Functions that should be executed after the request on the response - # object. These will be called before the regular "after_request" - # functions. - self._after_request_functions: list[ft.AfterRequestCallable[t.Any]] = [] - - self._cv_tokens: list[ - tuple[contextvars.Token[RequestContext], AppContext | None] - ] = [] - - def copy(self) -> RequestContext: - """Creates a copy of this request context with the same request object. - This can be used to move a request context to a different greenlet. - Because the actual request object is the same this cannot be used to - move a request context to a different thread unless access to the - request object is locked. - - .. versionadded:: 0.10 - - .. versionchanged:: 1.1 - The current session object is used instead of reloading the original - data. 
This prevents `flask.session` pointing to an out-of-date object. - """ - return self.__class__( - self.app, - environ=self.request.environ, - request=self.request, - session=self.session, - ) - - def match_request(self) -> None: - """Can be overridden by a subclass to hook into the matching - of the request. - """ - try: - result = self.url_adapter.match(return_rule=True) # type: ignore - self.request.url_rule, self.request.view_args = result # type: ignore - except HTTPException as e: - self.request.routing_exception = e - - def push(self) -> None: - # Before we push the request context we have to ensure that there - # is an application context. - app_ctx = _cv_app.get(None) - - if app_ctx is None or app_ctx.app is not self.app: - app_ctx = self.app.app_context() - app_ctx.push() - else: - app_ctx = None - - self._cv_tokens.append((_cv_request.set(self), app_ctx)) - - # Open the session at the moment that the request context is available. - # This allows a custom open_session method to use the request context. - # Only open a new session if this is the first time the request was - # pushed, otherwise stream_with_context loses the session. - if self.session is None: - session_interface = self.app.session_interface - self.session = session_interface.open_session(self.app, self.request) - - if self.session is None: - self.session = session_interface.make_null_session(self.app) - - # Match the request URL after loading the session, so that the - # session is available in custom URL converters. - if self.url_adapter is not None: - self.match_request() - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the request context and unbinds it by doing that. This will - also trigger the execution of functions registered by the - :meth:`~flask.Flask.teardown_request` decorator. - - .. versionchanged:: 0.9 - Added the `exc` argument. - """ - clear_request = len(self._cv_tokens) == 1 - - try: - if clear_request: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_request(exc) - - request_close = getattr(self.request, "close", None) - if request_close is not None: - request_close() - finally: - ctx = _cv_request.get() - token, app_ctx = self._cv_tokens.pop() - _cv_request.reset(token) - - # get rid of circular dependencies at the end of the request - # so that we don't require the GC to be active. - if clear_request: - ctx.request.environ["werkzeug.request"] = None - - if app_ctx is not None: - app_ctx.pop(exc) - - if ctx is not self: - raise AssertionError( - f"Popped wrong request context. 
({ctx!r} instead of {self!r})" - ) - - def __enter__(self) -> RequestContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - def __repr__(self) -> str: - return ( - f"<{type(self).__name__} {self.request.url!r}" - f" [{self.request.method}] of {self.app.name}>" - ) diff --git a/venv/lib/python3.10/site-packages/flask/debughelpers.py b/venv/lib/python3.10/site-packages/flask/debughelpers.py deleted file mode 100644 index 2c8c4c4..0000000 --- a/venv/lib/python3.10/site-packages/flask/debughelpers.py +++ /dev/null @@ -1,178 +0,0 @@ -from __future__ import annotations - -import typing as t - -from jinja2.loaders import BaseLoader -from werkzeug.routing import RequestRedirect - -from .blueprints import Blueprint -from .globals import request_ctx -from .sansio.app import App - -if t.TYPE_CHECKING: - from .sansio.scaffold import Scaffold - from .wrappers import Request - - -class UnexpectedUnicodeError(AssertionError, UnicodeError): - """Raised in places where we want some better error reporting for - unexpected unicode or binary data. - """ - - -class DebugFilesKeyError(KeyError, AssertionError): - """Raised from request.files during debugging. The idea is that it can - provide a better error message than just a generic KeyError/BadRequest. - """ - - def __init__(self, request: Request, key: str) -> None: - form_matches = request.form.getlist(key) - buf = [ - f"You tried to access the file {key!r} in the request.files" - " dictionary but it does not exist. The mimetype for the" - f" request is {request.mimetype!r} instead of" - " 'multipart/form-data' which means that no file contents" - " were transmitted. To fix this error you should provide" - ' enctype="multipart/form-data" in your form.' - ] - if form_matches: - names = ", ".join(repr(x) for x in form_matches) - buf.append( - "\n\nThe browser instead transmitted some file names. " - f"This was submitted: {names}" - ) - self.msg = "".join(buf) - - def __str__(self) -> str: - return self.msg - - -class FormDataRoutingRedirect(AssertionError): - """This exception is raised in debug mode if a routing redirect - would cause the browser to drop the method or body. This happens - when method is not GET, HEAD or OPTIONS and the status code is not - 307 or 308. - """ - - def __init__(self, request: Request) -> None: - exc = request.routing_exception - assert isinstance(exc, RequestRedirect) - buf = [ - f"A request was sent to '{request.url}', but routing issued" - f" a redirect to the canonical URL '{exc.new_url}'." - ] - - if f"{request.base_url}/" == exc.new_url.partition("?")[0]: - buf.append( - " The URL was defined with a trailing slash. Flask" - " will redirect to the URL with a trailing slash if it" - " was accessed without one." - ) - - buf.append( - " Send requests to the canonical URL, or use 307 or 308 for" - " routing redirects. Otherwise, browsers will drop form" - " data.\n\n" - "This exception is only raised in debug mode." - ) - super().__init__("".join(buf)) - - -def attach_enctype_error_multidict(request: Request) -> None: - """Patch ``request.files.__getitem__`` to raise a descriptive error - about ``enctype=multipart/form-data``. - - :param request: The request to patch. 
- :meta private: - """ - oldcls = request.files.__class__ - - class newcls(oldcls): # type: ignore[valid-type, misc] - def __getitem__(self, key: str) -> t.Any: - try: - return super().__getitem__(key) - except KeyError as e: - if key not in request.form: - raise - - raise DebugFilesKeyError(request, key).with_traceback( - e.__traceback__ - ) from None - - newcls.__name__ = oldcls.__name__ - newcls.__module__ = oldcls.__module__ - request.files.__class__ = newcls - - -def _dump_loader_info(loader: BaseLoader) -> t.Iterator[str]: - yield f"class: {type(loader).__module__}.{type(loader).__name__}" - for key, value in sorted(loader.__dict__.items()): - if key.startswith("_"): - continue - if isinstance(value, (tuple, list)): - if not all(isinstance(x, str) for x in value): - continue - yield f"{key}:" - for item in value: - yield f" - {item}" - continue - elif not isinstance(value, (str, int, float, bool)): - continue - yield f"{key}: {value!r}" - - -def explain_template_loading_attempts( - app: App, - template: str, - attempts: list[ - tuple[ - BaseLoader, - Scaffold, - tuple[str, str | None, t.Callable[[], bool] | None] | None, - ] - ], -) -> None: - """This should help developers understand what failed""" - info = [f"Locating template {template!r}:"] - total_found = 0 - blueprint = None - if request_ctx and request_ctx.request.blueprint is not None: - blueprint = request_ctx.request.blueprint - - for idx, (loader, srcobj, triple) in enumerate(attempts): - if isinstance(srcobj, App): - src_info = f"application {srcobj.import_name!r}" - elif isinstance(srcobj, Blueprint): - src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" - else: - src_info = repr(srcobj) - - info.append(f"{idx + 1:5}: trying loader of {src_info}") - - for line in _dump_loader_info(loader): - info.append(f" {line}") - - if triple is None: - detail = "no match" - else: - detail = f"found ({triple[1] or ''!r})" - total_found += 1 - info.append(f" -> {detail}") - - seems_fishy = False - if total_found == 0: - info.append("Error: the template could not be found.") - seems_fishy = True - elif total_found > 1: - info.append("Warning: multiple loaders returned a match for the template.") - seems_fishy = True - - if blueprint is not None and seems_fishy: - info.append( - " The template was looked up from an endpoint that belongs" - f" to the blueprint {blueprint!r}." - ) - info.append(" Maybe you did not place a template in the right folder?") - info.append(" See https://flask.palletsprojects.com/blueprints/#templates") - - app.logger.info("\n".join(info)) diff --git a/venv/lib/python3.10/site-packages/flask/globals.py b/venv/lib/python3.10/site-packages/flask/globals.py deleted file mode 100644 index e2c410c..0000000 --- a/venv/lib/python3.10/site-packages/flask/globals.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import typing as t -from contextvars import ContextVar - -from werkzeug.local import LocalProxy - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .ctx import _AppCtxGlobals - from .ctx import AppContext - from .ctx import RequestContext - from .sessions import SessionMixin - from .wrappers import Request - - -_no_app_msg = """\ -Working outside of application context. - -This typically means that you attempted to use functionality that needed -the current application. To solve this, set up an application context -with app.app_context(). 
See the documentation for more information.\ -""" -_cv_app: ContextVar[AppContext] = ContextVar("flask.app_ctx") -app_ctx: AppContext = LocalProxy( # type: ignore[assignment] - _cv_app, unbound_message=_no_app_msg -) -current_app: Flask = LocalProxy( # type: ignore[assignment] - _cv_app, "app", unbound_message=_no_app_msg -) -g: _AppCtxGlobals = LocalProxy( # type: ignore[assignment] - _cv_app, "g", unbound_message=_no_app_msg -) - -_no_req_msg = """\ -Working outside of request context. - -This typically means that you attempted to use functionality that needed -an active HTTP request. Consult the documentation on testing for -information about how to avoid this problem.\ -""" -_cv_request: ContextVar[RequestContext] = ContextVar("flask.request_ctx") -request_ctx: RequestContext = LocalProxy( # type: ignore[assignment] - _cv_request, unbound_message=_no_req_msg -) -request: Request = LocalProxy( # type: ignore[assignment] - _cv_request, "request", unbound_message=_no_req_msg -) -session: SessionMixin = LocalProxy( # type: ignore[assignment] - _cv_request, "session", unbound_message=_no_req_msg -) diff --git a/venv/lib/python3.10/site-packages/flask/helpers.py b/venv/lib/python3.10/site-packages/flask/helpers.py deleted file mode 100644 index a6b7e15..0000000 --- a/venv/lib/python3.10/site-packages/flask/helpers.py +++ /dev/null @@ -1,634 +0,0 @@ -from __future__ import annotations - -import importlib.util -import os -import sys -import typing as t -from datetime import datetime -from functools import cache -from functools import update_wrapper - -import werkzeug.utils -from werkzeug.exceptions import abort as _wz_abort -from werkzeug.utils import redirect as _wz_redirect -from werkzeug.wrappers import Response as BaseResponse - -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .globals import request_ctx -from .globals import session -from .signals import message_flashed - -if t.TYPE_CHECKING: # pragma: no cover - from .wrappers import Response - - -def get_debug_flag() -> bool: - """Get whether debug mode should be enabled for the app, indicated by the - :envvar:`FLASK_DEBUG` environment variable. The default is ``False``. - """ - val = os.environ.get("FLASK_DEBUG") - return bool(val and val.lower() not in {"0", "false", "no"}) - - -def get_load_dotenv(default: bool = True) -> bool: - """Get whether the user has disabled loading default dotenv files by - setting :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load - the files. - - :param default: What to return if the env var isn't set. - """ - val = os.environ.get("FLASK_SKIP_DOTENV") - - if not val: - return default - - return val.lower() in ("0", "false", "no") - - -@t.overload -def stream_with_context( - generator_or_function: t.Iterator[t.AnyStr], -) -> t.Iterator[t.AnyStr]: ... - - -@t.overload -def stream_with_context( - generator_or_function: t.Callable[..., t.Iterator[t.AnyStr]], -) -> t.Callable[[t.Iterator[t.AnyStr]], t.Iterator[t.AnyStr]]: ... - - -def stream_with_context( - generator_or_function: t.Iterator[t.AnyStr] | t.Callable[..., t.Iterator[t.AnyStr]], -) -> t.Iterator[t.AnyStr] | t.Callable[[t.Iterator[t.AnyStr]], t.Iterator[t.AnyStr]]: - """Request contexts disappear when the response is started on the server. - This is done for efficiency reasons and to make it less likely to encounter - memory leaks with badly written WSGI middlewares. 
The downside is that if - you are using streamed responses, the generator cannot access request bound - information any more. - - This function however can help you keep the context around for longer:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - @stream_with_context - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(generate()) - - Alternatively it can also be used around a specific generator:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(stream_with_context(generate())) - - .. versionadded:: 0.9 - """ - try: - gen = iter(generator_or_function) # type: ignore[arg-type] - except TypeError: - - def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: - gen = generator_or_function(*args, **kwargs) # type: ignore[operator] - return stream_with_context(gen) - - return update_wrapper(decorator, generator_or_function) # type: ignore[arg-type] - - def generator() -> t.Iterator[t.AnyStr | None]: - ctx = _cv_request.get(None) - if ctx is None: - raise RuntimeError( - "'stream_with_context' can only be used when a request" - " context is active, such as in a view function." - ) - with ctx: - # Dummy sentinel. Has to be inside the context block or we're - # not actually keeping the context around. - yield None - - # The try/finally is here so that if someone passes a WSGI level - # iterator in we're still running the cleanup logic. Generators - # don't need that because they are closed on their destruction - # automatically. - try: - yield from gen - finally: - if hasattr(gen, "close"): - gen.close() - - # The trick is to start the generator. Then the code execution runs until - # the first dummy None is yielded at which point the context was already - # pushed. This item is discarded. Then when the iteration continues the - # real generator is executed. - wrapped_g = generator() - next(wrapped_g) - return wrapped_g # type: ignore[return-value] - - -def make_response(*args: t.Any) -> Response: - """Sometimes it is necessary to set additional headers in a view. Because - views do not have to return response objects but can return a value that - is converted into a response object by Flask itself, it becomes tricky to - add headers to it. This function can be called instead of using a return - and you will get a response object which you can use to attach headers. - - If view looked like this and you want to add a new header:: - - def index(): - return render_template('index.html', foo=42) - - You can now do something like this:: - - def index(): - response = make_response(render_template('index.html', foo=42)) - response.headers['X-Parachutes'] = 'parachutes are cool' - return response - - This function accepts the very same arguments you can return from a - view function. 
This for example creates a response with a 404 error - code:: - - response = make_response(render_template('not_found.html'), 404) - - The other use case of this function is to force the return value of a - view function into a response which is helpful with view - decorators:: - - response = make_response(view_function()) - response.headers['X-Parachutes'] = 'parachutes are cool' - - Internally this function does the following things: - - - if no arguments are passed, it creates a new response argument - - if one argument is passed, :meth:`flask.Flask.make_response` - is invoked with it. - - if more than one argument is passed, the arguments are passed - to the :meth:`flask.Flask.make_response` function as tuple. - - .. versionadded:: 0.6 - """ - if not args: - return current_app.response_class() - if len(args) == 1: - args = args[0] - return current_app.make_response(args) - - -def url_for( - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, -) -> str: - """Generate a URL to the given endpoint with the given values. - - This requires an active request or application context, and calls - :meth:`current_app.url_for() `. See that method - for full documentation. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it is - external. - :param _external: If given, prefer the URL to be internal (False) or - require it to be external (True). External URLs include the - scheme and domain. When not in an active request, URLs are - external by default. - :param values: Values to use for the variable parts of the URL rule. - Unknown keys are appended as query string arguments, like - ``?a=b&c=d``. - - .. versionchanged:: 2.2 - Calls ``current_app.url_for``, allowing an app to override the - behavior. - - .. versionchanged:: 0.10 - The ``_scheme`` parameter was added. - - .. versionchanged:: 0.9 - The ``_anchor`` and ``_method`` parameters were added. - - .. versionchanged:: 0.9 - Calls ``app.handle_url_build_error`` on build errors. - """ - return current_app.url_for( - endpoint, - _anchor=_anchor, - _method=_method, - _scheme=_scheme, - _external=_external, - **values, - ) - - -def redirect( - location: str, code: int = 302, Response: type[BaseResponse] | None = None -) -> BaseResponse: - """Create a redirect response object. - - If :data:`~flask.current_app` is available, it will use its - :meth:`~flask.Flask.redirect` method, otherwise it will use - :func:`werkzeug.utils.redirect`. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - :param Response: The response class to use. Not used when - ``current_app`` is active, which uses ``app.response_class``. - - .. versionadded:: 2.2 - Calls ``current_app.redirect`` if available instead of always - using Werkzeug's default ``redirect``. - """ - if current_app: - return current_app.redirect(location, code=code) - - return _wz_redirect(location, code=code, Response=Response) - - -def abort(code: int | BaseResponse, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - """Raise an :exc:`~werkzeug.exceptions.HTTPException` for the given - status code. 
- - If :data:`~flask.current_app` is available, it will call its - :attr:`~flask.Flask.aborter` object, otherwise it will use - :func:`werkzeug.exceptions.abort`. - - :param code: The status code for the exception, which must be - registered in ``app.aborter``. - :param args: Passed to the exception. - :param kwargs: Passed to the exception. - - .. versionadded:: 2.2 - Calls ``current_app.aborter`` if available instead of always - using Werkzeug's default ``abort``. - """ - if current_app: - current_app.aborter(code, *args, **kwargs) - - _wz_abort(code, *args, **kwargs) - - -def get_template_attribute(template_name: str, attribute: str) -> t.Any: - """Loads a macro (or variable) a template exports. This can be used to - invoke a macro from within Python code. If you for example have a - template named :file:`_cider.html` with the following contents: - - .. sourcecode:: html+jinja - - {% macro hello(name) %}Hello {{ name }}!{% endmacro %} - - You can access this from Python code like this:: - - hello = get_template_attribute('_cider.html', 'hello') - return hello('World') - - .. versionadded:: 0.2 - - :param template_name: the name of the template - :param attribute: the name of the variable of macro to access - """ - return getattr(current_app.jinja_env.get_template(template_name).module, attribute) - - -def flash(message: str, category: str = "message") -> None: - """Flashes a message to the next request. In order to remove the - flashed message from the session and to display it to the user, - the template has to call :func:`get_flashed_messages`. - - .. versionchanged:: 0.3 - `category` parameter added. - - :param message: the message to be flashed. - :param category: the category for the message. The following values - are recommended: ``'message'`` for any kind of message, - ``'error'`` for errors, ``'info'`` for information - messages and ``'warning'`` for warnings. However any - kind of string can be used as category. - """ - # Original implementation: - # - # session.setdefault('_flashes', []).append((category, message)) - # - # This assumed that changes made to mutable structures in the session are - # always in sync with the session object, which is not true for session - # implementations that use external storage for keeping their keys/values. - flashes = session.get("_flashes", []) - flashes.append((category, message)) - session["_flashes"] = flashes - app = current_app._get_current_object() # type: ignore - message_flashed.send( - app, - _async_wrapper=app.ensure_sync, - message=message, - category=category, - ) - - -def get_flashed_messages( - with_categories: bool = False, category_filter: t.Iterable[str] = () -) -> list[str] | list[tuple[str, str]]: - """Pulls all flashed messages from the session and returns them. - Further calls in the same request to the function will return - the same messages. By default just the messages are returned, - but when `with_categories` is set to ``True``, the return value will - be a list of tuples in the form ``(category, message)`` instead. - - Filter the flashed messages to one or more categories by providing those - categories in `category_filter`. This allows rendering categories in - separate html blocks. The `with_categories` and `category_filter` - arguments are distinct: - - * `with_categories` controls whether categories are returned with message - text (``True`` gives a tuple, where ``False`` gives just the message text). - * `category_filter` filters the messages down to only those matching the - provided categories. 
- - See :doc:`/patterns/flashing` for examples. - - .. versionchanged:: 0.3 - `with_categories` parameter added. - - .. versionchanged:: 0.9 - `category_filter` parameter added. - - :param with_categories: set to ``True`` to also receive categories. - :param category_filter: filter of categories to limit return values. Only - categories in the list will be returned. - """ - flashes = request_ctx.flashes - if flashes is None: - flashes = session.pop("_flashes") if "_flashes" in session else [] - request_ctx.flashes = flashes - if category_filter: - flashes = list(filter(lambda f: f[0] in category_filter, flashes)) - if not with_categories: - return [x[1] for x in flashes] - return flashes - - -def _prepare_send_file_kwargs(**kwargs: t.Any) -> dict[str, t.Any]: - if kwargs.get("max_age") is None: - kwargs["max_age"] = current_app.get_send_file_max_age - - kwargs.update( - environ=request.environ, - use_x_sendfile=current_app.config["USE_X_SENDFILE"], - response_class=current_app.response_class, - _root_path=current_app.root_path, # type: ignore - ) - return kwargs - - -def send_file( - path_or_file: os.PathLike[t.AnyStr] | str | t.BinaryIO, - mimetype: str | None = None, - as_attachment: bool = False, - download_name: str | None = None, - conditional: bool = True, - etag: bool | str = True, - last_modified: datetime | int | float | None = None, - max_age: None | (int | t.Callable[[str | None], int | None]) = None, -) -> Response: - """Send the contents of a file to the client. - - The first argument can be a file path or a file-like object. Paths - are preferred in most cases because Werkzeug can manage the file and - get extra information from the path. Passing a file-like object - requires that the file is opened in binary mode, and is mostly - useful when building a file in memory with :class:`io.BytesIO`. - - Never pass file paths provided by a user. The path is assumed to be - trusted, so a user could craft a path to access a file you didn't - intend. Use :func:`send_from_directory` to safely serve - user-requested paths from within a directory. - - If the WSGI server sets a ``file_wrapper`` in ``environ``, it is - used, otherwise Werkzeug's built-in wrapper is used. Alternatively, - if the HTTP server supports ``X-Sendfile``, configuring Flask with - ``USE_X_SENDFILE = True`` will tell the server to send the given - path, which is much more efficient than reading it in Python. - - :param path_or_file: The path to the file to send, relative to the - current working directory if a relative path is given. - Alternatively, a file-like object opened in binary mode. Make - sure the file pointer is seeked to the start of the data. - :param mimetype: The MIME type to send for the file. If not - provided, it will try to detect it from the file name. - :param as_attachment: Indicate to a browser that it should offer to - save the file instead of displaying it. - :param download_name: The default name browsers will use when saving - the file. Defaults to the passed file name. - :param conditional: Enable conditional and range responses based on - request headers. Requires passing a file path and ``environ``. - :param etag: Calculate an ETag for the file, which requires passing - a file path. Can also be a string to use instead. - :param last_modified: The last modified time to send for the file, - in seconds. If not provided, it will try to detect it from the - file path. - :param max_age: How long the client should cache the file, in - seconds. 
If set, ``Cache-Control`` will be ``public``, otherwise - it will be ``no-cache`` to prefer conditional caching. - - .. versionchanged:: 2.0 - ``download_name`` replaces the ``attachment_filename`` - parameter. If ``as_attachment=False``, it is passed with - ``Content-Disposition: inline`` instead. - - .. versionchanged:: 2.0 - ``max_age`` replaces the ``cache_timeout`` parameter. - ``conditional`` is enabled and ``max_age`` is not set by - default. - - .. versionchanged:: 2.0 - ``etag`` replaces the ``add_etags`` parameter. It can be a - string to use instead of generating one. - - .. versionchanged:: 2.0 - Passing a file-like object that inherits from - :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather - than sending an empty file. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionchanged:: 1.1 - ``filename`` may be a :class:`~os.PathLike` object. - - .. versionchanged:: 1.1 - Passing a :class:`~io.BytesIO` object supports range requests. - - .. versionchanged:: 1.0.3 - Filenames are encoded with ASCII instead of Latin-1 for broader - compatibility with WSGI servers. - - .. versionchanged:: 1.0 - UTF-8 filenames as specified in :rfc:`2231` are supported. - - .. versionchanged:: 0.12 - The filename is no longer automatically inferred from file - objects. If you want to use automatic MIME and etag support, - pass a filename via ``filename_or_fp`` or - ``attachment_filename``. - - .. versionchanged:: 0.12 - ``attachment_filename`` is preferred over ``filename`` for MIME - detection. - - .. versionchanged:: 0.9 - ``cache_timeout`` defaults to - :meth:`Flask.get_send_file_max_age`. - - .. versionchanged:: 0.7 - MIME guessing and etag support for file-like objects was - removed because it was unreliable. Pass a filename if you are - able to, otherwise attach an etag yourself. - - .. versionchanged:: 0.5 - The ``add_etags``, ``cache_timeout`` and ``conditional`` - parameters were added. The default behavior is to add etags. - - .. versionadded:: 0.2 - """ - return werkzeug.utils.send_file( # type: ignore[return-value] - **_prepare_send_file_kwargs( - path_or_file=path_or_file, - environ=request.environ, - mimetype=mimetype, - as_attachment=as_attachment, - download_name=download_name, - conditional=conditional, - etag=etag, - last_modified=last_modified, - max_age=max_age, - ) - ) - - -def send_from_directory( - directory: os.PathLike[str] | str, - path: os.PathLike[str] | str, - **kwargs: t.Any, -) -> Response: - """Send a file from within a directory using :func:`send_file`. - - .. code-block:: python - - @app.route("/uploads/") - def download_file(name): - return send_from_directory( - app.config['UPLOAD_FOLDER'], name, as_attachment=True - ) - - This is a secure way to serve files from a folder, such as static - files or uploads. Uses :func:`~werkzeug.security.safe_join` to - ensure the path coming from the client is not maliciously crafted to - point outside the specified directory. - - If the final path does not point to an existing regular file, - raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. - - :param directory: The directory that ``path`` must be located under, - relative to the current application's root path. This *must not* - be a value provided by the client, otherwise it becomes insecure. - :param path: The path to the file to send, relative to - ``directory``. - :param kwargs: Arguments to pass to :func:`send_file`. - - .. 
versionchanged:: 2.0 - ``path`` replaces the ``filename`` parameter. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionadded:: 0.5 - """ - return werkzeug.utils.send_from_directory( # type: ignore[return-value] - directory, path, **_prepare_send_file_kwargs(**kwargs) - ) - - -def get_root_path(import_name: str) -> str: - """Find the root path of a package, or the path that contains a - module. If it cannot be found, returns the current working - directory. - - Not to be confused with the value returned by :func:`find_package`. - - :meta private: - """ - # Module already imported and has a file attribute. Use that first. - mod = sys.modules.get(import_name) - - if mod is not None and hasattr(mod, "__file__") and mod.__file__ is not None: - return os.path.dirname(os.path.abspath(mod.__file__)) - - # Next attempt: check the loader. - try: - spec = importlib.util.find_spec(import_name) - - if spec is None: - raise ValueError - except (ImportError, ValueError): - loader = None - else: - loader = spec.loader - - # Loader does not exist or we're referring to an unloaded main - # module or a main module without path (interactive sessions), go - # with the current working directory. - if loader is None: - return os.getcwd() - - if hasattr(loader, "get_filename"): - filepath = loader.get_filename(import_name) # pyright: ignore - else: - # Fall back to imports. - __import__(import_name) - mod = sys.modules[import_name] - filepath = getattr(mod, "__file__", None) - - # If we don't have a file path it might be because it is a - # namespace package. In this case pick the root path from the - # first module that is contained in the package. - if filepath is None: - raise RuntimeError( - "No root path can be found for the provided module" - f" {import_name!r}. This can happen because the module" - " came from an import hook that does not provide file" - " name information or because it's a namespace package." - " In this case the root path needs to be explicitly" - " provided." - ) - - # filepath is import_name.py for a module, or __init__.py for a package. - return os.path.dirname(os.path.abspath(filepath)) # type: ignore[no-any-return] - - -@cache -def _split_blueprint_path(name: str) -> list[str]: - out: list[str] = [name] - - if "." in name: - out.extend(_split_blueprint_path(name.rpartition(".")[0])) - - return out diff --git a/venv/lib/python3.10/site-packages/flask/json/__init__.py b/venv/lib/python3.10/site-packages/flask/json/__init__.py deleted file mode 100644 index c0941d0..0000000 --- a/venv/lib/python3.10/site-packages/flask/json/__init__.py +++ /dev/null @@ -1,170 +0,0 @@ -from __future__ import annotations - -import json as _json -import typing as t - -from ..globals import current_app -from .provider import _default - -if t.TYPE_CHECKING: # pragma: no cover - from ..wrappers import Response - - -def dumps(obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dumps() ` - method, otherwise it will use :func:`json.dumps`. - - :param obj: The data to serialize. - :param kwargs: Arguments passed to the ``dumps`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dumps``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. 
versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - if current_app: - return current_app.json.dumps(obj, **kwargs) - - kwargs.setdefault("default", _default) - return _json.dumps(obj, **kwargs) - - -def dump(obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dump() ` - method, otherwise it will use :func:`json.dump`. - - :param obj: The data to serialize. - :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: Arguments passed to the ``dump`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dump``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - Writing to a binary file, and the ``encoding`` argument, will be - removed in Flask 2.1. - """ - if current_app: - current_app.json.dump(obj, fp, **kwargs) - else: - kwargs.setdefault("default", _default) - _json.dump(obj, fp, **kwargs) - - -def loads(s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.loads() ` - method, otherwise it will use :func:`json.loads`. - - :param s: Text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``loads`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.loads``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The data must be a - string or UTF-8 bytes. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - if current_app: - return current_app.json.loads(s, **kwargs) - - return _json.loads(s, **kwargs) - - -def load(fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.load() ` - method, otherwise it will use :func:`json.load`. - - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``load`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.load``, allowing an app to override - the behavior. - - .. versionchanged:: 2.2 - The ``app`` parameter will be removed in Flask 2.3. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The file must be text - mode, or binary mode with UTF-8 bytes. - """ - if current_app: - return current_app.json.load(fp, **kwargs) - - return _json.load(fp, **kwargs) - - -def jsonify(*args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. A dict or list returned from a view will be converted to a - JSON response automatically without needing to call this. - - This requires an active request or application context, and calls - :meth:`app.json.response() `. - - In debug mode, the output is formatted with indentation to make it - easier to read. This may also be controlled by the provider. 
- - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - - .. versionchanged:: 2.2 - Calls ``current_app.json.response``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 0.11 - Added support for serializing top-level arrays. This was a - security risk in ancient browsers. See :ref:`security-json`. - - .. versionadded:: 0.2 - """ - return current_app.json.response(*args, **kwargs) # type: ignore[return-value] diff --git a/venv/lib/python3.10/site-packages/flask/json/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/json/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 9e09002..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/json/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/json/__pycache__/provider.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/json/__pycache__/provider.cpython-310.pyc deleted file mode 100644 index 0a1a928..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/json/__pycache__/provider.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/json/__pycache__/tag.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/json/__pycache__/tag.cpython-310.pyc deleted file mode 100644 index 30b73ec..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/json/__pycache__/tag.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/json/provider.py b/venv/lib/python3.10/site-packages/flask/json/provider.py deleted file mode 100644 index ea7e475..0000000 --- a/venv/lib/python3.10/site-packages/flask/json/provider.py +++ /dev/null @@ -1,215 +0,0 @@ -from __future__ import annotations - -import dataclasses -import decimal -import json -import typing as t -import uuid -import weakref -from datetime import date - -from werkzeug.http import http_date - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.sansio.response import Response - - from ..sansio.app import App - - -class JSONProvider: - """A standard set of JSON operations for an application. Subclasses - of this can be used to customize JSON behavior or use different - JSON libraries. - - To implement a provider for a specific library, subclass this base - class and implement at least :meth:`dumps` and :meth:`loads`. All - other methods have default implementations. - - To use a different provider, either subclass ``Flask`` and set - :attr:`~flask.Flask.json_provider_class` to a provider class, or set - :attr:`app.json ` to an instance of the class. - - :param app: An application instance. This will be stored as a - :class:`weakref.proxy` on the :attr:`_app` attribute. - - .. versionadded:: 2.2 - """ - - def __init__(self, app: App) -> None: - self._app: App = weakref.proxy(app) - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - :param obj: The data to serialize. - :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def dump(self, obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - :param obj: The data to serialize. 
- :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: May be passed to the underlying JSON library. - """ - fp.write(self.dumps(obj, **kwargs)) - - def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - :param s: Text or UTF-8 bytes. - :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def load(self, fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. - - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: May be passed to the underlying JSON library. - """ - return self.loads(fp.read(), **kwargs) - - def _prepare_response_obj( - self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] - ) -> t.Any: - if args and kwargs: - raise TypeError("app.json.response() takes either args or kwargs, not both") - - if not args and not kwargs: - return None - - if len(args) == 1: - return args[0] - - return args or kwargs - - def response(self, *args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. - - The :func:`~flask.json.jsonify` function calls this method for - the current application. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - """ - obj = self._prepare_response_obj(args, kwargs) - return self._app.response_class(self.dumps(obj), mimetype="application/json") - - -def _default(o: t.Any) -> t.Any: - if isinstance(o, date): - return http_date(o) - - if isinstance(o, (decimal.Decimal, uuid.UUID)): - return str(o) - - if dataclasses and dataclasses.is_dataclass(o): - return dataclasses.asdict(o) # type: ignore[arg-type] - - if hasattr(o, "__html__"): - return str(o.__html__()) - - raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable") - - -class DefaultJSONProvider(JSONProvider): - """Provide JSON operations using Python's built-in :mod:`json` - library. Serializes the following additional data types: - - - :class:`datetime.datetime` and :class:`datetime.date` are - serialized to :rfc:`822` strings. This is the same as the HTTP - date format. - - :class:`uuid.UUID` is serialized to a string. - - :class:`dataclasses.dataclass` is passed to - :func:`dataclasses.asdict`. - - :class:`~markupsafe.Markup` (or any object with a ``__html__`` - method) will call the ``__html__`` method to get a string. - """ - - default: t.Callable[[t.Any], t.Any] = staticmethod(_default) # type: ignore[assignment] - """Apply this function to any object that :meth:`json.dumps` does - not know how to serialize. It should return a valid JSON type or - raise a ``TypeError``. - """ - - ensure_ascii = True - """Replace non-ASCII characters with escape sequences. This may be - more compatible with some clients, but can be disabled for better - performance and size. - """ - - sort_keys = True - """Sort the keys in any serialized dicts. This may be useful for - some caching situations, but can be disabled for better performance. - When enabled, keys must all be strings, they are not converted - before sorting. 
- """ - - compact: bool | None = None - """If ``True``, or ``None`` out of debug mode, the :meth:`response` - output will not add indentation, newlines, or spaces. If ``False``, - or ``None`` in debug mode, it will use a non-compact representation. - """ - - mimetype = "application/json" - """The mimetype set in :meth:`response`.""" - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON to a string. - - Keyword arguments are passed to :func:`json.dumps`. Sets some - parameter defaults from the :attr:`default`, - :attr:`ensure_ascii`, and :attr:`sort_keys` attributes. - - :param obj: The data to serialize. - :param kwargs: Passed to :func:`json.dumps`. - """ - kwargs.setdefault("default", self.default) - kwargs.setdefault("ensure_ascii", self.ensure_ascii) - kwargs.setdefault("sort_keys", self.sort_keys) - return json.dumps(obj, **kwargs) - - def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON from a string or bytes. - - :param s: Text or UTF-8 bytes. - :param kwargs: Passed to :func:`json.loads`. - """ - return json.loads(s, **kwargs) - - def response(self, *args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with it. The response mimetype - will be "application/json" and can be changed with - :attr:`mimetype`. - - If :attr:`compact` is ``False`` or debug mode is enabled, the - output will be formatted to be easier to read. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - """ - obj = self._prepare_response_obj(args, kwargs) - dump_args: dict[str, t.Any] = {} - - if (self.compact is None and self._app.debug) or self.compact is False: - dump_args.setdefault("indent", 2) - else: - dump_args.setdefault("separators", (",", ":")) - - return self._app.response_class( - f"{self.dumps(obj, **dump_args)}\n", mimetype=self.mimetype - ) diff --git a/venv/lib/python3.10/site-packages/flask/json/tag.py b/venv/lib/python3.10/site-packages/flask/json/tag.py deleted file mode 100644 index 8dc3629..0000000 --- a/venv/lib/python3.10/site-packages/flask/json/tag.py +++ /dev/null @@ -1,327 +0,0 @@ -""" -Tagged JSON -~~~~~~~~~~~ - -A compact representation for lossless serialization of non-standard JSON -types. :class:`~flask.sessions.SecureCookieSessionInterface` uses this -to serialize the session data, but it may be useful in other places. It -can be extended to support other types. - -.. autoclass:: TaggedJSONSerializer - :members: - -.. autoclass:: JSONTag - :members: - -Let's see an example that adds support for -:class:`~collections.OrderedDict`. Dicts don't have an order in JSON, so -to handle this we will dump the items as a list of ``[key, value]`` -pairs. Subclass :class:`JSONTag` and give it the new key ``' od'`` to -identify the type. The session serializer processes dicts first, so -insert the new tag at the front of the order since ``OrderedDict`` must -be processed before ``dict``. - -.. 
code-block:: python - - from flask.json.tag import JSONTag - - class TagOrderedDict(JSONTag): - __slots__ = ('serializer',) - key = ' od' - - def check(self, value): - return isinstance(value, OrderedDict) - - def to_json(self, value): - return [[k, self.serializer.tag(v)] for k, v in iteritems(value)] - - def to_python(self, value): - return OrderedDict(value) - - app.session_interface.serializer.register(TagOrderedDict, index=0) -""" - -from __future__ import annotations - -import typing as t -from base64 import b64decode -from base64 import b64encode -from datetime import datetime -from uuid import UUID - -from markupsafe import Markup -from werkzeug.http import http_date -from werkzeug.http import parse_date - -from ..json import dumps -from ..json import loads - - -class JSONTag: - """Base class for defining type tags for :class:`TaggedJSONSerializer`.""" - - __slots__ = ("serializer",) - - #: The tag to mark the serialized object with. If empty, this tag is - #: only used as an intermediate step during tagging. - key: str = "" - - def __init__(self, serializer: TaggedJSONSerializer) -> None: - """Create a tagger for the given serializer.""" - self.serializer = serializer - - def check(self, value: t.Any) -> bool: - """Check if the given value should be tagged by this tag.""" - raise NotImplementedError - - def to_json(self, value: t.Any) -> t.Any: - """Convert the Python object to an object that is a valid JSON type. - The tag will be added later.""" - raise NotImplementedError - - def to_python(self, value: t.Any) -> t.Any: - """Convert the JSON representation back to the correct type. The tag - will already be removed.""" - raise NotImplementedError - - def tag(self, value: t.Any) -> dict[str, t.Any]: - """Convert the value to a valid JSON type and add the tag structure - around it.""" - return {self.key: self.to_json(value)} - - -class TagDict(JSONTag): - """Tag for 1-item dicts whose only key matches a registered tag. - - Internally, the dict key is suffixed with `__`, and the suffix is removed - when deserializing. - """ - - __slots__ = () - key = " di" - - def check(self, value: t.Any) -> bool: - return ( - isinstance(value, dict) - and len(value) == 1 - and next(iter(value)) in self.serializer.tags - ) - - def to_json(self, value: t.Any) -> t.Any: - key = next(iter(value)) - return {f"{key}__": self.serializer.tag(value[key])} - - def to_python(self, value: t.Any) -> t.Any: - key = next(iter(value)) - return {key[:-2]: value[key]} - - -class PassDict(JSONTag): - __slots__ = () - - def check(self, value: t.Any) -> bool: - return isinstance(value, dict) - - def to_json(self, value: t.Any) -> t.Any: - # JSON objects may only have string keys, so don't bother tagging the - # key here. 
- return {k: self.serializer.tag(v) for k, v in value.items()} - - tag = to_json - - -class TagTuple(JSONTag): - __slots__ = () - key = " t" - - def check(self, value: t.Any) -> bool: - return isinstance(value, tuple) - - def to_json(self, value: t.Any) -> t.Any: - return [self.serializer.tag(item) for item in value] - - def to_python(self, value: t.Any) -> t.Any: - return tuple(value) - - -class PassList(JSONTag): - __slots__ = () - - def check(self, value: t.Any) -> bool: - return isinstance(value, list) - - def to_json(self, value: t.Any) -> t.Any: - return [self.serializer.tag(item) for item in value] - - tag = to_json - - -class TagBytes(JSONTag): - __slots__ = () - key = " b" - - def check(self, value: t.Any) -> bool: - return isinstance(value, bytes) - - def to_json(self, value: t.Any) -> t.Any: - return b64encode(value).decode("ascii") - - def to_python(self, value: t.Any) -> t.Any: - return b64decode(value) - - -class TagMarkup(JSONTag): - """Serialize anything matching the :class:`~markupsafe.Markup` API by - having a ``__html__`` method to the result of that method. Always - deserializes to an instance of :class:`~markupsafe.Markup`.""" - - __slots__ = () - key = " m" - - def check(self, value: t.Any) -> bool: - return callable(getattr(value, "__html__", None)) - - def to_json(self, value: t.Any) -> t.Any: - return str(value.__html__()) - - def to_python(self, value: t.Any) -> t.Any: - return Markup(value) - - -class TagUUID(JSONTag): - __slots__ = () - key = " u" - - def check(self, value: t.Any) -> bool: - return isinstance(value, UUID) - - def to_json(self, value: t.Any) -> t.Any: - return value.hex - - def to_python(self, value: t.Any) -> t.Any: - return UUID(value) - - -class TagDateTime(JSONTag): - __slots__ = () - key = " d" - - def check(self, value: t.Any) -> bool: - return isinstance(value, datetime) - - def to_json(self, value: t.Any) -> t.Any: - return http_date(value) - - def to_python(self, value: t.Any) -> t.Any: - return parse_date(value) - - -class TaggedJSONSerializer: - """Serializer that uses a tag system to compactly represent objects that - are not JSON types. Passed as the intermediate serializer to - :class:`itsdangerous.Serializer`. - - The following extra types are supported: - - * :class:`dict` - * :class:`tuple` - * :class:`bytes` - * :class:`~markupsafe.Markup` - * :class:`~uuid.UUID` - * :class:`~datetime.datetime` - """ - - __slots__ = ("tags", "order") - - #: Tag classes to bind when creating the serializer. Other tags can be - #: added later using :meth:`~register`. - default_tags = [ - TagDict, - PassDict, - TagTuple, - PassList, - TagBytes, - TagMarkup, - TagUUID, - TagDateTime, - ] - - def __init__(self) -> None: - self.tags: dict[str, JSONTag] = {} - self.order: list[JSONTag] = [] - - for cls in self.default_tags: - self.register(cls) - - def register( - self, - tag_class: type[JSONTag], - force: bool = False, - index: int | None = None, - ) -> None: - """Register a new tag with this serializer. - - :param tag_class: tag class to register. Will be instantiated with this - serializer instance. - :param force: overwrite an existing tag. If false (default), a - :exc:`KeyError` is raised. - :param index: index to insert the new tag in the tag order. Useful when - the new tag is a special case of an existing tag. If ``None`` - (default), the tag is appended to the end of the order. - - :raise KeyError: if the tag key is already registered and ``force`` is - not true. 
- """ - tag = tag_class(self) - key = tag.key - - if key: - if not force and key in self.tags: - raise KeyError(f"Tag '{key}' is already registered.") - - self.tags[key] = tag - - if index is None: - self.order.append(tag) - else: - self.order.insert(index, tag) - - def tag(self, value: t.Any) -> t.Any: - """Convert a value to a tagged representation if necessary.""" - for tag in self.order: - if tag.check(value): - return tag.tag(value) - - return value - - def untag(self, value: dict[str, t.Any]) -> t.Any: - """Convert a tagged representation back to the original type.""" - if len(value) != 1: - return value - - key = next(iter(value)) - - if key not in self.tags: - return value - - return self.tags[key].to_python(value[key]) - - def _untag_scan(self, value: t.Any) -> t.Any: - if isinstance(value, dict): - # untag each item recursively - value = {k: self._untag_scan(v) for k, v in value.items()} - # untag the dict itself - value = self.untag(value) - elif isinstance(value, list): - # untag each item recursively - value = [self._untag_scan(item) for item in value] - - return value - - def dumps(self, value: t.Any) -> str: - """Tag the value and dump it to a compact JSON string.""" - return dumps(self.tag(value), separators=(",", ":")) - - def loads(self, value: str) -> t.Any: - """Load data from a JSON string and deserialized any tagged objects.""" - return self._untag_scan(loads(value)) diff --git a/venv/lib/python3.10/site-packages/flask/logging.py b/venv/lib/python3.10/site-packages/flask/logging.py deleted file mode 100644 index 0cb8f43..0000000 --- a/venv/lib/python3.10/site-packages/flask/logging.py +++ /dev/null @@ -1,79 +0,0 @@ -from __future__ import annotations - -import logging -import sys -import typing as t - -from werkzeug.local import LocalProxy - -from .globals import request - -if t.TYPE_CHECKING: # pragma: no cover - from .sansio.app import App - - -@LocalProxy -def wsgi_errors_stream() -> t.TextIO: - """Find the most appropriate error stream for the application. If a request - is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``. - - If you configure your own :class:`logging.StreamHandler`, you may want to - use this for the stream. If you are using file or dict configuration and - can't import this directly, you can refer to it as - ``ext://flask.logging.wsgi_errors_stream``. - """ - if request: - return request.environ["wsgi.errors"] # type: ignore[no-any-return] - - return sys.stderr - - -def has_level_handler(logger: logging.Logger) -> bool: - """Check if there is a handler in the logging chain that will handle the - given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`. - """ - level = logger.getEffectiveLevel() - current = logger - - while current: - if any(handler.level <= level for handler in current.handlers): - return True - - if not current.propagate: - break - - current = current.parent # type: ignore - - return False - - -#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format -#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``. -default_handler = logging.StreamHandler(wsgi_errors_stream) # type: ignore -default_handler.setFormatter( - logging.Formatter("[%(asctime)s] %(levelname)s in %(module)s: %(message)s") -) - - -def create_logger(app: App) -> logging.Logger: - """Get the Flask app's logger and configure it if needed. - - The logger name will be the same as - :attr:`app.import_name `. 
- - When :attr:`~flask.Flask.debug` is enabled, set the logger level to - :data:`logging.DEBUG` if it is not set. - - If there is no handler for the logger's effective level, add a - :class:`~logging.StreamHandler` for - :func:`~flask.logging.wsgi_errors_stream` with a basic format. - """ - logger = logging.getLogger(app.name) - - if app.debug and not logger.level: - logger.setLevel(logging.DEBUG) - - if not has_level_handler(logger): - logger.addHandler(default_handler) - - return logger diff --git a/venv/lib/python3.10/site-packages/flask/py.typed b/venv/lib/python3.10/site-packages/flask/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/flask/sansio/README.md b/venv/lib/python3.10/site-packages/flask/sansio/README.md deleted file mode 100644 index 623ac19..0000000 --- a/venv/lib/python3.10/site-packages/flask/sansio/README.md +++ /dev/null @@ -1,6 +0,0 @@ -# Sansio - -This folder contains code that can be used by alternative Flask -implementations, for example Quart. The code therefore cannot do any -IO, nor be part of a likely IO path. Finally this code cannot use the -Flask globals. diff --git a/venv/lib/python3.10/site-packages/flask/sansio/__pycache__/app.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/sansio/__pycache__/app.cpython-310.pyc deleted file mode 100644 index 9626f9f..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/sansio/__pycache__/app.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/sansio/__pycache__/blueprints.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/sansio/__pycache__/blueprints.cpython-310.pyc deleted file mode 100644 index e6b4c1c..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/sansio/__pycache__/blueprints.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/sansio/__pycache__/scaffold.cpython-310.pyc b/venv/lib/python3.10/site-packages/flask/sansio/__pycache__/scaffold.cpython-310.pyc deleted file mode 100644 index 0a12a79..0000000 Binary files a/venv/lib/python3.10/site-packages/flask/sansio/__pycache__/scaffold.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/flask/sansio/app.py b/venv/lib/python3.10/site-packages/flask/sansio/app.py deleted file mode 100644 index 3620171..0000000 --- a/venv/lib/python3.10/site-packages/flask/sansio/app.py +++ /dev/null @@ -1,964 +0,0 @@ -from __future__ import annotations - -import logging -import os -import sys -import typing as t -from datetime import timedelta -from itertools import chain - -from werkzeug.exceptions import Aborter -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.routing import BuildError -from werkzeug.routing import Map -from werkzeug.routing import Rule -from werkzeug.sansio.response import Response -from werkzeug.utils import cached_property -from werkzeug.utils import redirect as _wz_redirect - -from .. 
import typing as ft -from ..config import Config -from ..config import ConfigAttribute -from ..ctx import _AppCtxGlobals -from ..helpers import _split_blueprint_path -from ..helpers import get_debug_flag -from ..json.provider import DefaultJSONProvider -from ..json.provider import JSONProvider -from ..logging import create_logger -from ..templating import DispatchingJinjaLoader -from ..templating import Environment -from .scaffold import _endpoint_from_view_func -from .scaffold import find_package -from .scaffold import Scaffold -from .scaffold import setupmethod - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.wrappers import Response as BaseResponse - - from ..testing import FlaskClient - from ..testing import FlaskCliRunner - from .blueprints import Blueprint - -T_shell_context_processor = t.TypeVar( - "T_shell_context_processor", bound=ft.ShellContextProcessorCallable -) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) - - -def _make_timedelta(value: timedelta | int | None) -> timedelta | None: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class App(Scaffold): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. 
(For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. - This should only be set manually when it can't be detected - automatically, such as for namespace packages. - """ - - #: The class of the object assigned to :attr:`aborter`, created by - #: :meth:`create_aborter`. That object is called by - #: :func:`flask.abort` to raise HTTP errors, and can be - #: called directly as well. - #: - #: Defaults to :class:`werkzeug.exceptions.Aborter`. - #: - #: .. versionadded:: 2.2 - aborter_class = Aborter - - #: The class that is used for the Jinja environment. - #: - #: .. versionadded:: 0.11 - jinja_environment = Environment - - #: The class that is used for the :data:`~flask.g` instance. - #: - #: Example use cases for a custom class: - #: - #: 1. Store arbitrary attributes on flask.g. - #: 2. Add a property for lazy per-request database connectors. - #: 3. Return None instead of AttributeError on unexpected attributes. - #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. - #: - #: In Flask 0.9 this property was called `request_globals_class` but it - #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the - #: flask.g object is now application context scoped. - #: - #: .. versionadded:: 0.10 - app_ctx_globals_class = _AppCtxGlobals - - #: The class that is used for the ``config`` attribute of this app. - #: Defaults to :class:`~flask.Config`. - #: - #: Example use cases for a custom class: - #: - #: 1. Default values for certain config options. - #: 2. 
Access to config values through attributes in addition to keys. - #: - #: .. versionadded:: 0.11 - config_class = Config - - #: The testing flag. Set this to ``True`` to enable the test mode of - #: Flask extensions (and in the future probably also Flask itself). - #: For example this might activate test helpers that have an - #: additional runtime cost which should not be enabled by default. - #: - #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the - #: default it's implicitly enabled. - #: - #: This attribute can also be configured from the config with the - #: ``TESTING`` configuration key. Defaults to ``False``. - testing = ConfigAttribute[bool]("TESTING") - - #: If a secret key is set, cryptographic components can use this to - #: sign cookies and other things. Set this to a complex random value - #: when you want to use the secure cookie for instance. - #: - #: This attribute can also be configured from the config with the - #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. - secret_key = ConfigAttribute[t.Union[str, bytes, None]]("SECRET_KEY") - - #: A :class:`~datetime.timedelta` which is used to set the expiration - #: date of a permanent session. The default is 31 days which makes a - #: permanent session survive for roughly one month. - #: - #: This attribute can also be configured from the config with the - #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to - #: ``timedelta(days=31)`` - permanent_session_lifetime = ConfigAttribute[timedelta]( - "PERMANENT_SESSION_LIFETIME", - get_converter=_make_timedelta, # type: ignore[arg-type] - ) - - json_provider_class: type[JSONProvider] = DefaultJSONProvider - """A subclass of :class:`~flask.json.provider.JSONProvider`. An - instance is created and assigned to :attr:`app.json` when creating - the app. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, uses - Python's built-in :mod:`json` library. A different provider can use - a different JSON library. - - .. versionadded:: 2.2 - """ - - #: Options that are passed to the Jinja environment in - #: :meth:`create_jinja_environment`. Changing these options after - #: the environment is created (accessing :attr:`jinja_env`) will - #: have no effect. - #: - #: .. versionchanged:: 1.1.0 - #: This is a ``dict`` instead of an ``ImmutableDict`` to allow - #: easier configuration. - #: - jinja_options: dict[str, t.Any] = {} - - #: The rule object to use for URL rules created. This is used by - #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. - #: - #: .. versionadded:: 0.7 - url_rule_class = Rule - - #: The map object to use for storing the URL rules and routing - #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. - #: - #: .. versionadded:: 1.1.0 - url_map_class = Map - - #: The :meth:`test_client` method creates an instance of this test - #: client class. Defaults to :class:`~flask.testing.FlaskClient`. - #: - #: .. versionadded:: 0.7 - test_client_class: type[FlaskClient] | None = None - - #: The :class:`~click.testing.CliRunner` subclass, by default - #: :class:`~flask.testing.FlaskCliRunner` that is used by - #: :meth:`test_cli_runner`. Its ``__init__`` method should take a - #: Flask app object as the first argument. - #: - #: .. 
versionadded:: 1.0 - test_cli_runner_class: type[FlaskCliRunner] | None = None - - default_config: dict[str, t.Any] - response_class: type[Response] - - def __init__( - self, - import_name: str, - static_url_path: str | None = None, - static_folder: str | os.PathLike[str] | None = "static", - static_host: str | None = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: str | os.PathLike[str] | None = "templates", - instance_path: str | None = None, - instance_relative_config: bool = False, - root_path: str | None = None, - ) -> None: - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if instance_path is None: - instance_path = self.auto_find_instance_path() - elif not os.path.isabs(instance_path): - raise ValueError( - "If an instance path is provided it must be absolute." - " A relative path was given instead." - ) - - #: Holds the path to the instance folder. - #: - #: .. versionadded:: 0.8 - self.instance_path = instance_path - - #: The configuration dictionary as :class:`Config`. This behaves - #: exactly like a regular dictionary but supports additional methods - #: to load a config from files. - self.config = self.make_config(instance_relative_config) - - #: An instance of :attr:`aborter_class` created by - #: :meth:`make_aborter`. This is called by :func:`flask.abort` - #: to raise HTTP errors, and can be called directly as well. - #: - #: .. versionadded:: 2.2 - #: Moved from ``flask.abort``, which calls this object. - self.aborter = self.make_aborter() - - self.json: JSONProvider = self.json_provider_class(self) - """Provides access to JSON methods. Functions in ``flask.json`` - will call methods on this provider when the application context - is active. Used for handling JSON requests and responses. - - An instance of :attr:`json_provider_class`. Can be customized by - changing that attribute on a subclass, or by assigning to this - attribute afterwards. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, - uses Python's built-in :mod:`json` library. A different provider - can use a different JSON library. - - .. versionadded:: 2.2 - """ - - #: A list of functions that are called by - #: :meth:`handle_url_build_error` when :meth:`.url_for` raises a - #: :exc:`~werkzeug.routing.BuildError`. Each function is called - #: with ``error``, ``endpoint`` and ``values``. If a function - #: returns ``None`` or raises a ``BuildError``, it is skipped. - #: Otherwise, its return value is returned by ``url_for``. - #: - #: .. versionadded:: 0.9 - self.url_build_error_handlers: list[ - t.Callable[[Exception, str, dict[str, t.Any]], str] - ] = [] - - #: A list of functions that are called when the application context - #: is destroyed. Since the application context is also torn down - #: if the request ends this is the place to store code that disconnects - #: from databases. - #: - #: .. versionadded:: 0.9 - self.teardown_appcontext_funcs: list[ft.TeardownCallable] = [] - - #: A list of shell context processor functions that should be run - #: when a shell context is created. - #: - #: .. versionadded:: 0.11 - self.shell_context_processors: list[ft.ShellContextProcessorCallable] = [] - - #: Maps registered blueprint names to blueprint objects. The - #: dict retains the order the blueprints were registered in. 
- #: Blueprints can be registered multiple times, this dict does - #: not track how often they were attached. - #: - #: .. versionadded:: 0.7 - self.blueprints: dict[str, Blueprint] = {} - - #: a place where extensions can store application specific state. For - #: example this is where an extension could store database engines and - #: similar things. - #: - #: The key must match the name of the extension module. For example in - #: case of a "Flask-Foo" extension in `flask_foo`, the key would be - #: ``'foo'``. - #: - #: .. versionadded:: 0.7 - self.extensions: dict[str, t.Any] = {} - - #: The :class:`~werkzeug.routing.Map` for this instance. You can use - #: this to change the routing converters after the class was created - #: but before any routes are connected. Example:: - #: - #: from werkzeug.routing import BaseConverter - #: - #: class ListConverter(BaseConverter): - #: def to_python(self, value): - #: return value.split(',') - #: def to_url(self, values): - #: return ','.join(super(ListConverter, self).to_url(value) - #: for value in values) - #: - #: app = Flask(__name__) - #: app.url_map.converters['list'] = ListConverter - self.url_map = self.url_map_class(host_matching=host_matching) - - self.subdomain_matching = subdomain_matching - - # tracks internally if the application already handled at least one - # request. - self._got_first_request = False - - def _check_setup_finished(self, f_name: str) -> None: - if self._got_first_request: - raise AssertionError( - f"The setup method '{f_name}' can no longer be called" - " on the application. It has already handled its first" - " request, any changes will not be applied" - " consistently.\n" - "Make sure all imports, decorators, functions, etc." - " needed to set up the application are done before" - " running it." - ) - - @cached_property - def name(self) -> str: # type: ignore - """The name of the application. This is usually the import name - with the difference that it's guessed from the run file if the - import name is main. This name is used as a display name when - Flask needs the name of the application. It can be set and overridden - to change the value. - - .. versionadded:: 0.8 - """ - if self.import_name == "__main__": - fn: str | None = getattr(sys.modules["__main__"], "__file__", None) - if fn is None: - return "__main__" - return os.path.splitext(os.path.basename(fn))[0] - return self.import_name - - @cached_property - def logger(self) -> logging.Logger: - """A standard Python :class:`~logging.Logger` for the app, with - the same name as :attr:`name`. - - In debug mode, the logger's :attr:`~logging.Logger.level` will - be set to :data:`~logging.DEBUG`. - - If there are no handlers configured, a default handler will be - added. See :doc:`/logging` for more information. - - .. versionchanged:: 1.1.0 - The logger takes the same name as :attr:`name` rather than - hard-coding ``"flask.app"``. - - .. versionchanged:: 1.0.0 - Behavior was simplified. The logger is always named - ``"flask.app"``. The level is only set during configuration, - it doesn't check ``app.debug`` each time. Only one format is - used, not different ones depending on ``app.debug``. No - handlers are removed, and a handler is only added if no - handlers are already configured. - - .. versionadded:: 0.3 - """ - return create_logger(self) - - @cached_property - def jinja_env(self) -> Environment: - """The Jinja environment used to load templates. - - The environment is created the first time this property is - accessed. 
Changing :attr:`jinja_options` after that will have no - effect. - """ - return self.create_jinja_environment() - - def create_jinja_environment(self) -> Environment: - raise NotImplementedError() - - def make_config(self, instance_relative: bool = False) -> Config: - """Used to create the config attribute by the Flask constructor. - The `instance_relative` parameter is passed in from the constructor - of Flask (there named `instance_relative_config`) and indicates if - the config should be relative to the instance path or the root path - of the application. - - .. versionadded:: 0.8 - """ - root_path = self.root_path - if instance_relative: - root_path = self.instance_path - defaults = dict(self.default_config) - defaults["DEBUG"] = get_debug_flag() - return self.config_class(root_path, defaults) - - def make_aborter(self) -> Aborter: - """Create the object to assign to :attr:`aborter`. That object - is called by :func:`flask.abort` to raise HTTP errors, and can - be called directly as well. - - By default, this creates an instance of :attr:`aborter_class`, - which defaults to :class:`werkzeug.exceptions.Aborter`. - - .. versionadded:: 2.2 - """ - return self.aborter_class() - - def auto_find_instance_path(self) -> str: - """Tries to locate the instance path if it was not provided to the - constructor of the application class. It will basically calculate - the path to a folder named ``instance`` next to your main file or - the package. - - .. versionadded:: 0.8 - """ - prefix, package_path = find_package(self.import_name) - if prefix is None: - return os.path.join(package_path, "instance") - return os.path.join(prefix, "var", f"{self.name}-instance") - - def create_global_jinja_loader(self) -> DispatchingJinjaLoader: - """Creates the loader for the Jinja2 environment. Can be used to - override just the loader and keeping the rest unchanged. It's - discouraged to override this function. Instead one should override - the :meth:`jinja_loader` function instead. - - The global loader dispatches between the loaders of the application - and the individual blueprints. - - .. versionadded:: 0.7 - """ - return DispatchingJinjaLoader(self) - - def select_jinja_autoescape(self, filename: str) -> bool: - """Returns ``True`` if autoescaping should be active for the given - template name. If no template name is given, returns `True`. - - .. versionchanged:: 2.2 - Autoescaping is now enabled by default for ``.svg`` files. - - .. versionadded:: 0.5 - """ - if filename is None: - return True - return filename.endswith((".html", ".htm", ".xml", ".xhtml", ".svg")) - - @property - def debug(self) -> bool: - """Whether debug mode is enabled. When using ``flask run`` to start the - development server, an interactive debugger will be shown for unhandled - exceptions, and the server will be reloaded when code changes. This maps to the - :data:`DEBUG` config key. It may not behave as expected if set late. - - **Do not enable debug mode when deploying in production.** - - Default: ``False`` - """ - return self.config["DEBUG"] # type: ignore[no-any-return] - - @debug.setter - def debug(self, value: bool) -> None: - self.config["DEBUG"] = value - - if self.config["TEMPLATES_AUTO_RELOAD"] is None: - self.jinja_env.auto_reload = value - - @setupmethod - def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on the application. Keyword - arguments passed to this method will override the defaults set on the - blueprint. 
- - Calls the blueprint's :meth:`~flask.Blueprint.register` method after - recording the blueprint in the application's :attr:`blueprints`. - - :param blueprint: The blueprint to register. - :param url_prefix: Blueprint routes will be prefixed with this. - :param subdomain: Blueprint routes will match on this subdomain. - :param url_defaults: Blueprint routes will use these default values for - view arguments. - :param options: Additional keyword arguments are passed to - :class:`~flask.blueprints.BlueprintSetupState`. They can be - accessed in :meth:`~flask.Blueprint.record` callbacks. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 0.7 - """ - blueprint.register(self, options) - - def iter_blueprints(self) -> t.ValuesView[Blueprint]: - """Iterates over all blueprints by the order they were registered. - - .. versionadded:: 0.11 - """ - return self.blueprints.values() - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - options["endpoint"] = endpoint - methods = options.pop("methods", None) - - # if the methods are not given and the view_func object knows its - # methods we can use that instead. If neither exists, we go with - # a tuple of only ``GET`` as default. - if methods is None: - methods = getattr(view_func, "methods", None) or ("GET",) - if isinstance(methods, str): - raise TypeError( - "Allowed methods must be a list of strings, for" - ' example: @app.route(..., methods=["POST"])' - ) - methods = {item.upper() for item in methods} - - # Methods that should always be added - required_methods: set[str] = set(getattr(view_func, "required_methods", ())) - - # starting with Flask 0.8 the view_func object can disable and - # force-enable the automatic options handling. - if provide_automatic_options is None: - provide_automatic_options = getattr( - view_func, "provide_automatic_options", None - ) - - if provide_automatic_options is None: - if "OPTIONS" not in methods and self.config["PROVIDE_AUTOMATIC_OPTIONS"]: - provide_automatic_options = True - required_methods.add("OPTIONS") - else: - provide_automatic_options = False - - # Add the required methods now. - methods |= required_methods - - rule_obj = self.url_rule_class(rule, methods=methods, **options) - rule_obj.provide_automatic_options = provide_automatic_options # type: ignore[attr-defined] - - self.url_map.add(rule_obj) - if view_func is not None: - old_func = self.view_functions.get(endpoint) - if old_func is not None and old_func != view_func: - raise AssertionError( - "View function mapping is overwriting an existing" - f" endpoint function: {endpoint}" - ) - self.view_functions[endpoint] = view_func - - @setupmethod - def template_filter( - self, name: str | None = None - ) -> t.Callable[[T_template_filter], T_template_filter]: - """A decorator that is used to register custom template filter. - You can specify a name for the filter, otherwise the function - name will be used. Example:: - - @app.template_filter() - def reverse(s): - return s[::-1] - - :param name: the optional name of the filter, otherwise the - function name will be used. 
- """ - - def decorator(f: T_template_filter) -> T_template_filter: - self.add_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_filter( - self, f: ft.TemplateFilterCallable, name: str | None = None - ) -> None: - """Register a custom template filter. Works exactly like the - :meth:`template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - self.jinja_env.filters[name or f.__name__] = f - - @setupmethod - def template_test( - self, name: str | None = None - ) -> t.Callable[[T_template_test], T_template_test]: - """A decorator that is used to register custom template test. - You can specify a name for the test, otherwise the function - name will be used. Example:: - - @app.template_test() - def is_prime(n): - if n == 2: - return True - for i in range(2, int(math.ceil(math.sqrt(n))) + 1): - if n % i == 0: - return False - return True - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a custom template test. Works exactly like the - :meth:`template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - self.jinja_env.tests[name or f.__name__] = f - - @setupmethod - def template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """A decorator that is used to register a custom template global function. - You can specify a name for the global function, otherwise the function - name will be used. Example:: - - @app.template_global() - def double(n): - return 2 * n - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a custom template global function. Works exactly like the - :meth:`template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - self.jinja_env.globals[name or f.__name__] = f - - @setupmethod - def teardown_appcontext(self, f: T_teardown) -> T_teardown: - """Registers a function to be called when the application - context is popped. The application context is typically popped - after the request context for each request, at the end of CLI - commands, or after a manually pushed context ends. - - .. code-block:: python - - with app.app_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the app context is - made inactive. Since a request context typically also manages an - application context it would also be called when you pop a - request context. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. 
- - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - .. versionadded:: 0.9 - """ - self.teardown_appcontext_funcs.append(f) - return f - - @setupmethod - def shell_context_processor( - self, f: T_shell_context_processor - ) -> T_shell_context_processor: - """Registers a shell context processor function. - - .. versionadded:: 0.11 - """ - self.shell_context_processors.append(f) - return f - - def _find_error_handler( - self, e: Exception, blueprints: list[str] - ) -> ft.ErrorHandlerCallable | None: - """Return a registered error handler for an exception in this order: - blueprint handler for a specific code, app handler for a specific code, - blueprint handler for an exception class, app handler for an exception - class, or ``None`` if a suitable handler is not found. - """ - exc_class, code = self._get_exc_class_and_code(type(e)) - names = (*blueprints, None) - - for c in (code, None) if code is not None else (None,): - for name in names: - handler_map = self.error_handler_spec[name][c] - - if not handler_map: - continue - - for cls in exc_class.__mro__: - handler = handler_map.get(cls) - - if handler is not None: - return handler - return None - - def trap_http_exception(self, e: Exception) -> bool: - """Checks if an HTTP exception should be trapped or not. By default - this will return ``False`` for all exceptions except for a bad request - key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It - also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. - - This is called for all HTTP exceptions raised by a view function. - If it returns ``True`` for any exception the error handler for this - exception is not called and it shows up as regular exception in the - traceback. This is helpful for debugging implicitly raised HTTP - exceptions. - - .. versionchanged:: 1.0 - Bad request errors are not trapped by default in debug mode. - - .. versionadded:: 0.8 - """ - if self.config["TRAP_HTTP_EXCEPTIONS"]: - return True - - trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] - - # if unset, trap key errors in debug mode - if ( - trap_bad_request is None - and self.debug - and isinstance(e, BadRequestKeyError) - ): - return True - - if trap_bad_request: - return isinstance(e, BadRequest) - - return False - - def should_ignore_error(self, error: BaseException | None) -> bool: - """This is called to figure out if an error should be ignored - or not as far as the teardown system is concerned. If this - function returns ``True`` then the teardown handlers will not be - passed the error. - - .. versionadded:: 0.10 - """ - return False - - def redirect(self, location: str, code: int = 302) -> BaseResponse: - """Create a redirect response object. - - This is called by :func:`flask.redirect`, and can be called - directly as well. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - - .. versionadded:: 2.2 - Moved from ``flask.redirect``, which calls this method. - """ - return _wz_redirect( - location, - code=code, - Response=self.response_class, # type: ignore[arg-type] - ) - - def inject_url_defaults(self, endpoint: str, values: dict[str, t.Any]) -> None: - """Injects the URL defaults for the given endpoint directly into - the values dictionary passed. This is used internally and - automatically called on URL building. - - .. 
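Editor's note: as the ``teardown_appcontext`` docstring above stresses, teardown callbacks run when the app context is popped, receive the unhandled exception (or ``None``), and must not raise. A sketch; the database handle stored on ``g`` is purely illustrative:

.. code-block:: python

    from flask import Flask, g

    app = Flask(__name__)

    @app.teardown_appcontext
    def close_db(exc):
        # ``exc`` is the unhandled exception, or None; the return value is ignored.
        db = g.pop("db", None)
        if db is not None:
            try:
                db.close()
            except Exception:
                app.logger.exception("failed to close database handle")

    with app.app_context():
        pass  # close_db runs when the block exits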
versionadded:: 0.7 - """ - names: t.Iterable[str | None] = (None,) - - # url_for may be called outside a request context, parse the - # passed endpoint instead of using request.blueprints. - if "." in endpoint: - names = chain( - names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) - ) - - for name in names: - if name in self.url_default_functions: - for func in self.url_default_functions[name]: - func(endpoint, values) - - def handle_url_build_error( - self, error: BuildError, endpoint: str, values: dict[str, t.Any] - ) -> str: - """Called by :meth:`.url_for` if a - :exc:`~werkzeug.routing.BuildError` was raised. If this returns - a value, it will be returned by ``url_for``, otherwise the error - will be re-raised. - - Each function in :attr:`url_build_error_handlers` is called with - ``error``, ``endpoint`` and ``values``. If a function returns - ``None`` or raises a ``BuildError``, it is skipped. Otherwise, - its return value is returned by ``url_for``. - - :param error: The active ``BuildError`` being handled. - :param endpoint: The endpoint being built. - :param values: The keyword arguments passed to ``url_for``. - """ - for handler in self.url_build_error_handlers: - try: - rv = handler(error, endpoint, values) - except BuildError as e: - # make error available outside except block - error = e - else: - if rv is not None: - return rv - - # Re-raise if called with an active exception, otherwise raise - # the passed in exception. - if error is sys.exc_info()[1]: - raise - - raise error diff --git a/venv/lib/python3.10/site-packages/flask/sansio/blueprints.py b/venv/lib/python3.10/site-packages/flask/sansio/blueprints.py deleted file mode 100644 index 4f912cc..0000000 --- a/venv/lib/python3.10/site-packages/flask/sansio/blueprints.py +++ /dev/null @@ -1,632 +0,0 @@ -from __future__ import annotations - -import os -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from .. import typing as ft -from .scaffold import _endpoint_from_view_func -from .scaffold import _sentinel -from .scaffold import Scaffold -from .scaffold import setupmethod - -if t.TYPE_CHECKING: # pragma: no cover - from .app import App - -DeferredSetupFunction = t.Callable[["BlueprintSetupState"], None] -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any]) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) - - -class BlueprintSetupState: - """Temporary holder object for registering a blueprint with the - application. An instance of this class is created by the - :meth:`~flask.Blueprint.make_setup_state` method and later passed - to all register callback functions. 
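Editor's note: the ``handle_url_build_error`` docstring above describes how functions in ``url_build_error_handlers`` can turn a failed ``url_for`` into a fallback value. A hedged sketch; the external endpoint mapping is made up:

.. code-block:: python

    from flask import Flask, url_for

    app = Flask(__name__)

    EXTERNAL = {"docs": "https://example.com/docs"}  # illustrative mapping

    def external_url_handler(error, endpoint, values):
        # Returning None (or raising BuildError) means "skip this handler".
        return EXTERNAL.get(endpoint)

    app.url_build_error_handlers.append(external_url_handler)

    with app.test_request_context():
        # Resolved by the handler instead of raising a BuildError.
        print(url_for("docs"))  # https://example.com/docs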
- """ - - def __init__( - self, - blueprint: Blueprint, - app: App, - options: t.Any, - first_registration: bool, - ) -> None: - #: a reference to the current application - self.app = app - - #: a reference to the blueprint that created this setup state. - self.blueprint = blueprint - - #: a dictionary with all options that were passed to the - #: :meth:`~flask.Flask.register_blueprint` method. - self.options = options - - #: as blueprints can be registered multiple times with the - #: application and not everything wants to be registered - #: multiple times on it, this attribute can be used to figure - #: out if the blueprint was registered in the past already. - self.first_registration = first_registration - - subdomain = self.options.get("subdomain") - if subdomain is None: - subdomain = self.blueprint.subdomain - - #: The subdomain that the blueprint should be active for, ``None`` - #: otherwise. - self.subdomain = subdomain - - url_prefix = self.options.get("url_prefix") - if url_prefix is None: - url_prefix = self.blueprint.url_prefix - #: The prefix that should be used for all URLs defined on the - #: blueprint. - self.url_prefix = url_prefix - - self.name = self.options.get("name", blueprint.name) - self.name_prefix = self.options.get("name_prefix", "") - - #: A dictionary with URL defaults that is added to each and every - #: URL that was defined with the blueprint. - self.url_defaults = dict(self.blueprint.url_values_defaults) - self.url_defaults.update(self.options.get("url_defaults", ())) - - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - **options: t.Any, - ) -> None: - """A helper method to register a rule (and optionally a view function) - to the application. The endpoint is automatically prefixed with the - blueprint's name. - """ - if self.url_prefix is not None: - if rule: - rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) - else: - rule = self.url_prefix - options.setdefault("subdomain", self.subdomain) - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - defaults = self.url_defaults - if "defaults" in options: - defaults = dict(defaults, **options.pop("defaults")) - - self.app.add_url_rule( - rule, - f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), - view_func, - defaults=defaults, - **options, - ) - - -class Blueprint(Scaffold): - """Represents a blueprint, a collection of routes and other - app-related functions that can be registered on a real application - later. - - A blueprint is an object that allows defining application functions - without requiring an application object ahead of time. It uses the - same decorators as :class:`~flask.Flask`, but defers the need for an - application by recording them for later registration. - - Decorating a function with a blueprint creates a deferred function - that is called with :class:`~flask.blueprints.BlueprintSetupState` - when the blueprint is registered on an application. - - See :doc:`/blueprints` for more information. - - :param name: The name of the blueprint. Will be prepended to each - endpoint name. - :param import_name: The name of the blueprint package, usually - ``__name__``. This helps locate the ``root_path`` for the - blueprint. - :param static_folder: A folder with static files that should be - served by the blueprint's static route. The path is relative to - the blueprint's root path. Blueprint static files are disabled - by default. 
- :param static_url_path: The url to serve static files from. - Defaults to ``static_folder``. If the blueprint does not have - a ``url_prefix``, the app's static route will take precedence, - and the blueprint's static files won't be accessible. - :param template_folder: A folder with templates that should be added - to the app's template search path. The path is relative to the - blueprint's root path. Blueprint templates are disabled by - default. Blueprint templates have a lower precedence than those - in the app's templates folder. - :param url_prefix: A path to prepend to all of the blueprint's URLs, - to make them distinct from the rest of the app's routes. - :param subdomain: A subdomain that blueprint routes will match on by - default. - :param url_defaults: A dict of default values that blueprint routes - will receive by default. - :param root_path: By default, the blueprint will automatically set - this based on ``import_name``. In certain situations this - automatic detection can fail, so the path can be specified - manually instead. - - .. versionchanged:: 1.1.0 - Blueprints have a ``cli`` group to register nested CLI commands. - The ``cli_group`` parameter controls the name of the group under - the ``flask`` command. - - .. versionadded:: 0.7 - """ - - _got_registered_once = False - - def __init__( - self, - name: str, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - url_prefix: str | None = None, - subdomain: str | None = None, - url_defaults: dict[str, t.Any] | None = None, - root_path: str | None = None, - cli_group: str | None = _sentinel, # type: ignore[assignment] - ): - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if not name: - raise ValueError("'name' may not be empty.") - - if "." in name: - raise ValueError("'name' may not contain a dot '.' character.") - - self.name = name - self.url_prefix = url_prefix - self.subdomain = subdomain - self.deferred_functions: list[DeferredSetupFunction] = [] - - if url_defaults is None: - url_defaults = {} - - self.url_values_defaults = url_defaults - self.cli_group = cli_group - self._blueprints: list[tuple[Blueprint, dict[str, t.Any]]] = [] - - def _check_setup_finished(self, f_name: str) -> None: - if self._got_registered_once: - raise AssertionError( - f"The setup method '{f_name}' can no longer be called on the blueprint" - f" '{self.name}'. It has already been registered at least once, any" - " changes will not be applied consistently.\n" - "Make sure all imports, decorators, functions, etc. needed to set up" - " the blueprint are done before registering it." - ) - - @setupmethod - def record(self, func: DeferredSetupFunction) -> None: - """Registers a function that is called when the blueprint is - registered on the application. This function is called with the - state as argument as returned by the :meth:`make_setup_state` - method. - """ - self.deferred_functions.append(func) - - @setupmethod - def record_once(self, func: DeferredSetupFunction) -> None: - """Works like :meth:`record` but wraps the function in another - function that will ensure the function is only called once. If the - blueprint is registered a second time on the application, the - function passed is not called. 
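Editor's note: the ``Blueprint`` constructor documented above enforces a non-empty, dot-free name and accepts per-blueprint defaults. A sketch; the folder names and routes are illustrative:

.. code-block:: python

    from flask import Blueprint

    admin = Blueprint(
        "admin",                      # endpoint prefix; may not be empty or contain "."
        __name__,
        url_prefix="/admin",          # prepended to every route the blueprint defines
        template_folder="templates",  # relative to the blueprint's root path
        static_folder="static",       # enables the blueprint's static route
    )

    @admin.route("/dashboard")
    def dashboard():
        return "admin dashboard"      # endpoint name becomes "admin.dashboard"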
- """ - - def wrapper(state: BlueprintSetupState) -> None: - if state.first_registration: - func(state) - - self.record(update_wrapper(wrapper, func)) - - def make_setup_state( - self, app: App, options: dict[str, t.Any], first_registration: bool = False - ) -> BlueprintSetupState: - """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` - object that is later passed to the register callback functions. - Subclasses can override this to return a subclass of the setup state. - """ - return BlueprintSetupState(self, app, options, first_registration) - - @setupmethod - def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on this blueprint. Keyword - arguments passed to this method will override the defaults set - on the blueprint. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 2.0 - """ - if blueprint is self: - raise ValueError("Cannot register a blueprint on itself") - self._blueprints.append((blueprint, options)) - - def register(self, app: App, options: dict[str, t.Any]) -> None: - """Called by :meth:`Flask.register_blueprint` to register all - views and callbacks registered on the blueprint with the - application. Creates a :class:`.BlueprintSetupState` and calls - each :meth:`record` callback with it. - - :param app: The application this blueprint is being registered - with. - :param options: Keyword arguments forwarded from - :meth:`~Flask.register_blueprint`. - - .. versionchanged:: 2.3 - Nested blueprints now correctly apply subdomains. - - .. versionchanged:: 2.1 - Registering the same blueprint with the same name multiple - times is an error. - - .. versionchanged:: 2.0.1 - Nested blueprints are registered with their dotted name. - This allows different blueprints with the same name to be - nested at different locations. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - """ - name_prefix = options.get("name_prefix", "") - self_name = options.get("name", self.name) - name = f"{name_prefix}.{self_name}".lstrip(".") - - if name in app.blueprints: - bp_desc = "this" if app.blueprints[name] is self else "a different" - existing_at = f" '{name}'" if self_name != name else "" - - raise ValueError( - f"The name '{self_name}' is already registered for" - f" {bp_desc} blueprint{existing_at}. Use 'name=' to" - f" provide a unique name." - ) - - first_bp_registration = not any(bp is self for bp in app.blueprints.values()) - first_name_registration = name not in app.blueprints - - app.blueprints[name] = self - self._got_registered_once = True - state = self.make_setup_state(app, options, first_bp_registration) - - if self.has_static_folder: - state.add_url_rule( - f"{self.static_url_path}/", - view_func=self.send_static_file, # type: ignore[attr-defined] - endpoint="static", - ) - - # Merge blueprint data into parent. 
- if first_bp_registration or first_name_registration: - self._merge_blueprint_funcs(app, name) - - for deferred in self.deferred_functions: - deferred(state) - - cli_resolved_group = options.get("cli_group", self.cli_group) - - if self.cli.commands: - if cli_resolved_group is None: - app.cli.commands.update(self.cli.commands) - elif cli_resolved_group is _sentinel: - self.cli.name = name - app.cli.add_command(self.cli) - else: - self.cli.name = cli_resolved_group - app.cli.add_command(self.cli) - - for blueprint, bp_options in self._blueprints: - bp_options = bp_options.copy() - bp_url_prefix = bp_options.get("url_prefix") - bp_subdomain = bp_options.get("subdomain") - - if bp_subdomain is None: - bp_subdomain = blueprint.subdomain - - if state.subdomain is not None and bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain + "." + state.subdomain - elif bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain - elif state.subdomain is not None: - bp_options["subdomain"] = state.subdomain - - if bp_url_prefix is None: - bp_url_prefix = blueprint.url_prefix - - if state.url_prefix is not None and bp_url_prefix is not None: - bp_options["url_prefix"] = ( - state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") - ) - elif bp_url_prefix is not None: - bp_options["url_prefix"] = bp_url_prefix - elif state.url_prefix is not None: - bp_options["url_prefix"] = state.url_prefix - - bp_options["name_prefix"] = name - blueprint.register(app, bp_options) - - def _merge_blueprint_funcs(self, app: App, name: str) -> None: - def extend( - bp_dict: dict[ft.AppOrBlueprintKey, list[t.Any]], - parent_dict: dict[ft.AppOrBlueprintKey, list[t.Any]], - ) -> None: - for key, values in bp_dict.items(): - key = name if key is None else f"{name}.{key}" - parent_dict[key].extend(values) - - for key, value in self.error_handler_spec.items(): - key = name if key is None else f"{name}.{key}" - value = defaultdict( - dict, - { - code: {exc_class: func for exc_class, func in code_values.items()} - for code, code_values in value.items() - }, - ) - app.error_handler_spec[key] = value - - for endpoint, func in self.view_functions.items(): - app.view_functions[endpoint] = func - - extend(self.before_request_funcs, app.before_request_funcs) - extend(self.after_request_funcs, app.after_request_funcs) - extend( - self.teardown_request_funcs, - app.teardown_request_funcs, - ) - extend(self.url_default_functions, app.url_default_functions) - extend(self.url_value_preprocessors, app.url_value_preprocessors) - extend(self.template_context_processors, app.template_context_processors) - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a URL rule with the blueprint. See :meth:`.Flask.add_url_rule` for - full documentation. - - The URL rule is prefixed with the blueprint's URL prefix. The endpoint name, - used with :func:`url_for`, is prefixed with the blueprint's name. - """ - if endpoint and "." in endpoint: - raise ValueError("'endpoint' may not contain a dot '.' character.") - - if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: - raise ValueError("'view_func' name may not contain a dot '.' 
character.") - - self.record( - lambda s: s.add_url_rule( - rule, - endpoint, - view_func, - provide_automatic_options=provide_automatic_options, - **options, - ) - ) - - @setupmethod - def app_template_filter( - self, name: str | None = None - ) -> t.Callable[[T_template_filter], T_template_filter]: - """Register a template filter, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_filter`. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f: T_template_filter) -> T_template_filter: - self.add_app_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_filter( - self, f: ft.TemplateFilterCallable, name: str | None = None - ) -> None: - """Register a template filter, available in any template rendered by the - application. Works like the :meth:`app_template_filter` decorator. Equivalent to - :meth:`.Flask.add_template_filter`. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.filters[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_test( - self, name: str | None = None - ) -> t.Callable[[T_template_test], T_template_test]: - """Register a template test, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_app_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a template test, available in any template rendered by the - application. Works like the :meth:`app_template_test` decorator. Equivalent to - :meth:`.Flask.add_template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.tests[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """Register a template global, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_app_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a template global, available in any template rendered by the - application. Works like the :meth:`app_template_global` decorator. Equivalent to - :meth:`.Flask.add_template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. 
- """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.globals[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def before_app_request(self, f: T_before_request) -> T_before_request: - """Like :meth:`before_request`, but before every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.before_request`. - """ - self.record_once( - lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def after_app_request(self, f: T_after_request) -> T_after_request: - """Like :meth:`after_request`, but after every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.after_request`. - """ - self.record_once( - lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def teardown_app_request(self, f: T_teardown) -> T_teardown: - """Like :meth:`teardown_request`, but after every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.teardown_request`. - """ - self.record_once( - lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_context_processor( - self, f: T_template_context_processor - ) -> T_template_context_processor: - """Like :meth:`context_processor`, but for templates rendered by every view, not - only by the blueprint. Equivalent to :meth:`.Flask.context_processor`. - """ - self.record_once( - lambda s: s.app.template_context_processors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_errorhandler( - self, code: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Like :meth:`errorhandler`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.errorhandler`. - """ - - def decorator(f: T_error_handler) -> T_error_handler: - def from_blueprint(state: BlueprintSetupState) -> None: - state.app.errorhandler(code)(f) - - self.record_once(from_blueprint) - return f - - return decorator - - @setupmethod - def app_url_value_preprocessor( - self, f: T_url_value_preprocessor - ) -> T_url_value_preprocessor: - """Like :meth:`url_value_preprocessor`, but for every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.url_value_preprocessor`. - """ - self.record_once( - lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Like :meth:`url_defaults`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.url_defaults`. - """ - self.record_once( - lambda s: s.app.url_default_functions.setdefault(None, []).append(f) - ) - return f diff --git a/venv/lib/python3.10/site-packages/flask/sansio/scaffold.py b/venv/lib/python3.10/site-packages/flask/sansio/scaffold.py deleted file mode 100644 index 3a839f5..0000000 --- a/venv/lib/python3.10/site-packages/flask/sansio/scaffold.py +++ /dev/null @@ -1,792 +0,0 @@ -from __future__ import annotations - -import importlib.util -import os -import pathlib -import sys -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from jinja2 import BaseLoader -from jinja2 import FileSystemLoader -from werkzeug.exceptions import default_exceptions -from werkzeug.exceptions import HTTPException -from werkzeug.utils import cached_property - -from .. 
import typing as ft -from ..helpers import get_root_path -from ..templating import _default_template_ctx_processor - -if t.TYPE_CHECKING: # pragma: no cover - from click import Group - -# a singleton sentinel value for parameter defaults -_sentinel = object() - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any]) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) -T_route = t.TypeVar("T_route", bound=ft.RouteCallable) - - -def setupmethod(f: F) -> F: - f_name = f.__name__ - - def wrapper_func(self: Scaffold, *args: t.Any, **kwargs: t.Any) -> t.Any: - self._check_setup_finished(f_name) - return f(self, *args, **kwargs) - - return t.cast(F, update_wrapper(wrapper_func, f)) - - -class Scaffold: - """Common behavior shared between :class:`~flask.Flask` and - :class:`~flask.blueprints.Blueprint`. - - :param import_name: The import name of the module where this object - is defined. Usually :attr:`__name__` should be used. - :param static_folder: Path to a folder of static files to serve. - If this is set, a static route will be added. - :param static_url_path: URL prefix for the static route. - :param template_folder: Path to a folder containing template files. - for rendering. If this is set, a Jinja loader will be added. - :param root_path: The path that static, template, and resource files - are relative to. Typically not set, it is discovered based on - the ``import_name``. - - .. versionadded:: 2.0 - """ - - cli: Group - name: str - _static_folder: str | None = None - _static_url_path: str | None = None - - def __init__( - self, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - root_path: str | None = None, - ): - #: The name of the package or module that this object belongs - #: to. Do not change this once it is set by the constructor. - self.import_name = import_name - - self.static_folder = static_folder # type: ignore - self.static_url_path = static_url_path - - #: The path to the templates folder, relative to - #: :attr:`root_path`, to add to the template loader. ``None`` if - #: templates should not be added. - self.template_folder = template_folder - - if root_path is None: - root_path = get_root_path(self.import_name) - - #: Absolute path to the package on the filesystem. Used to look - #: up resources contained in the package. - self.root_path = root_path - - #: A dictionary mapping endpoint names to view functions. - #: - #: To register a view function, use the :meth:`route` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.view_functions: dict[str, ft.RouteCallable] = {} - - #: A data structure of registered error handlers, in the format - #: ``{scope: {code: {class: handler}}}``. The ``scope`` key is - #: the name of a blueprint the handlers are active for, or - #: ``None`` for all requests. 
The ``code`` key is the HTTP - #: status code for ``HTTPException``, or ``None`` for - #: other exceptions. The innermost dictionary maps exception - #: classes to handler functions. - #: - #: To register an error handler, use the :meth:`errorhandler` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.error_handler_spec: dict[ - ft.AppOrBlueprintKey, - dict[int | None, dict[type[Exception], ft.ErrorHandlerCallable]], - ] = defaultdict(lambda: defaultdict(dict)) - - #: A data structure of functions to call at the beginning of - #: each request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`before_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.before_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.BeforeRequestCallable] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`after_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.after_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.AfterRequestCallable[t.Any]] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request even if an exception is raised, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`teardown_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.teardown_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.TeardownCallable] - ] = defaultdict(list) - - #: A data structure of functions to call to pass extra context - #: values when rendering templates, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`context_processor` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.template_context_processors: dict[ - ft.AppOrBlueprintKey, list[ft.TemplateContextProcessorCallable] - ] = defaultdict(list, {None: [_default_template_ctx_processor]}) - - #: A data structure of functions to call to modify the keyword - #: arguments passed to the view function, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the - #: :meth:`url_value_preprocessor` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. 
- self.url_value_preprocessors: dict[ - ft.AppOrBlueprintKey, - list[ft.URLValuePreprocessorCallable], - ] = defaultdict(list) - - #: A data structure of functions to call to modify the keyword - #: arguments when generating URLs, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`url_defaults` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.url_default_functions: dict[ - ft.AppOrBlueprintKey, list[ft.URLDefaultCallable] - ] = defaultdict(list) - - def __repr__(self) -> str: - return f"<{type(self).__name__} {self.name!r}>" - - def _check_setup_finished(self, f_name: str) -> None: - raise NotImplementedError - - @property - def static_folder(self) -> str | None: - """The absolute path to the configured static folder. ``None`` - if no static folder is set. - """ - if self._static_folder is not None: - return os.path.join(self.root_path, self._static_folder) - else: - return None - - @static_folder.setter - def static_folder(self, value: str | os.PathLike[str] | None) -> None: - if value is not None: - value = os.fspath(value).rstrip(r"\/") - - self._static_folder = value - - @property - def has_static_folder(self) -> bool: - """``True`` if :attr:`static_folder` is set. - - .. versionadded:: 0.5 - """ - return self.static_folder is not None - - @property - def static_url_path(self) -> str | None: - """The URL prefix that the static route will be accessible from. - - If it was not configured during init, it is derived from - :attr:`static_folder`. - """ - if self._static_url_path is not None: - return self._static_url_path - - if self.static_folder is not None: - basename = os.path.basename(self.static_folder) - return f"/{basename}".rstrip("/") - - return None - - @static_url_path.setter - def static_url_path(self, value: str | None) -> None: - if value is not None: - value = value.rstrip("/") - - self._static_url_path = value - - @cached_property - def jinja_loader(self) -> BaseLoader | None: - """The Jinja loader for this object's templates. By default this - is a class :class:`jinja2.loaders.FileSystemLoader` to - :attr:`template_folder` if it is set. - - .. versionadded:: 0.5 - """ - if self.template_folder is not None: - return FileSystemLoader(os.path.join(self.root_path, self.template_folder)) - else: - return None - - def _method_route( - self, - method: str, - rule: str, - options: dict[str, t.Any], - ) -> t.Callable[[T_route], T_route]: - if "methods" in options: - raise TypeError("Use the 'route' decorator to use the 'methods' argument.") - - return self.route(rule, methods=[method], **options) - - @setupmethod - def get(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["GET"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("GET", rule, options) - - @setupmethod - def post(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["POST"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("POST", rule, options) - - @setupmethod - def put(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PUT"]``. - - .. 
versionadded:: 2.0 - """ - return self._method_route("PUT", rule, options) - - @setupmethod - def delete(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["DELETE"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("DELETE", rule, options) - - @setupmethod - def patch(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PATCH"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("PATCH", rule, options) - - @setupmethod - def route(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Decorate a view function to register it with the given URL - rule and options. Calls :meth:`add_url_rule`, which has more - details about the implementation. - - .. code-block:: python - - @app.route("/") - def index(): - return "Hello, World!" - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` and - ``OPTIONS`` are added automatically. - - :param rule: The URL rule string. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. - """ - - def decorator(f: T_route) -> T_route: - endpoint = options.pop("endpoint", None) - self.add_url_rule(rule, endpoint, f, **options) - return f - - return decorator - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a rule for routing incoming requests and building - URLs. The :meth:`route` decorator is a shortcut to call this - with the ``view_func`` argument. These are equivalent: - - .. code-block:: python - - @app.route("/") - def index(): - ... - - .. code-block:: python - - def index(): - ... - - app.add_url_rule("/", view_func=index) - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. An error - will be raised if a function has already been registered for the - endpoint. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` is - always added automatically, and ``OPTIONS`` is added - automatically by default. - - ``view_func`` does not necessarily need to be passed, but if the - rule should participate in routing an endpoint name must be - associated with a view function at some point with the - :meth:`endpoint` decorator. - - .. code-block:: python - - app.add_url_rule("/", endpoint="index") - - @app.endpoint("index") - def index(): - ... - - If ``view_func`` has a ``required_methods`` attribute, those - methods are added to the passed and automatic methods. If it - has a ``provide_automatic_methods`` attribute, it is used as the - default if the parameter is not passed. - - :param rule: The URL rule string. - :param endpoint: The endpoint name to associate with the rule - and view function. Used when routing and building URLs. - Defaults to ``view_func.__name__``. - :param view_func: The view function to associate with the - endpoint name. - :param provide_automatic_options: Add the ``OPTIONS`` method and - respond to ``OPTIONS`` requests automatically. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. 
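Editor's note: the method shortcuts above (``get``, ``post``, ...) are thin wrappers over ``route``, which in turn calls ``add_url_rule``. A sketch of the equivalent registrations; the routes and view names are illustrative:

.. code-block:: python

    from flask import Flask

    app = Flask(__name__)

    @app.get("/items")               # same as @app.route("/items", methods=["GET"])
    def list_items():
        return "all items"

    @app.post("/items")              # same as @app.route("/items", methods=["POST"])
    def create_item():
        return "created", 201

    def show_item(item_id):
        return f"item {item_id}"

    # Decorator-free form; the endpoint defaults to the view function's name.
    app.add_url_rule("/items/<int:item_id>", view_func=show_item)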
- """ - raise NotImplementedError - - @setupmethod - def endpoint(self, endpoint: str) -> t.Callable[[F], F]: - """Decorate a view function to register it for the given - endpoint. Used if a rule is added without a ``view_func`` with - :meth:`add_url_rule`. - - .. code-block:: python - - app.add_url_rule("/ex", endpoint="example") - - @app.endpoint("example") - def example(): - ... - - :param endpoint: The endpoint name to associate with the view - function. - """ - - def decorator(f: F) -> F: - self.view_functions[endpoint] = f - return f - - return decorator - - @setupmethod - def before_request(self, f: T_before_request) -> T_before_request: - """Register a function to run before each request. - - For example, this can be used to open a database connection, or - to load the logged in user from the session. - - .. code-block:: python - - @app.before_request - def load_user(): - if "user_id" in session: - g.user = db.session.get(session["user_id"]) - - The function will be called without any arguments. If it returns - a non-``None`` value, the value is handled as if it was the - return value from the view, and further request handling is - stopped. - - This is available on both app and blueprint objects. When used on an app, this - executes before every request. When used on a blueprint, this executes before - every request that the blueprint handles. To register with a blueprint and - execute before every request, use :meth:`.Blueprint.before_app_request`. - """ - self.before_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def after_request(self, f: T_after_request) -> T_after_request: - """Register a function to run after each request to this object. - - The function is called with the response object, and must return - a response object. This allows the functions to modify or - replace the response before it is sent. - - If a function raises an exception, any remaining - ``after_request`` functions will not be called. Therefore, this - should not be used for actions that must execute, such as to - close resources. Use :meth:`teardown_request` for that. - - This is available on both app and blueprint objects. When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.after_app_request`. - """ - self.after_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def teardown_request(self, f: T_teardown) -> T_teardown: - """Register a function to be called when the request context is - popped. Typically this happens at the end of each request, but - contexts may be pushed manually as well during testing. - - .. code-block:: python - - with app.test_request_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the request context is - made inactive. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. - - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - This is available on both app and blueprint objects. 
When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.teardown_app_request`. - """ - self.teardown_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def context_processor( - self, - f: T_template_context_processor, - ) -> T_template_context_processor: - """Registers a template context processor function. These functions run before - rendering a template. The keys of the returned dict are added as variables - available in the template. - - This is available on both app and blueprint objects. When used on an app, this - is called for every rendered template. When used on a blueprint, this is called - for templates rendered from the blueprint's views. To register with a blueprint - and affect every template, use :meth:`.Blueprint.app_context_processor`. - """ - self.template_context_processors[None].append(f) - return f - - @setupmethod - def url_value_preprocessor( - self, - f: T_url_value_preprocessor, - ) -> T_url_value_preprocessor: - """Register a URL value preprocessor function for all view - functions in the application. These functions will be called before the - :meth:`before_request` functions. - - The function can modify the values captured from the matched url before - they are passed to the view. For example, this can be used to pop a - common language code value and place it in ``g`` rather than pass it to - every view. - - The function is passed the endpoint name and values dict. The return - value is ignored. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_value_preprocessor`. - """ - self.url_value_preprocessors[None].append(f) - return f - - @setupmethod - def url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Callback function for URL defaults for all view functions of the - application. It's called with the endpoint and values and should - update the values passed in place. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_defaults`. - """ - self.url_default_functions[None].append(f) - return f - - @setupmethod - def errorhandler( - self, code_or_exception: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Register a function to handle errors by code or exception class. - - A decorator that is used to register a function given an - error code. Example:: - - @app.errorhandler(404) - def page_not_found(error): - return 'This page does not exist', 404 - - You can also register handlers for arbitrary exceptions:: - - @app.errorhandler(DatabaseError) - def special_exception_handler(error): - return 'Database connection failed', 500 - - This is available on both app and blueprint objects. When used on an app, this - can handle errors from every request. When used on a blueprint, this can handle - errors from requests that the blueprint handles. To register with a blueprint - and affect every request, use :meth:`.Blueprint.app_errorhandler`. - - .. 
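Editor's note: the ``url_value_preprocessor`` and ``url_defaults`` docstrings above describe a common pairing: pull a value (such as a language code) out of every matched URL, and put it back when building URLs. A sketch under that assumption; the names are illustrative and a real application would also check which endpoints expect the argument:

.. code-block:: python

    from flask import Flask, g

    app = Flask(__name__)

    @app.url_value_preprocessor
    def pull_lang(endpoint, values):
        # Called before the before_request functions; mutate ``values`` in place.
        if values is not None:
            g.lang_code = values.pop("lang_code", None)

    @app.url_defaults
    def add_lang(endpoint, values):
        # Called when building URLs; re-inject the stored value.
        if "lang_code" not in values and getattr(g, "lang_code", None):
            values["lang_code"] = g.lang_code

    @app.route("/<lang_code>/about")
    def about():
        return f"about ({g.lang_code})"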
versionadded:: 0.7 - Use :meth:`register_error_handler` instead of modifying - :attr:`error_handler_spec` directly, for application wide error - handlers. - - .. versionadded:: 0.7 - One can now additionally also register custom exception types - that do not necessarily have to be a subclass of the - :class:`~werkzeug.exceptions.HTTPException` class. - - :param code_or_exception: the code as integer for the handler, or - an arbitrary exception - """ - - def decorator(f: T_error_handler) -> T_error_handler: - self.register_error_handler(code_or_exception, f) - return f - - return decorator - - @setupmethod - def register_error_handler( - self, - code_or_exception: type[Exception] | int, - f: ft.ErrorHandlerCallable, - ) -> None: - """Alternative error attach function to the :meth:`errorhandler` - decorator that is more straightforward to use for non decorator - usage. - - .. versionadded:: 0.7 - """ - exc_class, code = self._get_exc_class_and_code(code_or_exception) - self.error_handler_spec[None][code][exc_class] = f - - @staticmethod - def _get_exc_class_and_code( - exc_class_or_code: type[Exception] | int, - ) -> tuple[type[Exception], int | None]: - """Get the exception class being handled. For HTTP status codes - or ``HTTPException`` subclasses, return both the exception and - status code. - - :param exc_class_or_code: Any exception class, or an HTTP status - code as an integer. - """ - exc_class: type[Exception] - - if isinstance(exc_class_or_code, int): - try: - exc_class = default_exceptions[exc_class_or_code] - except KeyError: - raise ValueError( - f"'{exc_class_or_code}' is not a recognized HTTP" - " error code. Use a subclass of HTTPException with" - " that code instead." - ) from None - else: - exc_class = exc_class_or_code - - if isinstance(exc_class, Exception): - raise TypeError( - f"{exc_class!r} is an instance, not a class. Handlers" - " can only be registered for Exception classes or HTTP" - " error codes." - ) - - if not issubclass(exc_class, Exception): - raise ValueError( - f"'{exc_class.__name__}' is not a subclass of Exception." - " Handlers can only be registered for Exception classes" - " or HTTP error codes." - ) - - if issubclass(exc_class, HTTPException): - return exc_class, exc_class.code - else: - return exc_class, None - - -def _endpoint_from_view_func(view_func: ft.RouteCallable) -> str: - """Internal helper that returns the default endpoint for a given - function. This always is the function name. - """ - assert view_func is not None, "expected view func if endpoint is not provided." - return view_func.__name__ - - -def _find_package_path(import_name: str) -> str: - """Find the path that contains the package or module.""" - root_mod_name, _, _ = import_name.partition(".") - - try: - root_spec = importlib.util.find_spec(root_mod_name) - - if root_spec is None: - raise ValueError("not found") - except (ImportError, ValueError): - # ImportError: the machinery told us it does not exist - # ValueError: - # - the module name was invalid - # - the module name is __main__ - # - we raised `ValueError` due to `root_spec` being `None` - return os.getcwd() - - if root_spec.submodule_search_locations: - if root_spec.origin is None or root_spec.origin == "namespace": - # namespace package - package_spec = importlib.util.find_spec(import_name) - - if package_spec is not None and package_spec.submodule_search_locations: - # Pick the path in the namespace that contains the submodule. 
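Editor's note: ``register_error_handler`` above is the non-decorator counterpart of ``errorhandler`` and accepts either an HTTP status code or an exception class. A sketch; the custom exception and handlers are illustrative:

.. code-block:: python

    from flask import Flask

    app = Flask(__name__)

    class PaymentRequired(Exception):   # illustrative custom exception
        pass

    def page_not_found(error):
        return "This page does not exist", 404

    def handle_payment(error):
        return "Payment required", 402

    app.register_error_handler(404, page_not_found)              # by status code
    app.register_error_handler(PaymentRequired, handle_payment)  # by exception class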
- package_path = pathlib.Path( - os.path.commonpath(package_spec.submodule_search_locations) - ) - search_location = next( - location - for location in root_spec.submodule_search_locations - if package_path.is_relative_to(location) - ) - else: - # Pick the first path. - search_location = root_spec.submodule_search_locations[0] - - return os.path.dirname(search_location) - else: - # package with __init__.py - return os.path.dirname(os.path.dirname(root_spec.origin)) - else: - # module - return os.path.dirname(root_spec.origin) # type: ignore[type-var, return-value] - - -def find_package(import_name: str) -> tuple[str | None, str]: - """Find the prefix that a package is installed under, and the path - that it would be imported from. - - The prefix is the directory containing the standard directory - hierarchy (lib, bin, etc.). If the package is not installed to the - system (:attr:`sys.prefix`) or a virtualenv (``site-packages``), - ``None`` is returned. - - The path is the entry in :attr:`sys.path` that contains the package - for import. If the package is not installed, it's assumed that the - package was imported from the current working directory. - """ - package_path = _find_package_path(import_name) - py_prefix = os.path.abspath(sys.prefix) - - # installed to the system - if pathlib.PurePath(package_path).is_relative_to(py_prefix): - return py_prefix, package_path - - site_parent, site_folder = os.path.split(package_path) - - # installed to a virtualenv - if site_folder.lower() == "site-packages": - parent, folder = os.path.split(site_parent) - - # Windows (prefix/lib/site-packages) - if folder.lower() == "lib": - return parent, package_path - - # Unix (prefix/lib/pythonX.Y/site-packages) - if os.path.basename(parent).lower() == "lib": - return os.path.dirname(parent), package_path - - # something else (prefix/site-packages) - return site_parent, package_path - - # not installed - return None, package_path diff --git a/venv/lib/python3.10/site-packages/flask/sessions.py b/venv/lib/python3.10/site-packages/flask/sessions.py deleted file mode 100644 index 4ffde71..0000000 --- a/venv/lib/python3.10/site-packages/flask/sessions.py +++ /dev/null @@ -1,399 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import hashlib -import typing as t -from collections.abc import MutableMapping -from datetime import datetime -from datetime import timezone - -from itsdangerous import BadSignature -from itsdangerous import URLSafeTimedSerializer -from werkzeug.datastructures import CallbackDict - -from .json.tag import TaggedJSONSerializer - -if t.TYPE_CHECKING: # pragma: no cover - import typing_extensions as te - - from .app import Flask - from .wrappers import Request - from .wrappers import Response - - -class SessionMixin(MutableMapping[str, t.Any]): - """Expands a basic dictionary with session attributes.""" - - @property - def permanent(self) -> bool: - """This reflects the ``'_permanent'`` key in the dict.""" - return self.get("_permanent", False) - - @permanent.setter - def permanent(self, value: bool) -> None: - self["_permanent"] = bool(value) - - #: Some implementations can detect whether a session is newly - #: created, but that is not guaranteed. Use with caution. The mixin - # default is hard-coded ``False``. - new = False - - #: Some implementations can detect changes to the session and set - #: this when that happens. The mixin default is hard coded to - #: ``True``. 
- modified = True - - #: Some implementations can detect when session data is read or - #: written and set this when that happens. The mixin default is hard - #: coded to ``True``. - accessed = True - - -class SecureCookieSession(CallbackDict[str, t.Any], SessionMixin): - """Base class for sessions based on signed cookies. - - This session backend will set the :attr:`modified` and - :attr:`accessed` attributes. It cannot reliably track whether a - session is new (vs. empty), so :attr:`new` remains hard coded to - ``False``. - """ - - #: When data is changed, this is set to ``True``. Only the session - #: dictionary itself is tracked; if the session contains mutable - #: data (for example a nested dict) then this must be set to - #: ``True`` manually when modifying that data. The session cookie - #: will only be written to the response if this is ``True``. - modified = False - - #: When data is read or written, this is set to ``True``. Used by - # :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie`` - #: header, which allows caching proxies to cache different pages for - #: different users. - accessed = False - - def __init__( - self, - initial: c.Mapping[str, t.Any] | c.Iterable[tuple[str, t.Any]] | None = None, - ) -> None: - def on_update(self: te.Self) -> None: - self.modified = True - self.accessed = True - - super().__init__(initial, on_update) - - def __getitem__(self, key: str) -> t.Any: - self.accessed = True - return super().__getitem__(key) - - def get(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().get(key, default) - - def setdefault(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().setdefault(key, default) - - -class NullSession(SecureCookieSession): - """Class used to generate nicer error messages if sessions are not - available. Will still allow read-only access to the empty session - but fail on setting. - """ - - def _fail(self, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - raise RuntimeError( - "The session is unavailable because no secret " - "key was set. Set the secret_key on the " - "application to something unique and secret." - ) - - __setitem__ = __delitem__ = clear = pop = popitem = update = setdefault = _fail # type: ignore # noqa: B950 - del _fail - - -class SessionInterface: - """The basic interface you have to implement in order to replace the - default session interface which uses werkzeug's securecookie - implementation. The only methods you have to implement are - :meth:`open_session` and :meth:`save_session`, the others have - useful defaults which you don't need to change. - - The session object returned by the :meth:`open_session` method has to - provide a dictionary like interface plus the properties and methods - from the :class:`SessionMixin`. We recommend just subclassing a dict - and adding that mixin:: - - class Session(dict, SessionMixin): - pass - - If :meth:`open_session` returns ``None`` Flask will call into - :meth:`make_null_session` to create a session that acts as replacement - if the session support cannot work because some requirement is not - fulfilled. The default :class:`NullSession` class that is created - will complain that the secret key was not set. - - To replace the session interface on an application all you have to do - is to assign :attr:`flask.Flask.session_interface`:: - - app = Flask(__name__) - app.session_interface = MySessionInterface() - - Multiple requests with the same session may be sent and handled - concurrently. 
When implementing a new session interface, consider - whether reads or writes to the backing store must be synchronized. - There is no guarantee on the order in which the session for each - request is opened or saved, it will occur in the order that requests - begin and end processing. - - .. versionadded:: 0.8 - """ - - #: :meth:`make_null_session` will look here for the class that should - #: be created when a null session is requested. Likewise the - #: :meth:`is_null_session` method will perform a typecheck against - #: this type. - null_session_class = NullSession - - #: A flag that indicates if the session interface is pickle based. - #: This can be used by Flask extensions to make a decision in regards - #: to how to deal with the session object. - #: - #: .. versionadded:: 0.10 - pickle_based = False - - def make_null_session(self, app: Flask) -> NullSession: - """Creates a null session which acts as a replacement object if the - real session support could not be loaded due to a configuration - error. This mainly aids the user experience because the job of the - null session is to still support lookup without complaining but - modifications are answered with a helpful error message of what - failed. - - This creates an instance of :attr:`null_session_class` by default. - """ - return self.null_session_class() - - def is_null_session(self, obj: object) -> bool: - """Checks if a given object is a null session. Null sessions are - not asked to be saved. - - This checks if the object is an instance of :attr:`null_session_class` - by default. - """ - return isinstance(obj, self.null_session_class) - - def get_cookie_name(self, app: Flask) -> str: - """The name of the session cookie. Uses``app.config["SESSION_COOKIE_NAME"]``.""" - return app.config["SESSION_COOKIE_NAME"] # type: ignore[no-any-return] - - def get_cookie_domain(self, app: Flask) -> str | None: - """The value of the ``Domain`` parameter on the session cookie. If not set, - browsers will only send the cookie to the exact domain it was set from. - Otherwise, they will send it to any subdomain of the given value as well. - - Uses the :data:`SESSION_COOKIE_DOMAIN` config. - - .. versionchanged:: 2.3 - Not set by default, does not fall back to ``SERVER_NAME``. - """ - return app.config["SESSION_COOKIE_DOMAIN"] # type: ignore[no-any-return] - - def get_cookie_path(self, app: Flask) -> str: - """Returns the path for which the cookie should be valid. The - default implementation uses the value from the ``SESSION_COOKIE_PATH`` - config var if it's set, and falls back to ``APPLICATION_ROOT`` or - uses ``/`` if it's ``None``. - """ - return app.config["SESSION_COOKIE_PATH"] or app.config["APPLICATION_ROOT"] # type: ignore[no-any-return] - - def get_cookie_httponly(self, app: Flask) -> bool: - """Returns True if the session cookie should be httponly. This - currently just returns the value of the ``SESSION_COOKIE_HTTPONLY`` - config var. - """ - return app.config["SESSION_COOKIE_HTTPONLY"] # type: ignore[no-any-return] - - def get_cookie_secure(self, app: Flask) -> bool: - """Returns True if the cookie should be secure. This currently - just returns the value of the ``SESSION_COOKIE_SECURE`` setting. - """ - return app.config["SESSION_COOKIE_SECURE"] # type: ignore[no-any-return] - - def get_cookie_samesite(self, app: Flask) -> str | None: - """Return ``'Strict'`` or ``'Lax'`` if the cookie should use the - ``SameSite`` attribute. This currently just returns the value of - the :data:`SESSION_COOKIE_SAMESITE` setting. 
- """ - return app.config["SESSION_COOKIE_SAMESITE"] # type: ignore[no-any-return] - - def get_cookie_partitioned(self, app: Flask) -> bool: - """Returns True if the cookie should be partitioned. By default, uses - the value of :data:`SESSION_COOKIE_PARTITIONED`. - - .. versionadded:: 3.1 - """ - return app.config["SESSION_COOKIE_PARTITIONED"] # type: ignore[no-any-return] - - def get_expiration_time(self, app: Flask, session: SessionMixin) -> datetime | None: - """A helper method that returns an expiration date for the session - or ``None`` if the session is linked to the browser session. The - default implementation returns now + the permanent session - lifetime configured on the application. - """ - if session.permanent: - return datetime.now(timezone.utc) + app.permanent_session_lifetime - return None - - def should_set_cookie(self, app: Flask, session: SessionMixin) -> bool: - """Used by session backends to determine if a ``Set-Cookie`` header - should be set for this session cookie for this response. If the session - has been modified, the cookie is set. If the session is permanent and - the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is - always set. - - This check is usually skipped if the session was deleted. - - .. versionadded:: 0.11 - """ - - return session.modified or ( - session.permanent and app.config["SESSION_REFRESH_EACH_REQUEST"] - ) - - def open_session(self, app: Flask, request: Request) -> SessionMixin | None: - """This is called at the beginning of each request, after - pushing the request context, before matching the URL. - - This must return an object which implements a dictionary-like - interface as well as the :class:`SessionMixin` interface. - - This will return ``None`` to indicate that loading failed in - some way that is not immediately an error. The request - context will fall back to using :meth:`make_null_session` - in this case. - """ - raise NotImplementedError() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - """This is called at the end of each request, after generating - a response, before removing the request context. It is skipped - if :meth:`is_null_session` returns ``True``. - """ - raise NotImplementedError() - - -session_json_serializer = TaggedJSONSerializer() - - -def _lazy_sha1(string: bytes = b"") -> t.Any: - """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include - SHA-1, in which case the import and use as a default would fail before the - developer can configure something else. - """ - return hashlib.sha1(string) - - -class SecureCookieSessionInterface(SessionInterface): - """The default session interface that stores sessions in signed cookies - through the :mod:`itsdangerous` module. - """ - - #: the salt that should be applied on top of the secret key for the - #: signing of cookie based sessions. - salt = "cookie-session" - #: the hash function to use for the signature. The default is sha1 - digest_method = staticmethod(_lazy_sha1) - #: the name of the itsdangerous supported key derivation. The default - #: is hmac. - key_derivation = "hmac" - #: A python serializer for the payload. The default is a compact - #: JSON derived serializer with support for some extra Python types - #: such as datetime objects or tuples. 
- serializer = session_json_serializer - session_class = SecureCookieSession - - def get_signing_serializer(self, app: Flask) -> URLSafeTimedSerializer | None: - if not app.secret_key: - return None - - keys: list[str | bytes] = [] - - if fallbacks := app.config["SECRET_KEY_FALLBACKS"]: - keys.extend(fallbacks) - - keys.append(app.secret_key) # itsdangerous expects current key at top - return URLSafeTimedSerializer( - keys, # type: ignore[arg-type] - salt=self.salt, - serializer=self.serializer, - signer_kwargs={ - "key_derivation": self.key_derivation, - "digest_method": self.digest_method, - }, - ) - - def open_session(self, app: Flask, request: Request) -> SecureCookieSession | None: - s = self.get_signing_serializer(app) - if s is None: - return None - val = request.cookies.get(self.get_cookie_name(app)) - if not val: - return self.session_class() - max_age = int(app.permanent_session_lifetime.total_seconds()) - try: - data = s.loads(val, max_age=max_age) - return self.session_class(data) - except BadSignature: - return self.session_class() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - name = self.get_cookie_name(app) - domain = self.get_cookie_domain(app) - path = self.get_cookie_path(app) - secure = self.get_cookie_secure(app) - partitioned = self.get_cookie_partitioned(app) - samesite = self.get_cookie_samesite(app) - httponly = self.get_cookie_httponly(app) - - # Add a "Vary: Cookie" header if the session was accessed at all. - if session.accessed: - response.vary.add("Cookie") - - # If the session is modified to be empty, remove the cookie. - # If the session is empty, return without setting the cookie. - if not session: - if session.modified: - response.delete_cookie( - name, - domain=domain, - path=path, - secure=secure, - partitioned=partitioned, - samesite=samesite, - httponly=httponly, - ) - response.vary.add("Cookie") - - return - - if not self.should_set_cookie(app, session): - return - - expires = self.get_expiration_time(app, session) - val = self.get_signing_serializer(app).dumps(dict(session)) # type: ignore[union-attr] - response.set_cookie( - name, - val, - expires=expires, - httponly=httponly, - domain=domain, - path=path, - secure=secure, - partitioned=partitioned, - samesite=samesite, - ) - response.vary.add("Cookie") diff --git a/venv/lib/python3.10/site-packages/flask/signals.py b/venv/lib/python3.10/site-packages/flask/signals.py deleted file mode 100644 index 444fda9..0000000 --- a/venv/lib/python3.10/site-packages/flask/signals.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from blinker import Namespace - -# This namespace is only for signals provided by Flask itself. 
-_signals = Namespace() - -template_rendered = _signals.signal("template-rendered") -before_render_template = _signals.signal("before-render-template") -request_started = _signals.signal("request-started") -request_finished = _signals.signal("request-finished") -request_tearing_down = _signals.signal("request-tearing-down") -got_request_exception = _signals.signal("got-request-exception") -appcontext_tearing_down = _signals.signal("appcontext-tearing-down") -appcontext_pushed = _signals.signal("appcontext-pushed") -appcontext_popped = _signals.signal("appcontext-popped") -message_flashed = _signals.signal("message-flashed") diff --git a/venv/lib/python3.10/site-packages/flask/templating.py b/venv/lib/python3.10/site-packages/flask/templating.py deleted file mode 100644 index 618a3b3..0000000 --- a/venv/lib/python3.10/site-packages/flask/templating.py +++ /dev/null @@ -1,219 +0,0 @@ -from __future__ import annotations - -import typing as t - -from jinja2 import BaseLoader -from jinja2 import Environment as BaseEnvironment -from jinja2 import Template -from jinja2 import TemplateNotFound - -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .helpers import stream_with_context -from .signals import before_render_template -from .signals import template_rendered - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .sansio.app import App - from .sansio.scaffold import Scaffold - - -def _default_template_ctx_processor() -> dict[str, t.Any]: - """Default template context processor. Injects `request`, - `session` and `g`. - """ - appctx = _cv_app.get(None) - reqctx = _cv_request.get(None) - rv: dict[str, t.Any] = {} - if appctx is not None: - rv["g"] = appctx.g - if reqctx is not None: - rv["request"] = reqctx.request - rv["session"] = reqctx.session - return rv - - -class Environment(BaseEnvironment): - """Works like a regular Jinja2 environment but has some additional - knowledge of how Flask's blueprint works so that it can prepend the - name of the blueprint to referenced templates if necessary. - """ - - def __init__(self, app: App, **options: t.Any) -> None: - if "loader" not in options: - options["loader"] = app.create_global_jinja_loader() - BaseEnvironment.__init__(self, **options) - self.app = app - - -class DispatchingJinjaLoader(BaseLoader): - """A loader that looks for templates in the application and all - the blueprint folders. 
- """ - - def __init__(self, app: App) -> None: - self.app = app - - def get_source( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - if self.app.config["EXPLAIN_TEMPLATE_LOADING"]: - return self._get_source_explained(environment, template) - return self._get_source_fast(environment, template) - - def _get_source_explained( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - attempts = [] - rv: tuple[str, str | None, t.Callable[[], bool] | None] | None - trv: None | (tuple[str, str | None, t.Callable[[], bool] | None]) = None - - for srcobj, loader in self._iter_loaders(template): - try: - rv = loader.get_source(environment, template) - if trv is None: - trv = rv - except TemplateNotFound: - rv = None - attempts.append((loader, srcobj, rv)) - - from .debughelpers import explain_template_loading_attempts - - explain_template_loading_attempts(self.app, template, attempts) - - if trv is not None: - return trv - raise TemplateNotFound(template) - - def _get_source_fast( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - for _srcobj, loader in self._iter_loaders(template): - try: - return loader.get_source(environment, template) - except TemplateNotFound: - continue - raise TemplateNotFound(template) - - def _iter_loaders(self, template: str) -> t.Iterator[tuple[Scaffold, BaseLoader]]: - loader = self.app.jinja_loader - if loader is not None: - yield self.app, loader - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - yield blueprint, loader - - def list_templates(self) -> list[str]: - result = set() - loader = self.app.jinja_loader - if loader is not None: - result.update(loader.list_templates()) - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - for template in loader.list_templates(): - result.add(template) - - return list(result) - - -def _render(app: Flask, template: Template, context: dict[str, t.Any]) -> str: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - rv = template.render(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - return rv - - -def render_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> str: - """Render a template by name with the given context. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _render(app, template, context) - - -def render_template_string(source: str, **context: t.Any) -> str: - """Render a template from the given source string with the given - context. - - :param source: The source code of the template to render. - :param context: The variables to make available in the template. 
- """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _render(app, template, context) - - -def _stream( - app: Flask, template: Template, context: dict[str, t.Any] -) -> t.Iterator[str]: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - def generate() -> t.Iterator[str]: - yield from template.generate(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - rv = generate() - - # If a request context is active, keep it while generating. - if request: - rv = stream_with_context(rv) - - return rv - - -def stream_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> t.Iterator[str]: - """Render a template by name with the given context as a stream. - This returns an iterator of strings, which can be used as a - streaming response from a view. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _stream(app, template, context) - - -def stream_template_string(source: str, **context: t.Any) -> t.Iterator[str]: - """Render a template from the given source string with the given - context as a stream. This returns an iterator of strings, which can - be used as a streaming response from a view. - - :param source: The source code of the template to render. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _stream(app, template, context) diff --git a/venv/lib/python3.10/site-packages/flask/testing.py b/venv/lib/python3.10/site-packages/flask/testing.py deleted file mode 100644 index da156cc..0000000 --- a/venv/lib/python3.10/site-packages/flask/testing.py +++ /dev/null @@ -1,298 +0,0 @@ -from __future__ import annotations - -import importlib.metadata -import typing as t -from contextlib import contextmanager -from contextlib import ExitStack -from copy import copy -from types import TracebackType -from urllib.parse import urlsplit - -import werkzeug.test -from click.testing import CliRunner -from click.testing import Result -from werkzeug.test import Client -from werkzeug.wrappers import Request as BaseRequest - -from .cli import ScriptInfo -from .sessions import SessionMixin - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIEnvironment - from werkzeug.test import TestResponse - - from .app import Flask - - -class EnvironBuilder(werkzeug.test.EnvironBuilder): - """An :class:`~werkzeug.test.EnvironBuilder`, that takes defaults from the - application. - - :param app: The Flask application to configure the environment from. - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to :data:`SERVER_NAME`. 
- :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - - def __init__( - self, - app: Flask, - path: str = "/", - base_url: str | None = None, - subdomain: str | None = None, - url_scheme: str | None = None, - *args: t.Any, - **kwargs: t.Any, - ) -> None: - assert not (base_url or subdomain or url_scheme) or ( - base_url is not None - ) != bool(subdomain or url_scheme), ( - 'Cannot pass "subdomain" or "url_scheme" with "base_url".' - ) - - if base_url is None: - http_host = app.config.get("SERVER_NAME") or "localhost" - app_root = app.config["APPLICATION_ROOT"] - - if subdomain: - http_host = f"{subdomain}.{http_host}" - - if url_scheme is None: - url_scheme = app.config["PREFERRED_URL_SCHEME"] - - url = urlsplit(path) - base_url = ( - f"{url.scheme or url_scheme}://{url.netloc or http_host}" - f"/{app_root.lstrip('/')}" - ) - path = url.path - - if url.query: - path = f"{path}?{url.query}" - - self.app = app - super().__init__(path, base_url, *args, **kwargs) - - def json_dumps(self, obj: t.Any, **kwargs: t.Any) -> str: # type: ignore - """Serialize ``obj`` to a JSON-formatted string. - - The serialization will be configured according to the config associated - with this EnvironBuilder's ``app``. - """ - return self.app.json.dumps(obj, **kwargs) - - -_werkzeug_version = "" - - -def _get_werkzeug_version() -> str: - global _werkzeug_version - - if not _werkzeug_version: - _werkzeug_version = importlib.metadata.version("werkzeug") - - return _werkzeug_version - - -class FlaskClient(Client): - """Works like a regular Werkzeug test client but has knowledge about - Flask's contexts to defer the cleanup of the request context until - the end of a ``with`` block. For general information about how to - use this class refer to :class:`werkzeug.test.Client`. - - .. versionchanged:: 0.12 - `app.test_client()` includes preset default environment, which can be - set after instantiation of the `app.test_client()` object in - `client.environ_base`. - - Basic usage is outlined in the :doc:`/testing` chapter. - """ - - application: Flask - - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - super().__init__(*args, **kwargs) - self.preserve_context = False - self._new_contexts: list[t.ContextManager[t.Any]] = [] - self._context_stack = ExitStack() - self.environ_base = { - "REMOTE_ADDR": "127.0.0.1", - "HTTP_USER_AGENT": f"Werkzeug/{_get_werkzeug_version()}", - } - - @contextmanager - def session_transaction( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Iterator[SessionMixin]: - """When used in combination with a ``with`` statement this opens a - session transaction. This can be used to modify the session that - the test client uses. Once the ``with`` block is left the session is - stored back. - - :: - - with client.session_transaction() as session: - session['value'] = 42 - - Internally this is implemented by going through a temporary test - request context and since session handling could depend on - request variables this function accepts the same arguments as - :meth:`~flask.Flask.test_request_context` which are directly - passed through. - """ - if self._cookies is None: - raise TypeError( - "Cookies are disabled. 
Create a client with 'use_cookies=True'." - ) - - app = self.application - ctx = app.test_request_context(*args, **kwargs) - self._add_cookies_to_wsgi(ctx.request.environ) - - with ctx: - sess = app.session_interface.open_session(app, ctx.request) - - if sess is None: - raise RuntimeError("Session backend did not open a session.") - - yield sess - resp = app.response_class() - - if app.session_interface.is_null_session(sess): - return - - with ctx: - app.session_interface.save_session(app, sess, resp) - - self._update_cookies_from_response( - ctx.request.host.partition(":")[0], - ctx.request.path, - resp.headers.getlist("Set-Cookie"), - ) - - def _copy_environ(self, other: WSGIEnvironment) -> WSGIEnvironment: - out = {**self.environ_base, **other} - - if self.preserve_context: - out["werkzeug.debug.preserve_context"] = self._new_contexts.append - - return out - - def _request_from_builder_args( - self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] - ) -> BaseRequest: - kwargs["environ_base"] = self._copy_environ(kwargs.get("environ_base", {})) - builder = EnvironBuilder(self.application, *args, **kwargs) - - try: - return builder.get_request() - finally: - builder.close() - - def open( - self, - *args: t.Any, - buffered: bool = False, - follow_redirects: bool = False, - **kwargs: t.Any, - ) -> TestResponse: - if args and isinstance( - args[0], (werkzeug.test.EnvironBuilder, dict, BaseRequest) - ): - if isinstance(args[0], werkzeug.test.EnvironBuilder): - builder = copy(args[0]) - builder.environ_base = self._copy_environ(builder.environ_base or {}) # type: ignore[arg-type] - request = builder.get_request() - elif isinstance(args[0], dict): - request = EnvironBuilder.from_environ( - args[0], app=self.application, environ_base=self._copy_environ({}) - ).get_request() - else: - # isinstance(args[0], BaseRequest) - request = copy(args[0]) - request.environ = self._copy_environ(request.environ) - else: - # request is None - request = self._request_from_builder_args(args, kwargs) - - # Pop any previously preserved contexts. This prevents contexts - # from being preserved across redirects or multiple requests - # within a single block. - self._context_stack.close() - - response = super().open( - request, - buffered=buffered, - follow_redirects=follow_redirects, - ) - response.json_module = self.application.json # type: ignore[assignment] - - # Re-push contexts that were preserved during the request. - while self._new_contexts: - cm = self._new_contexts.pop() - self._context_stack.enter_context(cm) - - return response - - def __enter__(self) -> FlaskClient: - if self.preserve_context: - raise RuntimeError("Cannot nest client invocations") - self.preserve_context = True - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.preserve_context = False - self._context_stack.close() - - -class FlaskCliRunner(CliRunner): - """A :class:`~click.testing.CliRunner` for testing a Flask app's - CLI commands. Typically created using - :meth:`~flask.Flask.test_cli_runner`. See :ref:`testing-cli`. - """ - - def __init__(self, app: Flask, **kwargs: t.Any) -> None: - self.app = app - super().__init__(**kwargs) - - def invoke( # type: ignore - self, cli: t.Any = None, args: t.Any = None, **kwargs: t.Any - ) -> Result: - """Invokes a CLI command in an isolated environment. See - :meth:`CliRunner.invoke ` for - full method documentation. See :ref:`testing-cli` for examples. 
- - If the ``obj`` argument is not given, passes an instance of - :class:`~flask.cli.ScriptInfo` that knows how to load the Flask - app being tested. - - :param cli: Command object to invoke. Default is the app's - :attr:`~flask.app.Flask.cli` group. - :param args: List of strings to invoke the command with. - - :return: a :class:`~click.testing.Result` object. - """ - if cli is None: - cli = self.app.cli - - if "obj" not in kwargs: - kwargs["obj"] = ScriptInfo(create_app=lambda: self.app) - - return super().invoke(cli, args, **kwargs) diff --git a/venv/lib/python3.10/site-packages/flask/typing.py b/venv/lib/python3.10/site-packages/flask/typing.py deleted file mode 100644 index 6b70c40..0000000 --- a/venv/lib/python3.10/site-packages/flask/typing.py +++ /dev/null @@ -1,93 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import typing as t - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIApplication # noqa: F401 - from werkzeug.datastructures import Headers # noqa: F401 - from werkzeug.sansio.response import Response # noqa: F401 - -# The possible types that are directly convertible or are a Response object. -ResponseValue = t.Union[ - "Response", - str, - bytes, - list[t.Any], - # Only dict is actually accepted, but Mapping allows for TypedDict. - t.Mapping[str, t.Any], - t.Iterator[str], - t.Iterator[bytes], - cabc.AsyncIterable[str], # for Quart, until App is generic. - cabc.AsyncIterable[bytes], -] - -# the possible types for an individual HTTP header -# This should be a Union, but mypy doesn't pass unless it's a TypeVar. -HeaderValue = t.Union[str, list[str], tuple[str, ...]] - -# the possible types for HTTP headers -HeadersValue = t.Union[ - "Headers", - t.Mapping[str, HeaderValue], - t.Sequence[tuple[str, HeaderValue]], -] - -# The possible types returned by a route function. -ResponseReturnValue = t.Union[ - ResponseValue, - tuple[ResponseValue, HeadersValue], - tuple[ResponseValue, int], - tuple[ResponseValue, int, HeadersValue], - "WSGIApplication", -] - -# Allow any subclass of werkzeug.Response, such as the one from Flask, -# as a callback argument. Using werkzeug.Response directly makes a -# callback annotated with flask.Response fail type checking. 
-ResponseClass = t.TypeVar("ResponseClass", bound="Response") - -AppOrBlueprintKey = t.Optional[str] # The App key is None, whereas blueprints are named -AfterRequestCallable = t.Union[ - t.Callable[[ResponseClass], ResponseClass], - t.Callable[[ResponseClass], t.Awaitable[ResponseClass]], -] -BeforeFirstRequestCallable = t.Union[ - t.Callable[[], None], t.Callable[[], t.Awaitable[None]] -] -BeforeRequestCallable = t.Union[ - t.Callable[[], t.Optional[ResponseReturnValue]], - t.Callable[[], t.Awaitable[t.Optional[ResponseReturnValue]]], -] -ShellContextProcessorCallable = t.Callable[[], dict[str, t.Any]] -TeardownCallable = t.Union[ - t.Callable[[t.Optional[BaseException]], None], - t.Callable[[t.Optional[BaseException]], t.Awaitable[None]], -] -TemplateContextProcessorCallable = t.Union[ - t.Callable[[], dict[str, t.Any]], - t.Callable[[], t.Awaitable[dict[str, t.Any]]], -] -TemplateFilterCallable = t.Callable[..., t.Any] -TemplateGlobalCallable = t.Callable[..., t.Any] -TemplateTestCallable = t.Callable[..., bool] -URLDefaultCallable = t.Callable[[str, dict[str, t.Any]], None] -URLValuePreprocessorCallable = t.Callable[ - [t.Optional[str], t.Optional[dict[str, t.Any]]], None -] - -# This should take Exception, but that either breaks typing the argument -# with a specific exception, or decorating multiple times with different -# exceptions (and using a union type on the argument). -# https://github.com/pallets/flask/issues/4095 -# https://github.com/pallets/flask/issues/4295 -# https://github.com/pallets/flask/issues/4297 -ErrorHandlerCallable = t.Union[ - t.Callable[[t.Any], ResponseReturnValue], - t.Callable[[t.Any], t.Awaitable[ResponseReturnValue]], -] - -RouteCallable = t.Union[ - t.Callable[..., ResponseReturnValue], - t.Callable[..., t.Awaitable[ResponseReturnValue]], -] diff --git a/venv/lib/python3.10/site-packages/flask/views.py b/venv/lib/python3.10/site-packages/flask/views.py deleted file mode 100644 index 53fe976..0000000 --- a/venv/lib/python3.10/site-packages/flask/views.py +++ /dev/null @@ -1,191 +0,0 @@ -from __future__ import annotations - -import typing as t - -from . import typing as ft -from .globals import current_app -from .globals import request - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -http_method_funcs = frozenset( - ["get", "post", "head", "options", "delete", "put", "trace", "patch"] -) - - -class View: - """Subclass this class and override :meth:`dispatch_request` to - create a generic class-based view. Call :meth:`as_view` to create a - view function that creates an instance of the class with the given - arguments and calls its ``dispatch_request`` method with any URL - variables. - - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class Hello(View): - init_every_request = False - - def dispatch_request(self, name): - return f"Hello, {name}!" - - app.add_url_rule( - "/hello/", view_func=Hello.as_view("hello") - ) - - Set :attr:`methods` on the class to change what methods the view - accepts. - - Set :attr:`decorators` on the class to apply a list of decorators to - the generated view function. Decorators applied to the class itself - will not be applied to the generated view function! - - Set :attr:`init_every_request` to ``False`` for efficiency, unless - you need to store request-global data on ``self``. - """ - - #: The methods this view is registered for. Uses the same default - #: (``["GET", "HEAD", "OPTIONS"]``) as ``route`` and - #: ``add_url_rule`` by default. 
- methods: t.ClassVar[t.Collection[str] | None] = None - - #: Control whether the ``OPTIONS`` method is handled automatically. - #: Uses the same default (``True``) as ``route`` and - #: ``add_url_rule`` by default. - provide_automatic_options: t.ClassVar[bool | None] = None - - #: A list of decorators to apply, in order, to the generated view - #: function. Remember that ``@decorator`` syntax is applied bottom - #: to top, so the first decorator in the list would be the bottom - #: decorator. - #: - #: .. versionadded:: 0.8 - decorators: t.ClassVar[list[t.Callable[..., t.Any]]] = [] - - #: Create a new instance of this view class for every request by - #: default. If a view subclass sets this to ``False``, the same - #: instance is used for every request. - #: - #: A single instance is more efficient, especially if complex setup - #: is done during init. However, storing data on ``self`` is no - #: longer safe across requests, and :data:`~flask.g` should be used - #: instead. - #: - #: .. versionadded:: 2.2 - init_every_request: t.ClassVar[bool] = True - - def dispatch_request(self) -> ft.ResponseReturnValue: - """The actual view function behavior. Subclasses must override - this and return a valid response. Any variables from the URL - rule are passed as keyword arguments. - """ - raise NotImplementedError() - - @classmethod - def as_view( - cls, name: str, *class_args: t.Any, **class_kwargs: t.Any - ) -> ft.RouteCallable: - """Convert the class into a view function that can be registered - for a route. - - By default, the generated view will create a new instance of the - view class for every request and call its - :meth:`dispatch_request` method. If the view class sets - :attr:`init_every_request` to ``False``, the same instance will - be used for every request. - - Except for ``name``, all other arguments passed to this method - are forwarded to the view class ``__init__`` method. - - .. versionchanged:: 2.2 - Added the ``init_every_request`` class attribute. - """ - if cls.init_every_request: - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - self = view.view_class( # type: ignore[attr-defined] - *class_args, **class_kwargs - ) - return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return] - - else: - self = cls(*class_args, **class_kwargs) # pyright: ignore - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return] - - if cls.decorators: - view.__name__ = name - view.__module__ = cls.__module__ - for decorator in cls.decorators: - view = decorator(view) - - # We attach the view class to the view function for two reasons: - # first of all it allows us to easily figure out what class-based - # view this thing came from, secondly it's also used for instantiating - # the view class so you can actually replace it with something else - # for testing purposes and debugging. - view.view_class = cls # type: ignore - view.__name__ = name - view.__doc__ = cls.__doc__ - view.__module__ = cls.__module__ - view.methods = cls.methods # type: ignore - view.provide_automatic_options = cls.provide_automatic_options # type: ignore - return view - - -class MethodView(View): - """Dispatches request methods to the corresponding instance methods. - For example, if you implement a ``get`` method, it will be used to - handle ``GET`` requests. - - This can be useful for defining a REST API. - - :attr:`methods` is automatically set based on the methods defined on - the class. 
- - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class CounterAPI(MethodView): - def get(self): - return str(session.get("counter", 0)) - - def post(self): - session["counter"] = session.get("counter", 0) + 1 - return redirect(url_for("counter")) - - app.add_url_rule( - "/counter", view_func=CounterAPI.as_view("counter") - ) - """ - - def __init_subclass__(cls, **kwargs: t.Any) -> None: - super().__init_subclass__(**kwargs) - - if "methods" not in cls.__dict__: - methods = set() - - for base in cls.__bases__: - if getattr(base, "methods", None): - methods.update(base.methods) # type: ignore[attr-defined] - - for key in http_method_funcs: - if hasattr(cls, key): - methods.add(key.upper()) - - if methods: - cls.methods = methods - - def dispatch_request(self, **kwargs: t.Any) -> ft.ResponseReturnValue: - meth = getattr(self, request.method.lower(), None) - - # If the request method is HEAD and we don't have a handler for it - # retry with GET. - if meth is None and request.method == "HEAD": - meth = getattr(self, "get", None) - - assert meth is not None, f"Unimplemented method {request.method!r}" - return current_app.ensure_sync(meth)(**kwargs) # type: ignore[no-any-return] diff --git a/venv/lib/python3.10/site-packages/flask/wrappers.py b/venv/lib/python3.10/site-packages/flask/wrappers.py deleted file mode 100644 index bab6102..0000000 --- a/venv/lib/python3.10/site-packages/flask/wrappers.py +++ /dev/null @@ -1,257 +0,0 @@ -from __future__ import annotations - -import typing as t - -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import HTTPException -from werkzeug.wrappers import Request as RequestBase -from werkzeug.wrappers import Response as ResponseBase - -from . import json -from .globals import current_app -from .helpers import _split_blueprint_path - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.routing import Rule - - -class Request(RequestBase): - """The request object used by default in Flask. Remembers the - matched endpoint and view arguments. - - It is what ends up as :class:`~flask.request`. If you want to replace - the request object used you can subclass this and set - :attr:`~flask.Flask.request_class` to your subclass. - - The request object is a :class:`~werkzeug.wrappers.Request` subclass and - provides all of the attributes Werkzeug defines plus a few Flask - specific ones. - """ - - json_module: t.Any = json - - #: The internal URL rule that matched the request. This can be - #: useful to inspect which methods are allowed for the URL from - #: a before/after handler (``request.url_rule.methods``) etc. - #: Though if the request's method was invalid for the URL rule, - #: the valid list is available in ``routing_exception.valid_methods`` - #: instead (an attribute of the Werkzeug exception - #: :exc:`~werkzeug.exceptions.MethodNotAllowed`) - #: because the request was never internally bound. - #: - #: .. versionadded:: 0.6 - url_rule: Rule | None = None - - #: A dict of view arguments that matched the request. If an exception - #: happened when matching, this will be ``None``. - view_args: dict[str, t.Any] | None = None - - #: If matching the URL failed, this is the exception that will be - #: raised / was raised as part of the request handling. This is - #: usually a :exc:`~werkzeug.exceptions.NotFound` exception or - #: something similar. 
- routing_exception: HTTPException | None = None - - _max_content_length: int | None = None - _max_form_memory_size: int | None = None - _max_form_parts: int | None = None - - @property - def max_content_length(self) -> int | None: - """The maximum number of bytes that will be read during this request. If - this limit is exceeded, a 413 :exc:`~werkzeug.exceptions.RequestEntityTooLarge` - error is raised. If it is set to ``None``, no limit is enforced at the - Flask application level. However, if it is ``None`` and the request has - no ``Content-Length`` header and the WSGI server does not indicate that - it terminates the stream, then no data is read to avoid an infinite - stream. - - Each request defaults to the :data:`MAX_CONTENT_LENGTH` config, which - defaults to ``None``. It can be set on a specific ``request`` to apply - the limit to that specific view. This should be set appropriately based - on an application's or view's specific needs. - - .. versionchanged:: 3.1 - This can be set per-request. - - .. versionchanged:: 0.6 - This is configurable through Flask config. - """ - if self._max_content_length is not None: - return self._max_content_length - - if not current_app: - return super().max_content_length - - return current_app.config["MAX_CONTENT_LENGTH"] # type: ignore[no-any-return] - - @max_content_length.setter - def max_content_length(self, value: int | None) -> None: - self._max_content_length = value - - @property - def max_form_memory_size(self) -> int | None: - """The maximum size in bytes any non-file form field may be in a - ``multipart/form-data`` body. If this limit is exceeded, a 413 - :exc:`~werkzeug.exceptions.RequestEntityTooLarge` error is raised. If it - is set to ``None``, no limit is enforced at the Flask application level. - - Each request defaults to the :data:`MAX_FORM_MEMORY_SIZE` config, which - defaults to ``500_000``. It can be set on a specific ``request`` to - apply the limit to that specific view. This should be set appropriately - based on an application's or view's specific needs. - - .. versionchanged:: 3.1 - This is configurable through Flask config. - """ - if self._max_form_memory_size is not None: - return self._max_form_memory_size - - if not current_app: - return super().max_form_memory_size - - return current_app.config["MAX_FORM_MEMORY_SIZE"] # type: ignore[no-any-return] - - @max_form_memory_size.setter - def max_form_memory_size(self, value: int | None) -> None: - self._max_form_memory_size = value - - @property # type: ignore[override] - def max_form_parts(self) -> int | None: - """The maximum number of fields that may be present in a - ``multipart/form-data`` body. If this limit is exceeded, a 413 - :exc:`~werkzeug.exceptions.RequestEntityTooLarge` error is raised. If it - is set to ``None``, no limit is enforced at the Flask application level. - - Each request defaults to the :data:`MAX_FORM_PARTS` config, which - defaults to ``1_000``. It can be set on a specific ``request`` to apply - the limit to that specific view. This should be set appropriately based - on an application's or view's specific needs. - - .. versionchanged:: 3.1 - This is configurable through Flask config. 
- """ - if self._max_form_parts is not None: - return self._max_form_parts - - if not current_app: - return super().max_form_parts - - return current_app.config["MAX_FORM_PARTS"] # type: ignore[no-any-return] - - @max_form_parts.setter - def max_form_parts(self, value: int | None) -> None: - self._max_form_parts = value - - @property - def endpoint(self) -> str | None: - """The endpoint that matched the request URL. - - This will be ``None`` if matching failed or has not been - performed yet. - - This in combination with :attr:`view_args` can be used to - reconstruct the same URL or a modified URL. - """ - if self.url_rule is not None: - return self.url_rule.endpoint # type: ignore[no-any-return] - - return None - - @property - def blueprint(self) -> str | None: - """The registered name of the current blueprint. - - This will be ``None`` if the endpoint is not part of a - blueprint, or if URL matching failed or has not been performed - yet. - - This does not necessarily match the name the blueprint was - created with. It may have been nested, or registered with a - different name. - """ - endpoint = self.endpoint - - if endpoint is not None and "." in endpoint: - return endpoint.rpartition(".")[0] - - return None - - @property - def blueprints(self) -> list[str]: - """The registered names of the current blueprint upwards through - parent blueprints. - - This will be an empty list if there is no current blueprint, or - if URL matching failed. - - .. versionadded:: 2.0.1 - """ - name = self.blueprint - - if name is None: - return [] - - return _split_blueprint_path(name) - - def _load_form_data(self) -> None: - super()._load_form_data() - - # In debug mode we're replacing the files multidict with an ad-hoc - # subclass that raises a different error for key errors. - if ( - current_app - and current_app.debug - and self.mimetype != "multipart/form-data" - and not self.files - ): - from .debughelpers import attach_enctype_error_multidict - - attach_enctype_error_multidict(self) - - def on_json_loading_failed(self, e: ValueError | None) -> t.Any: - try: - return super().on_json_loading_failed(e) - except BadRequest as ebr: - if current_app and current_app.debug: - raise - - raise BadRequest() from ebr - - -class Response(ResponseBase): - """The response object that is used by default in Flask. Works like the - response object from Werkzeug but is set to have an HTML mimetype by - default. Quite often you don't have to create this object yourself because - :meth:`~flask.Flask.make_response` will take care of that for you. - - If you want to replace the response object used you can subclass this and - set :attr:`~flask.Flask.response_class` to your subclass. - - .. versionchanged:: 1.0 - JSON support is added to the response, like the request. This is useful - when testing to get the test client response data as JSON. - - .. versionchanged:: 1.0 - - Added :attr:`max_cookie_size`. - """ - - default_mimetype: str | None = "text/html" - - json_module = json - - autocorrect_location_header = False - - @property - def max_cookie_size(self) -> int: # type: ignore - """Read-only view of the :data:`MAX_COOKIE_SIZE` config key. - - See :attr:`~werkzeug.wrappers.Response.max_cookie_size` in - Werkzeug's docs. 
- """ - if current_app: - return current_app.config["MAX_COOKIE_SIZE"] # type: ignore[no-any-return] - - # return Werkzeug's default when not in an app context - return super().max_cookie_size diff --git a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/LICENSE b/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/LICENSE deleted file mode 100644 index 0dca6e1..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/LICENSE +++ /dev/null @@ -1,23 +0,0 @@ -2009-2024 (c) Benoît Chesneau -2009-2015 (c) Paul J. Davis - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/METADATA deleted file mode 100644 index 550aef2..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/METADATA +++ /dev/null @@ -1,130 +0,0 @@ -Metadata-Version: 2.1 -Name: gunicorn -Version: 23.0.0 -Summary: WSGI HTTP Server for UNIX -Author-email: Benoit Chesneau -License: MIT -Project-URL: Homepage, https://gunicorn.org -Project-URL: Documentation, https://docs.gunicorn.org -Project-URL: Issue tracker, https://github.com/benoitc/gunicorn/issues -Project-URL: Source code, https://github.com/benoitc/gunicorn -Project-URL: Changelog, https://docs.gunicorn.org/en/stable/news.html -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Other Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: POSIX -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet -Classifier: Topic :: Utilities -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Internet :: WWW/HTTP -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: packaging -Requires-Dist: importlib-metadata ; python_version < "3.8" -Provides-Extra: eventlet -Requires-Dist: eventlet !=0.36.0,>=0.24.1 ; extra == 'eventlet' -Provides-Extra: gevent -Requires-Dist: gevent >=1.4.0 ; extra == 'gevent' -Provides-Extra: gthread -Provides-Extra: setproctitle -Requires-Dist: setproctitle ; extra == 'setproctitle' -Provides-Extra: testing -Requires-Dist: gevent ; extra == 'testing' -Requires-Dist: eventlet ; extra == 'testing' -Requires-Dist: coverage ; extra == 'testing' -Requires-Dist: pytest ; extra == 'testing' -Requires-Dist: pytest-cov ; extra == 'testing' -Provides-Extra: tornado -Requires-Dist: tornado >=0.2 ; extra == 'tornado' - -Gunicorn --------- - -.. image:: https://img.shields.io/pypi/v/gunicorn.svg?style=flat - :alt: PyPI version - :target: https://pypi.python.org/pypi/gunicorn - -.. image:: https://img.shields.io/pypi/pyversions/gunicorn.svg - :alt: Supported Python versions - :target: https://pypi.python.org/pypi/gunicorn - -.. image:: https://github.com/benoitc/gunicorn/actions/workflows/tox.yml/badge.svg - :alt: Build Status - :target: https://github.com/benoitc/gunicorn/actions/workflows/tox.yml - -.. image:: https://github.com/benoitc/gunicorn/actions/workflows/lint.yml/badge.svg - :alt: Lint Status - :target: https://github.com/benoitc/gunicorn/actions/workflows/lint.yml - -Gunicorn 'Green Unicorn' is a Python WSGI HTTP Server for UNIX. 
It's a pre-fork -worker model ported from Ruby's Unicorn_ project. The Gunicorn server is broadly -compatible with various web frameworks, simply implemented, light on server -resource usage, and fairly speedy. - -Feel free to join us in `#gunicorn`_ on `Libera.chat`_. - -Documentation -------------- - -The documentation is hosted at https://docs.gunicorn.org. - -Installation ------------- - -Gunicorn requires **Python 3.x >= 3.7**. - -Install from PyPI:: - - $ pip install gunicorn - - -Usage ------ - -Basic usage:: - - $ gunicorn [OPTIONS] APP_MODULE - -Where ``APP_MODULE`` is of the pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``. The -module name can be a full dotted path. The variable name refers to a WSGI -callable that should be found in the specified module. - -Example with test app:: - - $ cd examples - $ gunicorn --workers=2 test:app - - -Contributing ------------- - -See `our complete contributor's guide `_ for more details. - - -License -------- - -Gunicorn is released under the MIT License. See the LICENSE_ file for more -details. - -.. _Unicorn: https://bogomips.org/unicorn/ -.. _`#gunicorn`: https://web.libera.chat/?channels=#gunicorn -.. _`Libera.chat`: https://libera.chat/ -.. _LICENSE: https://github.com/benoitc/gunicorn/blob/master/LICENSE diff --git a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/RECORD deleted file mode 100644 index 904711e..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/RECORD +++ /dev/null @@ -1,77 +0,0 @@ -../../../bin/gunicorn,sha256=DuzCC47HKcmt6HxG072bKSLwmGafn_xQBH8wSTkAqvI,244 -gunicorn-23.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -gunicorn-23.0.0.dist-info/LICENSE,sha256=ZkbNu6LpnjQh3RjCIXNXmh_eNH6DHa5q3ugO7-Mx6VE,1136 -gunicorn-23.0.0.dist-info/METADATA,sha256=KhY-mRcAcWCLIbXIHihsUNKWB5fGDOrsbq-JKQTBHY4,4421 -gunicorn-23.0.0.dist-info/RECORD,, -gunicorn-23.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -gunicorn-23.0.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91 -gunicorn-23.0.0.dist-info/entry_points.txt,sha256=bF8VNiG4H8W83JfEBcqcPMydv9hl04CS4kwh1KOYrFY,113 -gunicorn-23.0.0.dist-info/top_level.txt,sha256=cdMaa2yhxb8do-WioY9qRHUCfwf55YztjwQCncaInoE,9 -gunicorn/__init__.py,sha256=NaLW_JTiKLgqMXipjqzxFn-1wdiptlO2WxOB_KKwx94,257 -gunicorn/__main__.py,sha256=tviepyuwKyB6SPV28t2eZy_5PcCpT56z7QZjzbMpkQw,338 -gunicorn/__pycache__/__init__.cpython-310.pyc,, -gunicorn/__pycache__/__main__.cpython-310.pyc,, -gunicorn/__pycache__/arbiter.cpython-310.pyc,, -gunicorn/__pycache__/config.cpython-310.pyc,, -gunicorn/__pycache__/debug.cpython-310.pyc,, -gunicorn/__pycache__/errors.cpython-310.pyc,, -gunicorn/__pycache__/glogging.cpython-310.pyc,, -gunicorn/__pycache__/pidfile.cpython-310.pyc,, -gunicorn/__pycache__/reloader.cpython-310.pyc,, -gunicorn/__pycache__/sock.cpython-310.pyc,, -gunicorn/__pycache__/systemd.cpython-310.pyc,, -gunicorn/__pycache__/util.cpython-310.pyc,, -gunicorn/app/__init__.py,sha256=8m9lIbhRssnbGuBeQUA-vNSNbMeNju9Q_PUnnNfqOYU,105 -gunicorn/app/__pycache__/__init__.cpython-310.pyc,, -gunicorn/app/__pycache__/base.cpython-310.pyc,, -gunicorn/app/__pycache__/pasterapp.cpython-310.pyc,, -gunicorn/app/__pycache__/wsgiapp.cpython-310.pyc,, -gunicorn/app/base.py,sha256=KV2aIO50JTlakHL82q9zu3LhCJrDmUmaViwSy14Gk6U,7370 -gunicorn/app/pasterapp.py,sha256=BIa0mz_J86NuObUw2UIyjLYKUm8V3b034pJrTkvF-sA,2016 
-gunicorn/app/wsgiapp.py,sha256=gVBgUc_3uSK0QzXYQ1XbutacEGjf44CgxAaYkgwfucY,1924 -gunicorn/arbiter.py,sha256=xcHpv8bsrYpIpu9q7YK4ue11f9kmz80dr7BUwKX3oxk,21470 -gunicorn/config.py,sha256=t3BChwMoBZwfV05Iy_n3oh232xvi1SORkOJfHFL_c-8,70318 -gunicorn/debug.py,sha256=c8cQv_g3d22JE6A4hv7FNmMhm4wq6iB_E-toorpqJcw,2263 -gunicorn/errors.py,sha256=iLTJQC4SVSRoygIGGHXvEp0d8UdzpeqmMRqUcF0JI14,897 -gunicorn/glogging.py,sha256=76MlUUc82FqdeD3R4qC8NeUHt8vxa3IBSxmeBtbZKtE,15273 -gunicorn/http/__init__.py,sha256=1k_WWvjT9eDDRDOutzXCebvYKm_qzaQA3GuLk0VkbJI,255 -gunicorn/http/__pycache__/__init__.cpython-310.pyc,, -gunicorn/http/__pycache__/body.cpython-310.pyc,, -gunicorn/http/__pycache__/errors.cpython-310.pyc,, -gunicorn/http/__pycache__/message.cpython-310.pyc,, -gunicorn/http/__pycache__/parser.cpython-310.pyc,, -gunicorn/http/__pycache__/unreader.cpython-310.pyc,, -gunicorn/http/__pycache__/wsgi.cpython-310.pyc,, -gunicorn/http/body.py,sha256=sQgp_hJUjx8DK6LYzklMTl-xKcX8efsbreCKzowCGmo,7600 -gunicorn/http/errors.py,sha256=6tcG9pCvRiooXpfudQBILzUPx3ertuQ5utjZeUNMUqA,3437 -gunicorn/http/message.py,sha256=ok4xnqWhntIn21gcPa1KYZWRYTbwsECpot-Eac47qFs,17632 -gunicorn/http/parser.py,sha256=wayoAFjQYERSwE4YGwI2AYSNGZ2eTNbGUtoqqQFph5U,1334 -gunicorn/http/unreader.py,sha256=D7bluz62A1aLZQ9XbpX0-nDBal9KPtp_pjokk2YNY8E,1913 -gunicorn/http/wsgi.py,sha256=x-zTT7gvRF4wipmvoVePz1qO407JZCU_sNU8yjcl_R4,12811 -gunicorn/instrument/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -gunicorn/instrument/__pycache__/__init__.cpython-310.pyc,, -gunicorn/instrument/__pycache__/statsd.cpython-310.pyc,, -gunicorn/instrument/statsd.py,sha256=ghmaniNEjMMLvvdQkDPpB_u9a8z4FBfWUE_C9O1KIYQ,4750 -gunicorn/pidfile.py,sha256=HntiveG8eJmwB8_D3o5cBXRuGKnC0cvWxg90MWh1hUc,2327 -gunicorn/reloader.py,sha256=oDuK2PWGyIMm0_vc1y196Z1EggOvBi-Iz_2UbRY7PsQ,3761 -gunicorn/sock.py,sha256=VVF2eeoxQEJ2OEoZoek3BFZTqj7wXvQql7jpdFAjVTI,6834 -gunicorn/systemd.py,sha256=DmWbcqeRyHdAIy70UCEg2J93v6PpESp3EFTNm0Djgyg,2498 -gunicorn/util.py,sha256=YqC4E3RxhFNH-W4LOqy1RtxcHRy9hRyYND92ZSNXEwc,19095 -gunicorn/workers/__init__.py,sha256=Y0Z6WhXKY6PuTbFkOkeEBzIfhDDg5FeqVg8aJp6lIZA,572 -gunicorn/workers/__pycache__/__init__.cpython-310.pyc,, -gunicorn/workers/__pycache__/base.cpython-310.pyc,, -gunicorn/workers/__pycache__/base_async.cpython-310.pyc,, -gunicorn/workers/__pycache__/geventlet.cpython-310.pyc,, -gunicorn/workers/__pycache__/ggevent.cpython-310.pyc,, -gunicorn/workers/__pycache__/gthread.cpython-310.pyc,, -gunicorn/workers/__pycache__/gtornado.cpython-310.pyc,, -gunicorn/workers/__pycache__/sync.cpython-310.pyc,, -gunicorn/workers/__pycache__/workertmp.cpython-310.pyc,, -gunicorn/workers/base.py,sha256=eM9MTLP9PdWL0Pm5V5byyBli-r8zF2MSEGjefr3y92M,9763 -gunicorn/workers/base_async.py,sha256=Oc-rSV81uHqvEqww2PM6tz75qNR07ChuqM6IkTOpzlk,5627 -gunicorn/workers/geventlet.py,sha256=s_I-gKYgDJnlAHdCxN_wfglODnDE1eJaZJZCJyNYg-4,6069 -gunicorn/workers/ggevent.py,sha256=OEhj-bFVBGQ-jbjr5S3gSvixJTa-YOQYht7fYTOCyt4,6030 -gunicorn/workers/gthread.py,sha256=moycCQoJS602u3U7gZEooYxqRP86Tq5bmQnipL4a4_c,12500 -gunicorn/workers/gtornado.py,sha256=zCHbxs5JeE9rtZa5mXlhftBlNlwp_tBWXuTQwqgv1so,5811 -gunicorn/workers/sync.py,sha256=mOY84VHbAx62lmo2DLuifkK9d6anEgvC7LAuYVJyRM4,7204 -gunicorn/workers/workertmp.py,sha256=bswGosCIDb_wBfdGaFqHopgxbmJ6rgVXYlVhJDWZKIc,1604 diff --git a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/REQUESTED b/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff 
--git a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/WHEEL deleted file mode 100644 index 1a9c535..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (72.1.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/entry_points.txt deleted file mode 100644 index fd14749..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/entry_points.txt +++ /dev/null @@ -1,5 +0,0 @@ -[console_scripts] -gunicorn = gunicorn.app.wsgiapp:run - -[paste.server_runner] -main = gunicorn.app.pasterapp:serve diff --git a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/top_level.txt deleted file mode 100644 index 8f22dcc..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn-23.0.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -gunicorn diff --git a/venv/lib/python3.10/site-packages/gunicorn/__init__.py b/venv/lib/python3.10/site-packages/gunicorn/__init__.py deleted file mode 100644 index cdcd135..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -version_info = (23, 0, 0) -__version__ = ".".join([str(v) for v in version_info]) -SERVER = "gunicorn" -SERVER_SOFTWARE = "%s/%s" % (SERVER, __version__) diff --git a/venv/lib/python3.10/site-packages/gunicorn/__main__.py b/venv/lib/python3.10/site-packages/gunicorn/__main__.py deleted file mode 100644 index ceb44d0..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/__main__.py +++ /dev/null @@ -1,10 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. 
- -from gunicorn.app.wsgiapp import run - -if __name__ == "__main__": - # see config.py - argparse defaults to basename(argv[0]) == "__main__.py" - # todo: let runpy.run_module take care of argv[0] rewriting - run(prog="gunicorn") diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index bc7e55a..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/__main__.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/__main__.cpython-310.pyc deleted file mode 100644 index 5379185..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/__main__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/arbiter.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/arbiter.cpython-310.pyc deleted file mode 100644 index 1d75bfc..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/arbiter.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/config.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/config.cpython-310.pyc deleted file mode 100644 index 7b2dc31..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/config.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/debug.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/debug.cpython-310.pyc deleted file mode 100644 index c9feaf2..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/debug.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/errors.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/errors.cpython-310.pyc deleted file mode 100644 index d167928..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/errors.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/glogging.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/glogging.cpython-310.pyc deleted file mode 100644 index 18eb626..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/glogging.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/pidfile.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/pidfile.cpython-310.pyc deleted file mode 100644 index 079adbb..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/pidfile.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/reloader.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/reloader.cpython-310.pyc deleted file mode 100644 index 93ddc32..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/reloader.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/sock.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/sock.cpython-310.pyc deleted file mode 100644 index 0e98cec..0000000 Binary files 
a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/sock.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/systemd.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/systemd.cpython-310.pyc deleted file mode 100644 index a0beda3..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/systemd.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/util.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/__pycache__/util.cpython-310.pyc deleted file mode 100644 index f1ca2c2..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/__pycache__/util.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/app/__init__.py b/venv/lib/python3.10/site-packages/gunicorn/app/__init__.py deleted file mode 100644 index 530e35c..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/app/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. diff --git a/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 1d8481a..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/base.cpython-310.pyc deleted file mode 100644 index 25ed05e..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/pasterapp.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/pasterapp.cpython-310.pyc deleted file mode 100644 index b38ae99..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/pasterapp.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/wsgiapp.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/wsgiapp.cpython-310.pyc deleted file mode 100644 index b559469..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/app/__pycache__/wsgiapp.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/app/base.py b/venv/lib/python3.10/site-packages/gunicorn/app/base.py deleted file mode 100644 index 9bf7a4f..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/app/base.py +++ /dev/null @@ -1,235 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. -import importlib.util -import importlib.machinery -import os -import sys -import traceback - -from gunicorn import util -from gunicorn.arbiter import Arbiter -from gunicorn.config import Config, get_default_config_file -from gunicorn import debug - - -class BaseApplication: - """ - An application interface for configuring and loading - the various necessities for any given web framework. 
- """ - def __init__(self, usage=None, prog=None): - self.usage = usage - self.cfg = None - self.callable = None - self.prog = prog - self.logger = None - self.do_load_config() - - def do_load_config(self): - """ - Loads the configuration - """ - try: - self.load_default_config() - self.load_config() - except Exception as e: - print("\nError: %s" % str(e), file=sys.stderr) - sys.stderr.flush() - sys.exit(1) - - def load_default_config(self): - # init configuration - self.cfg = Config(self.usage, prog=self.prog) - - def init(self, parser, opts, args): - raise NotImplementedError - - def load(self): - raise NotImplementedError - - def load_config(self): - """ - This method is used to load the configuration from one or several input(s). - Custom Command line, configuration file. - You have to override this method in your class. - """ - raise NotImplementedError - - def reload(self): - self.do_load_config() - if self.cfg.spew: - debug.spew() - - def wsgi(self): - if self.callable is None: - self.callable = self.load() - return self.callable - - def run(self): - try: - Arbiter(self).run() - except RuntimeError as e: - print("\nError: %s\n" % e, file=sys.stderr) - sys.stderr.flush() - sys.exit(1) - - -class Application(BaseApplication): - - # 'init' and 'load' methods are implemented by WSGIApplication. - # pylint: disable=abstract-method - - def chdir(self): - # chdir to the configured path before loading, - # default is the current dir - os.chdir(self.cfg.chdir) - - # add the path to sys.path - if self.cfg.chdir not in sys.path: - sys.path.insert(0, self.cfg.chdir) - - def get_config_from_filename(self, filename): - - if not os.path.exists(filename): - raise RuntimeError("%r doesn't exist" % filename) - - ext = os.path.splitext(filename)[1] - - try: - module_name = '__config__' - if ext in [".py", ".pyc"]: - spec = importlib.util.spec_from_file_location(module_name, filename) - else: - msg = "configuration file should have a valid Python extension.\n" - util.warn(msg) - loader_ = importlib.machinery.SourceFileLoader(module_name, filename) - spec = importlib.util.spec_from_file_location(module_name, filename, loader=loader_) - mod = importlib.util.module_from_spec(spec) - sys.modules[module_name] = mod - spec.loader.exec_module(mod) - except Exception: - print("Failed to read config file: %s" % filename, file=sys.stderr) - traceback.print_exc() - sys.stderr.flush() - sys.exit(1) - - return vars(mod) - - def get_config_from_module_name(self, module_name): - return vars(importlib.import_module(module_name)) - - def load_config_from_module_name_or_filename(self, location): - """ - Loads the configuration file: the file is a python file, otherwise raise an RuntimeError - Exception or stop the process if the configuration file contains a syntax error. 
- """ - - if location.startswith("python:"): - module_name = location[len("python:"):] - cfg = self.get_config_from_module_name(module_name) - else: - if location.startswith("file:"): - filename = location[len("file:"):] - else: - filename = location - cfg = self.get_config_from_filename(filename) - - for k, v in cfg.items(): - # Ignore unknown names - if k not in self.cfg.settings: - continue - try: - self.cfg.set(k.lower(), v) - except Exception: - print("Invalid value for %s: %s\n" % (k, v), file=sys.stderr) - sys.stderr.flush() - raise - - return cfg - - def load_config_from_file(self, filename): - return self.load_config_from_module_name_or_filename(location=filename) - - def load_config(self): - # parse console args - parser = self.cfg.parser() - args = parser.parse_args() - - # optional settings from apps - cfg = self.init(parser, args, args.args) - - # set up import paths and follow symlinks - self.chdir() - - # Load up the any app specific configuration - if cfg: - for k, v in cfg.items(): - self.cfg.set(k.lower(), v) - - env_args = parser.parse_args(self.cfg.get_cmd_args_from_env()) - - if args.config: - self.load_config_from_file(args.config) - elif env_args.config: - self.load_config_from_file(env_args.config) - else: - default_config = get_default_config_file() - if default_config is not None: - self.load_config_from_file(default_config) - - # Load up environment configuration - for k, v in vars(env_args).items(): - if v is None: - continue - if k == "args": - continue - self.cfg.set(k.lower(), v) - - # Lastly, update the configuration with any command line settings. - for k, v in vars(args).items(): - if v is None: - continue - if k == "args": - continue - self.cfg.set(k.lower(), v) - - # current directory might be changed by the config now - # set up import paths and follow symlinks - self.chdir() - - def run(self): - if self.cfg.print_config: - print(self.cfg) - - if self.cfg.print_config or self.cfg.check_config: - try: - self.load() - except Exception: - msg = "\nError while loading the application:\n" - print(msg, file=sys.stderr) - traceback.print_exc() - sys.stderr.flush() - sys.exit(1) - sys.exit(0) - - if self.cfg.spew: - debug.spew() - - if self.cfg.daemon: - if os.environ.get('NOTIFY_SOCKET'): - msg = "Warning: you shouldn't specify `daemon = True`" \ - " when launching by systemd with `Type = notify`" - print(msg, file=sys.stderr, flush=True) - - util.daemonize(self.cfg.enable_stdio_inheritance) - - # set python paths - if self.cfg.pythonpath: - paths = self.cfg.pythonpath.split(",") - for path in paths: - pythonpath = os.path.abspath(path) - if pythonpath not in sys.path: - sys.path.insert(0, pythonpath) - - super().run() diff --git a/venv/lib/python3.10/site-packages/gunicorn/app/pasterapp.py b/venv/lib/python3.10/site-packages/gunicorn/app/pasterapp.py deleted file mode 100644 index b1738f2..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/app/pasterapp.py +++ /dev/null @@ -1,74 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. 
- -import configparser -import os - -from paste.deploy import loadapp - -from gunicorn.app.wsgiapp import WSGIApplication -from gunicorn.config import get_default_config_file - - -def get_wsgi_app(config_uri, name=None, defaults=None): - if ':' not in config_uri: - config_uri = "config:%s" % config_uri - - return loadapp( - config_uri, - name=name, - relative_to=os.getcwd(), - global_conf=defaults, - ) - - -def has_logging_config(config_file): - parser = configparser.ConfigParser() - parser.read([config_file]) - return parser.has_section('loggers') - - -def serve(app, global_conf, **local_conf): - """\ - A Paste Deployment server runner. - - Example configuration: - - [server:main] - use = egg:gunicorn#main - host = 127.0.0.1 - port = 5000 - """ - config_file = global_conf['__file__'] - gunicorn_config_file = local_conf.pop('config', None) - - host = local_conf.pop('host', '') - port = local_conf.pop('port', '') - if host and port: - local_conf['bind'] = '%s:%s' % (host, port) - elif host: - local_conf['bind'] = host.split(',') - - class PasterServerApplication(WSGIApplication): - def load_config(self): - self.cfg.set("default_proc_name", config_file) - - if has_logging_config(config_file): - self.cfg.set("logconfig", config_file) - - if gunicorn_config_file: - self.load_config_from_file(gunicorn_config_file) - else: - default_gunicorn_config_file = get_default_config_file() - if default_gunicorn_config_file is not None: - self.load_config_from_file(default_gunicorn_config_file) - - for k, v in local_conf.items(): - if v is not None: - self.cfg.set(k.lower(), v) - - def load(self): - return app - - PasterServerApplication().run() diff --git a/venv/lib/python3.10/site-packages/gunicorn/app/wsgiapp.py b/venv/lib/python3.10/site-packages/gunicorn/app/wsgiapp.py deleted file mode 100644 index 1b0ba96..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/app/wsgiapp.py +++ /dev/null @@ -1,70 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -import os - -from gunicorn.errors import ConfigError -from gunicorn.app.base import Application -from gunicorn import util - - -class WSGIApplication(Application): - def init(self, parser, opts, args): - self.app_uri = None - - if opts.paste: - from .pasterapp import has_logging_config - - config_uri = os.path.abspath(opts.paste) - config_file = config_uri.split('#')[0] - - if not os.path.exists(config_file): - raise ConfigError("%r not found" % config_file) - - self.cfg.set("default_proc_name", config_file) - self.app_uri = config_uri - - if has_logging_config(config_file): - self.cfg.set("logconfig", config_file) - - return - - if len(args) > 0: - self.cfg.set("default_proc_name", args[0]) - self.app_uri = args[0] - - def load_config(self): - super().load_config() - - if self.app_uri is None: - if self.cfg.wsgi_app is not None: - self.app_uri = self.cfg.wsgi_app - else: - raise ConfigError("No application module specified.") - - def load_wsgiapp(self): - return util.import_app(self.app_uri) - - def load_pasteapp(self): - from .pasterapp import get_wsgi_app - return get_wsgi_app(self.app_uri, defaults=self.cfg.paste_global_conf) - - def load(self): - if self.cfg.paste is not None: - return self.load_pasteapp() - else: - return self.load_wsgiapp() - - -def run(prog=None): - """\ - The ``gunicorn`` command line runner for launching Gunicorn with - generic WSGI applications. 
- """ - from gunicorn.app.wsgiapp import WSGIApplication - WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]", prog=prog).run() - - -if __name__ == '__main__': - run() diff --git a/venv/lib/python3.10/site-packages/gunicorn/arbiter.py b/venv/lib/python3.10/site-packages/gunicorn/arbiter.py deleted file mode 100644 index 1eaf453..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/arbiter.py +++ /dev/null @@ -1,671 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. -import errno -import os -import random -import select -import signal -import sys -import time -import traceback - -from gunicorn.errors import HaltServer, AppImportError -from gunicorn.pidfile import Pidfile -from gunicorn import sock, systemd, util - -from gunicorn import __version__, SERVER_SOFTWARE - - -class Arbiter: - """ - Arbiter maintain the workers processes alive. It launches or - kills them if needed. It also manages application reloading - via SIGHUP/USR2. - """ - - # A flag indicating if a worker failed to - # to boot. If a worker process exist with - # this error code, the arbiter will terminate. - WORKER_BOOT_ERROR = 3 - - # A flag indicating if an application failed to be loaded - APP_LOAD_ERROR = 4 - - START_CTX = {} - - LISTENERS = [] - WORKERS = {} - PIPE = [] - - # I love dynamic languages - SIG_QUEUE = [] - SIGNALS = [getattr(signal, "SIG%s" % x) - for x in "HUP QUIT INT TERM TTIN TTOU USR1 USR2 WINCH".split()] - SIG_NAMES = dict( - (getattr(signal, name), name[3:].lower()) for name in dir(signal) - if name[:3] == "SIG" and name[3] != "_" - ) - - def __init__(self, app): - os.environ["SERVER_SOFTWARE"] = SERVER_SOFTWARE - - self._num_workers = None - self._last_logged_active_worker_count = None - self.log = None - - self.setup(app) - - self.pidfile = None - self.systemd = False - self.worker_age = 0 - self.reexec_pid = 0 - self.master_pid = 0 - self.master_name = "Master" - - cwd = util.getcwd() - - args = sys.argv[:] - args.insert(0, sys.executable) - - # init start context - self.START_CTX = { - "args": args, - "cwd": cwd, - 0: sys.executable - } - - def _get_num_workers(self): - return self._num_workers - - def _set_num_workers(self, value): - old_value = self._num_workers - self._num_workers = value - self.cfg.nworkers_changed(self, value, old_value) - num_workers = property(_get_num_workers, _set_num_workers) - - def setup(self, app): - self.app = app - self.cfg = app.cfg - - if self.log is None: - self.log = self.cfg.logger_class(app.cfg) - - # reopen files - if 'GUNICORN_PID' in os.environ: - self.log.reopen_files() - - self.worker_class = self.cfg.worker_class - self.address = self.cfg.address - self.num_workers = self.cfg.workers - self.timeout = self.cfg.timeout - self.proc_name = self.cfg.proc_name - - self.log.debug('Current configuration:\n{0}'.format( - '\n'.join( - ' {0}: {1}'.format(config, value.value) - for config, value - in sorted(self.cfg.settings.items(), - key=lambda setting: setting[1])))) - - # set environment' variables - if self.cfg.env: - for k, v in self.cfg.env.items(): - os.environ[k] = v - - if self.cfg.preload_app: - self.app.wsgi() - - def start(self): - """\ - Initialize the arbiter. Start listening and set pidfile if needed. 
- """ - self.log.info("Starting gunicorn %s", __version__) - - if 'GUNICORN_PID' in os.environ: - self.master_pid = int(os.environ.get('GUNICORN_PID')) - self.proc_name = self.proc_name + ".2" - self.master_name = "Master.2" - - self.pid = os.getpid() - if self.cfg.pidfile is not None: - pidname = self.cfg.pidfile - if self.master_pid != 0: - pidname += ".2" - self.pidfile = Pidfile(pidname) - self.pidfile.create(self.pid) - self.cfg.on_starting(self) - - self.init_signals() - - if not self.LISTENERS: - fds = None - listen_fds = systemd.listen_fds() - if listen_fds: - self.systemd = True - fds = range(systemd.SD_LISTEN_FDS_START, - systemd.SD_LISTEN_FDS_START + listen_fds) - - elif self.master_pid: - fds = [] - for fd in os.environ.pop('GUNICORN_FD').split(','): - fds.append(int(fd)) - - self.LISTENERS = sock.create_sockets(self.cfg, self.log, fds) - - listeners_str = ",".join([str(lnr) for lnr in self.LISTENERS]) - self.log.debug("Arbiter booted") - self.log.info("Listening at: %s (%s)", listeners_str, self.pid) - self.log.info("Using worker: %s", self.cfg.worker_class_str) - systemd.sd_notify("READY=1\nSTATUS=Gunicorn arbiter booted", self.log) - - # check worker class requirements - if hasattr(self.worker_class, "check_config"): - self.worker_class.check_config(self.cfg, self.log) - - self.cfg.when_ready(self) - - def init_signals(self): - """\ - Initialize master signal handling. Most of the signals - are queued. Child signals only wake up the master. - """ - # close old PIPE - for p in self.PIPE: - os.close(p) - - # initialize the pipe - self.PIPE = pair = os.pipe() - for p in pair: - util.set_non_blocking(p) - util.close_on_exec(p) - - self.log.close_on_exec() - - # initialize all signals - for s in self.SIGNALS: - signal.signal(s, self.signal) - signal.signal(signal.SIGCHLD, self.handle_chld) - - def signal(self, sig, frame): - if len(self.SIG_QUEUE) < 5: - self.SIG_QUEUE.append(sig) - self.wakeup() - - def run(self): - "Main master loop." - self.start() - util._setproctitle("master [%s]" % self.proc_name) - - try: - self.manage_workers() - - while True: - self.maybe_promote_master() - - sig = self.SIG_QUEUE.pop(0) if self.SIG_QUEUE else None - if sig is None: - self.sleep() - self.murder_workers() - self.manage_workers() - continue - - if sig not in self.SIG_NAMES: - self.log.info("Ignoring unknown signal: %s", sig) - continue - - signame = self.SIG_NAMES.get(sig) - handler = getattr(self, "handle_%s" % signame, None) - if not handler: - self.log.error("Unhandled signal: %s", signame) - continue - self.log.info("Handling signal: %s", signame) - handler() - self.wakeup() - except (StopIteration, KeyboardInterrupt): - self.halt() - except HaltServer as inst: - self.halt(reason=inst.reason, exit_status=inst.exit_status) - except SystemExit: - raise - except Exception: - self.log.error("Unhandled exception in main loop", - exc_info=True) - self.stop(False) - if self.pidfile is not None: - self.pidfile.unlink() - sys.exit(-1) - - def handle_chld(self, sig, frame): - "SIGCHLD handling" - self.reap_workers() - self.wakeup() - - def handle_hup(self): - """\ - HUP handling. 
- - Reload configuration - - Start the new worker processes with a new configuration - - Gracefully shutdown the old worker processes - """ - self.log.info("Hang up: %s", self.master_name) - self.reload() - - def handle_term(self): - "SIGTERM handling" - raise StopIteration - - def handle_int(self): - "SIGINT handling" - self.stop(False) - raise StopIteration - - def handle_quit(self): - "SIGQUIT handling" - self.stop(False) - raise StopIteration - - def handle_ttin(self): - """\ - SIGTTIN handling. - Increases the number of workers by one. - """ - self.num_workers += 1 - self.manage_workers() - - def handle_ttou(self): - """\ - SIGTTOU handling. - Decreases the number of workers by one. - """ - if self.num_workers <= 1: - return - self.num_workers -= 1 - self.manage_workers() - - def handle_usr1(self): - """\ - SIGUSR1 handling. - Kill all workers by sending them a SIGUSR1 - """ - self.log.reopen_files() - self.kill_workers(signal.SIGUSR1) - - def handle_usr2(self): - """\ - SIGUSR2 handling. - Creates a new arbiter/worker set as a fork of the current - arbiter without affecting old workers. Use this to do live - deployment with the ability to backout a change. - """ - self.reexec() - - def handle_winch(self): - """SIGWINCH handling""" - if self.cfg.daemon: - self.log.info("graceful stop of workers") - self.num_workers = 0 - self.kill_workers(signal.SIGTERM) - else: - self.log.debug("SIGWINCH ignored. Not daemonized") - - def maybe_promote_master(self): - if self.master_pid == 0: - return - - if self.master_pid != os.getppid(): - self.log.info("Master has been promoted.") - # reset master infos - self.master_name = "Master" - self.master_pid = 0 - self.proc_name = self.cfg.proc_name - del os.environ['GUNICORN_PID'] - # rename the pidfile - if self.pidfile is not None: - self.pidfile.rename(self.cfg.pidfile) - # reset proctitle - util._setproctitle("master [%s]" % self.proc_name) - - def wakeup(self): - """\ - Wake up the arbiter by writing to the PIPE - """ - try: - os.write(self.PIPE[1], b'.') - except OSError as e: - if e.errno not in [errno.EAGAIN, errno.EINTR]: - raise - - def halt(self, reason=None, exit_status=0): - """ halt arbiter """ - self.stop() - - log_func = self.log.info if exit_status == 0 else self.log.error - log_func("Shutting down: %s", self.master_name) - if reason is not None: - log_func("Reason: %s", reason) - - if self.pidfile is not None: - self.pidfile.unlink() - self.cfg.on_exit(self) - sys.exit(exit_status) - - def sleep(self): - """\ - Sleep until PIPE is readable or we timeout. - A readable PIPE means a signal occurred. - """ - try: - ready = select.select([self.PIPE[0]], [], [], 1.0) - if not ready[0]: - return - while os.read(self.PIPE[0], 1): - pass - except OSError as e: - # TODO: select.error is a subclass of OSError since Python 3.3. - error_number = getattr(e, 'errno', e.args[0]) - if error_number not in [errno.EAGAIN, errno.EINTR]: - raise - except KeyboardInterrupt: - sys.exit() - - def stop(self, graceful=True): - """\ - Stop workers - - :attr graceful: boolean, If True (the default) workers will be - killed gracefully (ie. 
trying to wait for the current connection) - """ - unlink = ( - self.reexec_pid == self.master_pid == 0 - and not self.systemd - and not self.cfg.reuse_port - ) - sock.close_sockets(self.LISTENERS, unlink) - - self.LISTENERS = [] - sig = signal.SIGTERM - if not graceful: - sig = signal.SIGQUIT - limit = time.time() + self.cfg.graceful_timeout - # instruct the workers to exit - self.kill_workers(sig) - # wait until the graceful timeout - while self.WORKERS and time.time() < limit: - time.sleep(0.1) - - self.kill_workers(signal.SIGKILL) - - def reexec(self): - """\ - Relaunch the master and workers. - """ - if self.reexec_pid != 0: - self.log.warning("USR2 signal ignored. Child exists.") - return - - if self.master_pid != 0: - self.log.warning("USR2 signal ignored. Parent exists.") - return - - master_pid = os.getpid() - self.reexec_pid = os.fork() - if self.reexec_pid != 0: - return - - self.cfg.pre_exec(self) - - environ = self.cfg.env_orig.copy() - environ['GUNICORN_PID'] = str(master_pid) - - if self.systemd: - environ['LISTEN_PID'] = str(os.getpid()) - environ['LISTEN_FDS'] = str(len(self.LISTENERS)) - else: - environ['GUNICORN_FD'] = ','.join( - str(lnr.fileno()) for lnr in self.LISTENERS) - - os.chdir(self.START_CTX['cwd']) - - # exec the process using the original environment - os.execvpe(self.START_CTX[0], self.START_CTX['args'], environ) - - def reload(self): - old_address = self.cfg.address - - # reset old environment - for k in self.cfg.env: - if k in self.cfg.env_orig: - # reset the key to the value it had before - # we launched gunicorn - os.environ[k] = self.cfg.env_orig[k] - else: - # delete the value set by gunicorn - try: - del os.environ[k] - except KeyError: - pass - - # reload conf - self.app.reload() - self.setup(self.app) - - # reopen log files - self.log.reopen_files() - - # do we need to change listener ? - if old_address != self.cfg.address: - # close all listeners - for lnr in self.LISTENERS: - lnr.close() - # init new listeners - self.LISTENERS = sock.create_sockets(self.cfg, self.log) - listeners_str = ",".join([str(lnr) for lnr in self.LISTENERS]) - self.log.info("Listening at: %s", listeners_str) - - # do some actions on reload - self.cfg.on_reload(self) - - # unlink pidfile - if self.pidfile is not None: - self.pidfile.unlink() - - # create new pidfile - if self.cfg.pidfile is not None: - self.pidfile = Pidfile(self.cfg.pidfile) - self.pidfile.create(self.pid) - - # set new proc_name - util._setproctitle("master [%s]" % self.proc_name) - - # spawn new workers - for _ in range(self.cfg.workers): - self.spawn_worker() - - # manage workers - self.manage_workers() - - def murder_workers(self): - """\ - Kill unused/idle workers - """ - if not self.timeout: - return - workers = list(self.WORKERS.items()) - for (pid, worker) in workers: - try: - if time.monotonic() - worker.tmp.last_update() <= self.timeout: - continue - except (OSError, ValueError): - continue - - if not worker.aborted: - self.log.critical("WORKER TIMEOUT (pid:%s)", pid) - worker.aborted = True - self.kill_worker(pid, signal.SIGABRT) - else: - self.kill_worker(pid, signal.SIGKILL) - - def reap_workers(self): - """\ - Reap workers to avoid zombie processes - """ - try: - while True: - wpid, status = os.waitpid(-1, os.WNOHANG) - if not wpid: - break - if self.reexec_pid == wpid: - self.reexec_pid = 0 - else: - # A worker was terminated. If the termination reason was - # that it could not boot, we'll shut it down to avoid - # infinite start/stop cycles. 
- exitcode = status >> 8 - if exitcode != 0: - self.log.error('Worker (pid:%s) exited with code %s', wpid, exitcode) - if exitcode == self.WORKER_BOOT_ERROR: - reason = "Worker failed to boot." - raise HaltServer(reason, self.WORKER_BOOT_ERROR) - if exitcode == self.APP_LOAD_ERROR: - reason = "App failed to load." - raise HaltServer(reason, self.APP_LOAD_ERROR) - - if exitcode > 0: - # If the exit code of the worker is greater than 0, - # let the user know. - self.log.error("Worker (pid:%s) exited with code %s.", - wpid, exitcode) - elif status > 0: - # If the exit code of the worker is 0 and the status - # is greater than 0, then it was most likely killed - # via a signal. - try: - sig_name = signal.Signals(status).name - except ValueError: - sig_name = "code {}".format(status) - msg = "Worker (pid:{}) was sent {}!".format( - wpid, sig_name) - - # Additional hint for SIGKILL - if status == signal.SIGKILL: - msg += " Perhaps out of memory?" - self.log.error(msg) - - worker = self.WORKERS.pop(wpid, None) - if not worker: - continue - worker.tmp.close() - self.cfg.child_exit(self, worker) - except OSError as e: - if e.errno != errno.ECHILD: - raise - - def manage_workers(self): - """\ - Maintain the number of workers by spawning or killing - as required. - """ - if len(self.WORKERS) < self.num_workers: - self.spawn_workers() - - workers = self.WORKERS.items() - workers = sorted(workers, key=lambda w: w[1].age) - while len(workers) > self.num_workers: - (pid, _) = workers.pop(0) - self.kill_worker(pid, signal.SIGTERM) - - active_worker_count = len(workers) - if self._last_logged_active_worker_count != active_worker_count: - self._last_logged_active_worker_count = active_worker_count - self.log.debug("{0} workers".format(active_worker_count), - extra={"metric": "gunicorn.workers", - "value": active_worker_count, - "mtype": "gauge"}) - - def spawn_worker(self): - self.worker_age += 1 - worker = self.worker_class(self.worker_age, self.pid, self.LISTENERS, - self.app, self.timeout / 2.0, - self.cfg, self.log) - self.cfg.pre_fork(self, worker) - pid = os.fork() - if pid != 0: - worker.pid = pid - self.WORKERS[pid] = worker - return pid - - # Do not inherit the temporary files of other workers - for sibling in self.WORKERS.values(): - sibling.tmp.close() - - # Process Child - worker.pid = os.getpid() - try: - util._setproctitle("worker [%s]" % self.proc_name) - self.log.info("Booting worker with pid: %s", worker.pid) - self.cfg.post_fork(self, worker) - worker.init_process() - sys.exit(0) - except SystemExit: - raise - except AppImportError as e: - self.log.debug("Exception while loading the application", - exc_info=True) - print("%s" % e, file=sys.stderr) - sys.stderr.flush() - sys.exit(self.APP_LOAD_ERROR) - except Exception: - self.log.exception("Exception in worker process") - if not worker.booted: - sys.exit(self.WORKER_BOOT_ERROR) - sys.exit(-1) - finally: - self.log.info("Worker exiting (pid: %s)", worker.pid) - try: - worker.tmp.close() - self.cfg.worker_exit(self, worker) - except Exception: - self.log.warning("Exception during worker exit:\n%s", - traceback.format_exc()) - - def spawn_workers(self): - """\ - Spawn new workers as needed. - - This is where a worker process leaves the main loop - of the master process. 
- """ - - for _ in range(self.num_workers - len(self.WORKERS)): - self.spawn_worker() - time.sleep(0.1 * random.random()) - - def kill_workers(self, sig): - """\ - Kill all workers with the signal `sig` - :attr sig: `signal.SIG*` value - """ - worker_pids = list(self.WORKERS.keys()) - for pid in worker_pids: - self.kill_worker(pid, sig) - - def kill_worker(self, pid, sig): - """\ - Kill a worker - - :attr pid: int, worker pid - :attr sig: `signal.SIG*` value - """ - try: - os.kill(pid, sig) - except OSError as e: - if e.errno == errno.ESRCH: - try: - worker = self.WORKERS.pop(pid) - worker.tmp.close() - self.cfg.worker_exit(self, worker) - return - except (KeyError, OSError): - return - raise diff --git a/venv/lib/python3.10/site-packages/gunicorn/config.py b/venv/lib/python3.10/site-packages/gunicorn/config.py deleted file mode 100644 index 402a26b..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/config.py +++ /dev/null @@ -1,2442 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -# Please remember to run "make -C docs html" after update "desc" attributes. - -import argparse -import copy -import grp -import inspect -import ipaddress -import os -import pwd -import re -import shlex -import ssl -import sys -import textwrap - -from gunicorn import __version__, util -from gunicorn.errors import ConfigError -from gunicorn.reloader import reloader_engines - -KNOWN_SETTINGS = [] -PLATFORM = sys.platform - - -def make_settings(ignore=None): - settings = {} - ignore = ignore or () - for s in KNOWN_SETTINGS: - setting = s() - if setting.name in ignore: - continue - settings[setting.name] = setting.copy() - return settings - - -def auto_int(_, x): - # for compatible with octal numbers in python3 - if re.match(r'0(\d)', x, re.IGNORECASE): - x = x.replace('0', '0o', 1) - return int(x, 0) - - -class Config: - - def __init__(self, usage=None, prog=None): - self.settings = make_settings() - self.usage = usage - self.prog = prog or os.path.basename(sys.argv[0]) - self.env_orig = os.environ.copy() - - def __str__(self): - lines = [] - kmax = max(len(k) for k in self.settings) - for k in sorted(self.settings): - v = self.settings[k].value - if callable(v): - v = "<{}()>".format(v.__qualname__) - lines.append("{k:{kmax}} = {v}".format(k=k, v=v, kmax=kmax)) - return "\n".join(lines) - - def __getattr__(self, name): - if name not in self.settings: - raise AttributeError("No configuration setting for: %s" % name) - return self.settings[name].get() - - def __setattr__(self, name, value): - if name != "settings" and name in self.settings: - raise AttributeError("Invalid access!") - super().__setattr__(name, value) - - def set(self, name, value): - if name not in self.settings: - raise AttributeError("No configuration setting for: %s" % name) - self.settings[name].set(value) - - def get_cmd_args_from_env(self): - if 'GUNICORN_CMD_ARGS' in self.env_orig: - return shlex.split(self.env_orig['GUNICORN_CMD_ARGS']) - return [] - - def parser(self): - kwargs = { - "usage": self.usage, - "prog": self.prog - } - parser = argparse.ArgumentParser(**kwargs) - parser.add_argument("-v", "--version", - action="version", default=argparse.SUPPRESS, - version="%(prog)s (version " + __version__ + ")\n", - help="show program's version number and exit") - parser.add_argument("args", nargs="*", help=argparse.SUPPRESS) - - keys = sorted(self.settings, key=self.settings.__getitem__) - for k in keys: - self.settings[k].add_option(parser) - - return parser - - 
@property - def worker_class_str(self): - uri = self.settings['worker_class'].get() - - if isinstance(uri, str): - # are we using a threaded worker? - is_sync = uri.endswith('SyncWorker') or uri == 'sync' - if is_sync and self.threads > 1: - return "gthread" - return uri - return uri.__name__ - - @property - def worker_class(self): - uri = self.settings['worker_class'].get() - - # are we using a threaded worker? - is_sync = isinstance(uri, str) and (uri.endswith('SyncWorker') or uri == 'sync') - if is_sync and self.threads > 1: - uri = "gunicorn.workers.gthread.ThreadWorker" - - worker_class = util.load_class(uri) - if hasattr(worker_class, "setup"): - worker_class.setup() - return worker_class - - @property - def address(self): - s = self.settings['bind'].get() - return [util.parse_address(util.bytes_to_str(bind)) for bind in s] - - @property - def uid(self): - return self.settings['user'].get() - - @property - def gid(self): - return self.settings['group'].get() - - @property - def proc_name(self): - pn = self.settings['proc_name'].get() - if pn is not None: - return pn - else: - return self.settings['default_proc_name'].get() - - @property - def logger_class(self): - uri = self.settings['logger_class'].get() - if uri == "simple": - # support the default - uri = LoggerClass.default - - # if default logger is in use, and statsd is on, automagically switch - # to the statsd logger - if uri == LoggerClass.default: - if 'statsd_host' in self.settings and self.settings['statsd_host'].value is not None: - uri = "gunicorn.instrument.statsd.Statsd" - - logger_class = util.load_class( - uri, - default="gunicorn.glogging.Logger", - section="gunicorn.loggers") - - if hasattr(logger_class, "install"): - logger_class.install() - return logger_class - - @property - def is_ssl(self): - return self.certfile or self.keyfile - - @property - def ssl_options(self): - opts = {} - for name, value in self.settings.items(): - if value.section == 'SSL': - opts[name] = value.get() - return opts - - @property - def env(self): - raw_env = self.settings['raw_env'].get() - env = {} - - if not raw_env: - return env - - for e in raw_env: - s = util.bytes_to_str(e) - try: - k, v = s.split('=', 1) - except ValueError: - raise RuntimeError("environment setting %r invalid" % s) - - env[k] = v - - return env - - @property - def sendfile(self): - if self.settings['sendfile'].get() is not None: - return False - - if 'SENDFILE' in os.environ: - sendfile = os.environ['SENDFILE'].lower() - return sendfile in ['y', '1', 'yes', 'true'] - - return True - - @property - def reuse_port(self): - return self.settings['reuse_port'].get() - - @property - def paste_global_conf(self): - raw_global_conf = self.settings['raw_paste_global_conf'].get() - if raw_global_conf is None: - return None - - global_conf = {} - for e in raw_global_conf: - s = util.bytes_to_str(e) - try: - k, v = re.split(r'(?" 
% ( - self.__class__.__module__, - self.__class__.__name__, - id(self), - self.value, - ) - - -Setting = SettingMeta('Setting', (Setting,), {}) - - -def validate_bool(val): - if val is None: - return - - if isinstance(val, bool): - return val - if not isinstance(val, str): - raise TypeError("Invalid type for casting: %s" % val) - if val.lower().strip() == "true": - return True - elif val.lower().strip() == "false": - return False - else: - raise ValueError("Invalid boolean: %s" % val) - - -def validate_dict(val): - if not isinstance(val, dict): - raise TypeError("Value is not a dictionary: %s " % val) - return val - - -def validate_pos_int(val): - if not isinstance(val, int): - val = int(val, 0) - else: - # Booleans are ints! - val = int(val) - if val < 0: - raise ValueError("Value must be positive: %s" % val) - return val - - -def validate_ssl_version(val): - if val != SSLVersion.default: - sys.stderr.write("Warning: option `ssl_version` is deprecated and it is ignored. Use ssl_context instead.\n") - return val - - -def validate_string(val): - if val is None: - return None - if not isinstance(val, str): - raise TypeError("Not a string: %s" % val) - return val.strip() - - -def validate_file_exists(val): - if val is None: - return None - if not os.path.exists(val): - raise ValueError("File %s does not exists." % val) - return val - - -def validate_list_string(val): - if not val: - return [] - - # legacy syntax - if isinstance(val, str): - val = [val] - - return [validate_string(v) for v in val] - - -def validate_list_of_existing_files(val): - return [validate_file_exists(v) for v in validate_list_string(val)] - - -def validate_string_to_addr_list(val): - val = validate_string_to_list(val) - - for addr in val: - if addr == "*": - continue - _vaid_ip = ipaddress.ip_address(addr) - - return val - - -def validate_string_to_list(val): - val = validate_string(val) - - if not val: - return [] - - return [v.strip() for v in val.split(",") if v] - - -def validate_class(val): - if inspect.isfunction(val) or inspect.ismethod(val): - val = val() - if inspect.isclass(val): - return val - return validate_string(val) - - -def validate_callable(arity): - def _validate_callable(val): - if isinstance(val, str): - try: - mod_name, obj_name = val.rsplit(".", 1) - except ValueError: - raise TypeError("Value '%s' is not import string. 
" - "Format: module[.submodules...].object" % val) - try: - mod = __import__(mod_name, fromlist=[obj_name]) - val = getattr(mod, obj_name) - except ImportError as e: - raise TypeError(str(e)) - except AttributeError: - raise TypeError("Can not load '%s' from '%s'" - "" % (obj_name, mod_name)) - if not callable(val): - raise TypeError("Value is not callable: %s" % val) - if arity != -1 and arity != util.get_arity(val): - raise TypeError("Value must have an arity of: %s" % arity) - return val - return _validate_callable - - -def validate_user(val): - if val is None: - return os.geteuid() - if isinstance(val, int): - return val - elif val.isdigit(): - return int(val) - else: - try: - return pwd.getpwnam(val).pw_uid - except KeyError: - raise ConfigError("No such user: '%s'" % val) - - -def validate_group(val): - if val is None: - return os.getegid() - - if isinstance(val, int): - return val - elif val.isdigit(): - return int(val) - else: - try: - return grp.getgrnam(val).gr_gid - except KeyError: - raise ConfigError("No such group: '%s'" % val) - - -def validate_post_request(val): - val = validate_callable(-1)(val) - - largs = util.get_arity(val) - if largs == 4: - return val - elif largs == 3: - return lambda worker, req, env, _r: val(worker, req, env) - elif largs == 2: - return lambda worker, req, _e, _r: val(worker, req) - else: - raise TypeError("Value must have an arity of: 4") - - -def validate_chdir(val): - # valid if the value is a string - val = validate_string(val) - - # transform relative paths - path = os.path.abspath(os.path.normpath(os.path.join(util.getcwd(), val))) - - # test if the path exists - if not os.path.exists(path): - raise ConfigError("can't chdir to %r" % val) - - return path - - -def validate_statsd_address(val): - val = validate_string(val) - if val is None: - return None - - # As of major release 20, util.parse_address would recognize unix:PORT - # as a UDS address, breaking backwards compatibility. We defend against - # that regression here (this is also unit-tested). - # Feel free to remove in the next major release. - unix_hostname_regression = re.match(r'^unix:(\d+)$', val) - if unix_hostname_regression: - return ('unix', int(unix_hostname_regression.group(1))) - - try: - address = util.parse_address(val, default_port='8125') - except RuntimeError: - raise TypeError("Value must be one of ('host:port', 'unix://PATH')") - - return address - - -def validate_reload_engine(val): - if val not in reloader_engines: - raise ConfigError("Invalid reload_engine: %r" % val) - - return val - - -def get_default_config_file(): - config_path = os.path.join(os.path.abspath(os.getcwd()), - 'gunicorn.conf.py') - if os.path.exists(config_path): - return config_path - return None - - -class ConfigFile(Setting): - name = "config" - section = "Config File" - cli = ["-c", "--config"] - meta = "CONFIG" - validator = validate_string - default = "./gunicorn.conf.py" - desc = """\ - :ref:`The Gunicorn config file`. - - A string of the form ``PATH``, ``file:PATH``, or ``python:MODULE_NAME``. - - Only has an effect when specified on the command line or as part of an - application specific configuration. - - By default, a file named ``gunicorn.conf.py`` will be read from the same - directory where gunicorn is being run. - - .. versionchanged:: 19.4 - Loading the config from a Python module requires the ``python:`` - prefix. 
- """ - - -class WSGIApp(Setting): - name = "wsgi_app" - section = "Config File" - meta = "STRING" - validator = validate_string - default = None - desc = """\ - A WSGI application path in pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``. - - .. versionadded:: 20.1.0 - """ - - -class Bind(Setting): - name = "bind" - action = "append" - section = "Server Socket" - cli = ["-b", "--bind"] - meta = "ADDRESS" - validator = validate_list_string - - if 'PORT' in os.environ: - default = ['0.0.0.0:{0}'.format(os.environ.get('PORT'))] - else: - default = ['127.0.0.1:8000'] - - desc = """\ - The socket to bind. - - A string of the form: ``HOST``, ``HOST:PORT``, ``unix:PATH``, - ``fd://FD``. An IP is a valid ``HOST``. - - .. versionchanged:: 20.0 - Support for ``fd://FD`` got added. - - Multiple addresses can be bound. ex.:: - - $ gunicorn -b 127.0.0.1:8000 -b [::1]:8000 test:app - - will bind the `test:app` application on localhost both on ipv6 - and ipv4 interfaces. - - If the ``PORT`` environment variable is defined, the default - is ``['0.0.0.0:$PORT']``. If it is not defined, the default - is ``['127.0.0.1:8000']``. - """ - - -class Backlog(Setting): - name = "backlog" - section = "Server Socket" - cli = ["--backlog"] - meta = "INT" - validator = validate_pos_int - type = int - default = 2048 - desc = """\ - The maximum number of pending connections. - - This refers to the number of clients that can be waiting to be served. - Exceeding this number results in the client getting an error when - attempting to connect. It should only affect servers under significant - load. - - Must be a positive integer. Generally set in the 64-2048 range. - """ - - -class Workers(Setting): - name = "workers" - section = "Worker Processes" - cli = ["-w", "--workers"] - meta = "INT" - validator = validate_pos_int - type = int - default = int(os.environ.get("WEB_CONCURRENCY", 1)) - desc = """\ - The number of worker processes for handling requests. - - A positive integer generally in the ``2-4 x $(NUM_CORES)`` range. - You'll want to vary this a bit to find the best for your particular - application's work load. - - By default, the value of the ``WEB_CONCURRENCY`` environment variable, - which is set by some Platform-as-a-Service providers such as Heroku. If - it is not defined, the default is ``1``. - """ - - -class WorkerClass(Setting): - name = "worker_class" - section = "Worker Processes" - cli = ["-k", "--worker-class"] - meta = "STRING" - validator = validate_class - default = "sync" - desc = """\ - The type of workers to use. - - The default class (``sync``) should handle most "normal" types of - workloads. You'll want to read :doc:`design` for information on when - you might want to choose one of the other worker classes. Required - libraries may be installed using setuptools' ``extras_require`` feature. - - A string referring to one of the following bundled classes: - - * ``sync`` - * ``eventlet`` - Requires eventlet >= 0.24.1 (or install it via - ``pip install gunicorn[eventlet]``) - * ``gevent`` - Requires gevent >= 1.4 (or install it via - ``pip install gunicorn[gevent]``) - * ``tornado`` - Requires tornado >= 0.2 (or install it via - ``pip install gunicorn[tornado]``) - * ``gthread`` - Python 2 requires the futures package to be installed - (or install it via ``pip install gunicorn[gthread]``) - - Optionally, you can provide your own worker by giving Gunicorn a - Python path to a subclass of ``gunicorn.workers.base.Worker``. 
- This alternative syntax will load the gevent class: - ``gunicorn.workers.ggevent.GeventWorker``. - """ - - -class WorkerThreads(Setting): - name = "threads" - section = "Worker Processes" - cli = ["--threads"] - meta = "INT" - validator = validate_pos_int - type = int - default = 1 - desc = """\ - The number of worker threads for handling requests. - - Run each worker with the specified number of threads. - - A positive integer generally in the ``2-4 x $(NUM_CORES)`` range. - You'll want to vary this a bit to find the best for your particular - application's work load. - - If it is not defined, the default is ``1``. - - This setting only affects the Gthread worker type. - - .. note:: - If you try to use the ``sync`` worker type and set the ``threads`` - setting to more than 1, the ``gthread`` worker type will be used - instead. - """ - - -class WorkerConnections(Setting): - name = "worker_connections" - section = "Worker Processes" - cli = ["--worker-connections"] - meta = "INT" - validator = validate_pos_int - type = int - default = 1000 - desc = """\ - The maximum number of simultaneous clients. - - This setting only affects the ``gthread``, ``eventlet`` and ``gevent`` worker types. - """ - - -class MaxRequests(Setting): - name = "max_requests" - section = "Worker Processes" - cli = ["--max-requests"] - meta = "INT" - validator = validate_pos_int - type = int - default = 0 - desc = """\ - The maximum number of requests a worker will process before restarting. - - Any value greater than zero will limit the number of requests a worker - will process before automatically restarting. This is a simple method - to help limit the damage of memory leaks. - - If this is set to zero (the default) then the automatic worker - restarts are disabled. - """ - - -class MaxRequestsJitter(Setting): - name = "max_requests_jitter" - section = "Worker Processes" - cli = ["--max-requests-jitter"] - meta = "INT" - validator = validate_pos_int - type = int - default = 0 - desc = """\ - The maximum jitter to add to the *max_requests* setting. - - The jitter causes the restart per worker to be randomized by - ``randint(0, max_requests_jitter)``. This is intended to stagger worker - restarts to avoid all workers restarting at the same time. - - .. versionadded:: 19.2 - """ - - -class Timeout(Setting): - name = "timeout" - section = "Worker Processes" - cli = ["-t", "--timeout"] - meta = "INT" - validator = validate_pos_int - type = int - default = 30 - desc = """\ - Workers silent for more than this many seconds are killed and restarted. - - Value is a positive number or 0. Setting it to 0 has the effect of - infinite timeouts by disabling timeouts for all workers entirely. - - Generally, the default of thirty seconds should suffice. Only set this - noticeably higher if you're sure of the repercussions for sync workers. - For the non sync workers it just means that the worker process is still - communicating and is not tied to the length of time required to handle a - single request. - """ - - -class GracefulTimeout(Setting): - name = "graceful_timeout" - section = "Worker Processes" - cli = ["--graceful-timeout"] - meta = "INT" - validator = validate_pos_int - type = int - default = 30 - desc = """\ - Timeout for graceful workers restart. - - After receiving a restart signal, workers have this much time to finish - serving requests. Workers still alive after the timeout (starting from - the receipt of the restart signal) are force killed. 
- """ - - -class Keepalive(Setting): - name = "keepalive" - section = "Worker Processes" - cli = ["--keep-alive"] - meta = "INT" - validator = validate_pos_int - type = int - default = 2 - desc = """\ - The number of seconds to wait for requests on a Keep-Alive connection. - - Generally set in the 1-5 seconds range for servers with direct connection - to the client (e.g. when you don't have separate load balancer). When - Gunicorn is deployed behind a load balancer, it often makes sense to - set this to a higher value. - - .. note:: - ``sync`` worker does not support persistent connections and will - ignore this option. - """ - - -class LimitRequestLine(Setting): - name = "limit_request_line" - section = "Security" - cli = ["--limit-request-line"] - meta = "INT" - validator = validate_pos_int - type = int - default = 4094 - desc = """\ - The maximum size of HTTP request line in bytes. - - This parameter is used to limit the allowed size of a client's - HTTP request-line. Since the request-line consists of the HTTP - method, URI, and protocol version, this directive places a - restriction on the length of a request-URI allowed for a request - on the server. A server needs this value to be large enough to - hold any of its resource names, including any information that - might be passed in the query part of a GET request. Value is a number - from 0 (unlimited) to 8190. - - This parameter can be used to prevent any DDOS attack. - """ - - -class LimitRequestFields(Setting): - name = "limit_request_fields" - section = "Security" - cli = ["--limit-request-fields"] - meta = "INT" - validator = validate_pos_int - type = int - default = 100 - desc = """\ - Limit the number of HTTP headers fields in a request. - - This parameter is used to limit the number of headers in a request to - prevent DDOS attack. Used with the *limit_request_field_size* it allows - more safety. By default this value is 100 and can't be larger than - 32768. - """ - - -class LimitRequestFieldSize(Setting): - name = "limit_request_field_size" - section = "Security" - cli = ["--limit-request-field_size"] - meta = "INT" - validator = validate_pos_int - type = int - default = 8190 - desc = """\ - Limit the allowed size of an HTTP request header field. - - Value is a positive number or 0. Setting it to 0 will allow unlimited - header field sizes. - - .. warning:: - Setting this parameter to a very high or unlimited value can open - up for DDOS attacks. - """ - - -class Reload(Setting): - name = "reload" - section = 'Debugging' - cli = ['--reload'] - validator = validate_bool - action = 'store_true' - default = False - - desc = '''\ - Restart workers when code changes. - - This setting is intended for development. It will cause workers to be - restarted whenever application code changes. - - The reloader is incompatible with application preloading. When using a - paste configuration be sure that the server block does not import any - application code or the reload will not work as designed. - - The default behavior is to attempt inotify with a fallback to file - system polling. Generally, inotify should be preferred if available - because it consumes less system resources. - - .. note:: - In order to use the inotify reloader, you must have the ``inotify`` - package installed. 
- ''' - - -class ReloadEngine(Setting): - name = "reload_engine" - section = "Debugging" - cli = ["--reload-engine"] - meta = "STRING" - validator = validate_reload_engine - default = "auto" - desc = """\ - The implementation that should be used to power :ref:`reload`. - - Valid engines are: - - * ``'auto'`` - * ``'poll'`` - * ``'inotify'`` (requires inotify) - - .. versionadded:: 19.7 - """ - - -class ReloadExtraFiles(Setting): - name = "reload_extra_files" - action = "append" - section = "Debugging" - cli = ["--reload-extra-file"] - meta = "FILES" - validator = validate_list_of_existing_files - default = [] - desc = """\ - Extends :ref:`reload` option to also watch and reload on additional files - (e.g., templates, configurations, specifications, etc.). - - .. versionadded:: 19.8 - """ - - -class Spew(Setting): - name = "spew" - section = "Debugging" - cli = ["--spew"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Install a trace function that spews every line executed by the server. - - This is the nuclear option. - """ - - -class ConfigCheck(Setting): - name = "check_config" - section = "Debugging" - cli = ["--check-config"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Check the configuration and exit. The exit status is 0 if the - configuration is correct, and 1 if the configuration is incorrect. - """ - - -class PrintConfig(Setting): - name = "print_config" - section = "Debugging" - cli = ["--print-config"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Print the configuration settings as fully resolved. Implies :ref:`check-config`. - """ - - -class PreloadApp(Setting): - name = "preload_app" - section = "Server Mechanics" - cli = ["--preload"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Load application code before the worker processes are forked. - - By preloading an application you can save some RAM resources as well as - speed up server boot times. Although, if you defer application loading - to each worker process, you can reload your application code easily by - restarting workers. - """ - - -class Sendfile(Setting): - name = "sendfile" - section = "Server Mechanics" - cli = ["--no-sendfile"] - validator = validate_bool - action = "store_const" - const = False - - desc = """\ - Disables the use of ``sendfile()``. - - If not set, the value of the ``SENDFILE`` environment variable is used - to enable or disable its usage. - - .. versionadded:: 19.2 - .. versionchanged:: 19.4 - Swapped ``--sendfile`` with ``--no-sendfile`` to actually allow - disabling. - .. versionchanged:: 19.6 - added support for the ``SENDFILE`` environment variable - """ - - -class ReusePort(Setting): - name = "reuse_port" - section = "Server Mechanics" - cli = ["--reuse-port"] - validator = validate_bool - action = "store_true" - default = False - - desc = """\ - Set the ``SO_REUSEPORT`` flag on the listening socket. - - .. versionadded:: 19.8 - """ - - -class Chdir(Setting): - name = "chdir" - section = "Server Mechanics" - cli = ["--chdir"] - validator = validate_chdir - default = util.getcwd() - default_doc = "``'.'``" - desc = """\ - Change directory to specified directory before loading apps. - """ - - -class Daemon(Setting): - name = "daemon" - section = "Server Mechanics" - cli = ["-D", "--daemon"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Daemonize the Gunicorn process. 
- - Detaches the server from the controlling terminal and enters the - background. - """ - - -class Env(Setting): - name = "raw_env" - action = "append" - section = "Server Mechanics" - cli = ["-e", "--env"] - meta = "ENV" - validator = validate_list_string - default = [] - - desc = """\ - Set environment variables in the execution environment. - - Should be a list of strings in the ``key=value`` format. - - For example on the command line: - - .. code-block:: console - - $ gunicorn -b 127.0.0.1:8000 --env FOO=1 test:app - - Or in the configuration file: - - .. code-block:: python - - raw_env = ["FOO=1"] - """ - - -class Pidfile(Setting): - name = "pidfile" - section = "Server Mechanics" - cli = ["-p", "--pid"] - meta = "FILE" - validator = validate_string - default = None - desc = """\ - A filename to use for the PID file. - - If not set, no PID file will be written. - """ - - -class WorkerTmpDir(Setting): - name = "worker_tmp_dir" - section = "Server Mechanics" - cli = ["--worker-tmp-dir"] - meta = "DIR" - validator = validate_string - default = None - desc = """\ - A directory to use for the worker heartbeat temporary file. - - If not set, the default temporary directory will be used. - - .. note:: - The current heartbeat system involves calling ``os.fchmod`` on - temporary file handlers and may block a worker for arbitrary time - if the directory is on a disk-backed filesystem. - - See :ref:`blocking-os-fchmod` for more detailed information - and a solution for avoiding this problem. - """ - - -class User(Setting): - name = "user" - section = "Server Mechanics" - cli = ["-u", "--user"] - meta = "USER" - validator = validate_user - default = os.geteuid() - default_doc = "``os.geteuid()``" - desc = """\ - Switch worker processes to run as this user. - - A valid user id (as an integer) or the name of a user that can be - retrieved with a call to ``pwd.getpwnam(value)`` or ``None`` to not - change the worker process user. - """ - - -class Group(Setting): - name = "group" - section = "Server Mechanics" - cli = ["-g", "--group"] - meta = "GROUP" - validator = validate_group - default = os.getegid() - default_doc = "``os.getegid()``" - desc = """\ - Switch worker process to run as this group. - - A valid group id (as an integer) or the name of a user that can be - retrieved with a call to ``pwd.getgrnam(value)`` or ``None`` to not - change the worker processes group. - """ - - -class Umask(Setting): - name = "umask" - section = "Server Mechanics" - cli = ["-m", "--umask"] - meta = "INT" - validator = validate_pos_int - type = auto_int - default = 0 - desc = """\ - A bit mask for the file mode on files written by Gunicorn. - - Note that this affects unix socket permissions. - - A valid value for the ``os.umask(mode)`` call or a string compatible - with ``int(value, 0)`` (``0`` means Python guesses the base, so values - like ``0``, ``0xFF``, ``0022`` are valid for decimal, hex, and octal - representations) - """ - - -class Initgroups(Setting): - name = "initgroups" - section = "Server Mechanics" - cli = ["--initgroups"] - validator = validate_bool - action = 'store_true' - default = False - - desc = """\ - If true, set the worker process's group access list with all of the - groups of which the specified username is a member, plus the specified - group id. - - .. 
versionadded:: 19.7 - """ - - -class TmpUploadDir(Setting): - name = "tmp_upload_dir" - section = "Server Mechanics" - meta = "DIR" - validator = validate_string - default = None - desc = """\ - Directory to store temporary request data as they are read. - - This may disappear in the near future. - - This path should be writable by the process permissions set for Gunicorn - workers. If not specified, Gunicorn will choose a system generated - temporary directory. - """ - - -class SecureSchemeHeader(Setting): - name = "secure_scheme_headers" - section = "Server Mechanics" - validator = validate_dict - default = { - "X-FORWARDED-PROTOCOL": "ssl", - "X-FORWARDED-PROTO": "https", - "X-FORWARDED-SSL": "on" - } - desc = """\ - - A dictionary containing headers and values that the front-end proxy - uses to indicate HTTPS requests. If the source IP is permitted by - :ref:`forwarded-allow-ips` (below), *and* at least one request header matches - a key-value pair listed in this dictionary, then Gunicorn will set - ``wsgi.url_scheme`` to ``https``, so your application can tell that the - request is secure. - - If the other headers listed in this dictionary are not present in the request, they will be ignored, - but if the other headers are present and do not match the provided values, then - the request will fail to parse. See the note below for more detailed examples of this behaviour. - - The dictionary should map upper-case header names to exact string - values. The value comparisons are case-sensitive, unlike the header - names, so make sure they're exactly what your front-end proxy sends - when handling HTTPS requests. - - It is important that your front-end proxy configuration ensures that - the headers defined here can not be passed directly from the client. - """ - - -class ForwardedAllowIPS(Setting): - name = "forwarded_allow_ips" - section = "Server Mechanics" - cli = ["--forwarded-allow-ips"] - meta = "STRING" - validator = validate_string_to_addr_list - default = os.environ.get("FORWARDED_ALLOW_IPS", "127.0.0.1,::1") - desc = """\ - Front-end's IPs from which allowed to handle set secure headers. - (comma separated). - - Set to ``*`` to disable checking of front-end IPs. This is useful for setups - where you don't know in advance the IP address of front-end, but - instead have ensured via other means that only your - authorized front-ends can access Gunicorn. - - By default, the value of the ``FORWARDED_ALLOW_IPS`` environment - variable. If it is not defined, the default is ``"127.0.0.1,::1"``. - - .. note:: - - This option does not affect UNIX socket connections. Connections not associated with - an IP address are treated as allowed, unconditionally. - - .. note:: - - The interplay between the request headers, the value of ``forwarded_allow_ips``, and the value of - ``secure_scheme_headers`` is complex. Various scenarios are documented below to further elaborate. - In each case, we have a request from the remote address 134.213.44.18, and the default value of - ``secure_scheme_headers``: - - .. code:: - - secure_scheme_headers = { - 'X-FORWARDED-PROTOCOL': 'ssl', - 'X-FORWARDED-PROTO': 'https', - 'X-FORWARDED-SSL': 'on' - } - - - .. list-table:: - :header-rows: 1 - :align: center - :widths: auto - - * - ``forwarded-allow-ips`` - - Secure Request Headers - - Result - - Explanation - * - .. code:: - - ["127.0.0.1"] - - .. code:: - - X-Forwarded-Proto: https - - .. code:: - - wsgi.url_scheme = "http" - - IP address was not allowed - * - .. code:: - - "*" - - - - .. 
code:: - - wsgi.url_scheme = "http" - - IP address allowed, but no secure headers provided - * - .. code:: - - "*" - - .. code:: - - X-Forwarded-Proto: https - - .. code:: - - wsgi.url_scheme = "https" - - IP address allowed, one request header matched - * - .. code:: - - ["134.213.44.18"] - - .. code:: - - X-Forwarded-Ssl: on - X-Forwarded-Proto: http - - ``InvalidSchemeHeaders()`` raised - - IP address allowed, but the two secure headers disagreed on if HTTPS was used - - - """ - - -class AccessLog(Setting): - name = "accesslog" - section = "Logging" - cli = ["--access-logfile"] - meta = "FILE" - validator = validate_string - default = None - desc = """\ - The Access log file to write to. - - ``'-'`` means log to stdout. - """ - - -class DisableRedirectAccessToSyslog(Setting): - name = "disable_redirect_access_to_syslog" - section = "Logging" - cli = ["--disable-redirect-access-to-syslog"] - validator = validate_bool - action = 'store_true' - default = False - desc = """\ - Disable redirect access logs to syslog. - - .. versionadded:: 19.8 - """ - - -class AccessLogFormat(Setting): - name = "access_log_format" - section = "Logging" - cli = ["--access-logformat"] - meta = "STRING" - validator = validate_string - default = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"' - desc = """\ - The access log format. - - =========== =========== - Identifier Description - =========== =========== - h remote address - l ``'-'`` - u user name (if HTTP Basic auth used) - t date of the request - r status line (e.g. ``GET / HTTP/1.1``) - m request method - U URL path without query string - q query string - H protocol - s status - B response length - b response length or ``'-'`` (CLF format) - f referrer (note: header is ``referer``) - a user agent - T request time in seconds - M request time in milliseconds - D request time in microseconds - L request time in decimal seconds - p process ID - {header}i request header - {header}o response header - {variable}e environment variable - =========== =========== - - Use lowercase for header and environment variable names, and put - ``{...}x`` names inside ``%(...)s``. For example:: - - %({x-forwarded-for}i)s - """ - - -class ErrorLog(Setting): - name = "errorlog" - section = "Logging" - cli = ["--error-logfile", "--log-file"] - meta = "FILE" - validator = validate_string - default = '-' - desc = """\ - The Error log file to write to. - - Using ``'-'`` for FILE makes gunicorn log to stderr. - - .. versionchanged:: 19.2 - Log to stderr by default. - - """ - - -class Loglevel(Setting): - name = "loglevel" - section = "Logging" - cli = ["--log-level"] - meta = "LEVEL" - validator = validate_string - default = "info" - desc = """\ - The granularity of Error log outputs. - - Valid level names are: - - * ``'debug'`` - * ``'info'`` - * ``'warning'`` - * ``'error'`` - * ``'critical'`` - """ - - -class CaptureOutput(Setting): - name = "capture_output" - section = "Logging" - cli = ["--capture-output"] - validator = validate_bool - action = 'store_true' - default = False - desc = """\ - Redirect stdout/stderr to specified file in :ref:`errorlog`. - - .. versionadded:: 19.6 - """ - - -class LoggerClass(Setting): - name = "logger_class" - section = "Logging" - cli = ["--logger-class"] - meta = "STRING" - validator = validate_class - default = "gunicorn.glogging.Logger" - desc = """\ - The logger you want to use to log events in Gunicorn. - - The default class (``gunicorn.glogging.Logger``) handles most - normal usages in logging. 
It provides error and access logging. - - You can provide your own logger by giving Gunicorn a Python path to a - class that quacks like ``gunicorn.glogging.Logger``. - """ - - -class LogConfig(Setting): - name = "logconfig" - section = "Logging" - cli = ["--log-config"] - meta = "FILE" - validator = validate_string - default = None - desc = """\ - The log config file to use. - Gunicorn uses the standard Python logging module's Configuration - file format. - """ - - -class LogConfigDict(Setting): - name = "logconfig_dict" - section = "Logging" - validator = validate_dict - default = {} - desc = """\ - The log config dictionary to use, using the standard Python - logging module's dictionary configuration format. This option - takes precedence over the :ref:`logconfig` and :ref:`logconfig-json` options, - which uses the older file configuration format and JSON - respectively. - - Format: https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig - - For more context you can look at the default configuration dictionary for logging, - which can be found at ``gunicorn.glogging.CONFIG_DEFAULTS``. - - .. versionadded:: 19.8 - """ - - -class LogConfigJson(Setting): - name = "logconfig_json" - section = "Logging" - cli = ["--log-config-json"] - meta = "FILE" - validator = validate_string - default = None - desc = """\ - The log config to read config from a JSON file - - Format: https://docs.python.org/3/library/logging.config.html#logging.config.jsonConfig - - .. versionadded:: 20.0 - """ - - -class SyslogTo(Setting): - name = "syslog_addr" - section = "Logging" - cli = ["--log-syslog-to"] - meta = "SYSLOG_ADDR" - validator = validate_string - - if PLATFORM == "darwin": - default = "unix:///var/run/syslog" - elif PLATFORM in ('freebsd', 'dragonfly', ): - default = "unix:///var/run/log" - elif PLATFORM == "openbsd": - default = "unix:///dev/log" - else: - default = "udp://localhost:514" - - desc = """\ - Address to send syslog messages. - - Address is a string of the form: - - * ``unix://PATH#TYPE`` : for unix domain socket. ``TYPE`` can be ``stream`` - for the stream driver or ``dgram`` for the dgram driver. - ``stream`` is the default. - * ``udp://HOST:PORT`` : for UDP sockets - * ``tcp://HOST:PORT`` : for TCP sockets - - """ - - -class Syslog(Setting): - name = "syslog" - section = "Logging" - cli = ["--log-syslog"] - validator = validate_bool - action = 'store_true' - default = False - desc = """\ - Send *Gunicorn* logs to syslog. - - .. versionchanged:: 19.8 - You can now disable sending access logs by using the - :ref:`disable-redirect-access-to-syslog` setting. - """ - - -class SyslogPrefix(Setting): - name = "syslog_prefix" - section = "Logging" - cli = ["--log-syslog-prefix"] - meta = "SYSLOG_PREFIX" - validator = validate_string - default = None - desc = """\ - Makes Gunicorn use the parameter as program-name in the syslog entries. - - All entries will be prefixed by ``gunicorn.``. By default the - program name is the name of the process. - """ - - -class SyslogFacility(Setting): - name = "syslog_facility" - section = "Logging" - cli = ["--log-syslog-facility"] - meta = "SYSLOG_FACILITY" - validator = validate_string - default = "user" - desc = """\ - Syslog facility name - """ - - -class EnableStdioInheritance(Setting): - name = "enable_stdio_inheritance" - section = "Logging" - cli = ["-R", "--enable-stdio-inheritance"] - validator = validate_bool - default = False - action = "store_true" - desc = """\ - Enable stdio inheritance. 
- - Enable inheritance for stdio file descriptors in daemon mode. - - Note: To disable the Python stdout buffering, you can to set the user - environment variable ``PYTHONUNBUFFERED`` . - """ - - -# statsD monitoring -class StatsdHost(Setting): - name = "statsd_host" - section = "Logging" - cli = ["--statsd-host"] - meta = "STATSD_ADDR" - default = None - validator = validate_statsd_address - desc = """\ - The address of the StatsD server to log to. - - Address is a string of the form: - - * ``unix://PATH`` : for a unix domain socket. - * ``HOST:PORT`` : for a network address - - .. versionadded:: 19.1 - """ - - -# Datadog Statsd (dogstatsd) tags. https://docs.datadoghq.com/developers/dogstatsd/ -class DogstatsdTags(Setting): - name = "dogstatsd_tags" - section = "Logging" - cli = ["--dogstatsd-tags"] - meta = "DOGSTATSD_TAGS" - default = "" - validator = validate_string - desc = """\ - A comma-delimited list of datadog statsd (dogstatsd) tags to append to - statsd metrics. - - .. versionadded:: 20 - """ - - -class StatsdPrefix(Setting): - name = "statsd_prefix" - section = "Logging" - cli = ["--statsd-prefix"] - meta = "STATSD_PREFIX" - default = "" - validator = validate_string - desc = """\ - Prefix to use when emitting statsd metrics (a trailing ``.`` is added, - if not provided). - - .. versionadded:: 19.2 - """ - - -class Procname(Setting): - name = "proc_name" - section = "Process Naming" - cli = ["-n", "--name"] - meta = "STRING" - validator = validate_string - default = None - desc = """\ - A base to use with setproctitle for process naming. - - This affects things like ``ps`` and ``top``. If you're going to be - running more than one instance of Gunicorn you'll probably want to set a - name to tell them apart. This requires that you install the setproctitle - module. - - If not set, the *default_proc_name* setting will be used. - """ - - -class DefaultProcName(Setting): - name = "default_proc_name" - section = "Process Naming" - validator = validate_string - default = "gunicorn" - desc = """\ - Internal setting that is adjusted for each type of application. - """ - - -class PythonPath(Setting): - name = "pythonpath" - section = "Server Mechanics" - cli = ["--pythonpath"] - meta = "STRING" - validator = validate_string - default = None - desc = """\ - A comma-separated list of directories to add to the Python path. - - e.g. - ``'/home/djangoprojects/myproject,/home/python/mylibrary'``. - """ - - -class Paste(Setting): - name = "paste" - section = "Server Mechanics" - cli = ["--paste", "--paster"] - meta = "STRING" - validator = validate_string - default = None - desc = """\ - Load a PasteDeploy config file. The argument may contain a ``#`` - symbol followed by the name of an app section from the config file, - e.g. ``production.ini#admin``. - - At this time, using alternate server blocks is not supported. Use the - command line arguments to control server configuration instead. - """ - - -class OnStarting(Setting): - name = "on_starting" - section = "Server Hooks" - validator = validate_callable(1) - type = callable - - def on_starting(server): - pass - default = staticmethod(on_starting) - desc = """\ - Called just before the master process is initialized. - - The callable needs to accept a single instance variable for the Arbiter. 
- """ - - -class OnReload(Setting): - name = "on_reload" - section = "Server Hooks" - validator = validate_callable(1) - type = callable - - def on_reload(server): - pass - default = staticmethod(on_reload) - desc = """\ - Called to recycle workers during a reload via SIGHUP. - - The callable needs to accept a single instance variable for the Arbiter. - """ - - -class WhenReady(Setting): - name = "when_ready" - section = "Server Hooks" - validator = validate_callable(1) - type = callable - - def when_ready(server): - pass - default = staticmethod(when_ready) - desc = """\ - Called just after the server is started. - - The callable needs to accept a single instance variable for the Arbiter. - """ - - -class Prefork(Setting): - name = "pre_fork" - section = "Server Hooks" - validator = validate_callable(2) - type = callable - - def pre_fork(server, worker): - pass - default = staticmethod(pre_fork) - desc = """\ - Called just before a worker is forked. - - The callable needs to accept two instance variables for the Arbiter and - new Worker. - """ - - -class Postfork(Setting): - name = "post_fork" - section = "Server Hooks" - validator = validate_callable(2) - type = callable - - def post_fork(server, worker): - pass - default = staticmethod(post_fork) - desc = """\ - Called just after a worker has been forked. - - The callable needs to accept two instance variables for the Arbiter and - new Worker. - """ - - -class PostWorkerInit(Setting): - name = "post_worker_init" - section = "Server Hooks" - validator = validate_callable(1) - type = callable - - def post_worker_init(worker): - pass - - default = staticmethod(post_worker_init) - desc = """\ - Called just after a worker has initialized the application. - - The callable needs to accept one instance variable for the initialized - Worker. - """ - - -class WorkerInt(Setting): - name = "worker_int" - section = "Server Hooks" - validator = validate_callable(1) - type = callable - - def worker_int(worker): - pass - - default = staticmethod(worker_int) - desc = """\ - Called just after a worker exited on SIGINT or SIGQUIT. - - The callable needs to accept one instance variable for the initialized - Worker. - """ - - -class WorkerAbort(Setting): - name = "worker_abort" - section = "Server Hooks" - validator = validate_callable(1) - type = callable - - def worker_abort(worker): - pass - - default = staticmethod(worker_abort) - desc = """\ - Called when a worker received the SIGABRT signal. - - This call generally happens on timeout. - - The callable needs to accept one instance variable for the initialized - Worker. - """ - - -class PreExec(Setting): - name = "pre_exec" - section = "Server Hooks" - validator = validate_callable(1) - type = callable - - def pre_exec(server): - pass - default = staticmethod(pre_exec) - desc = """\ - Called just before a new master process is forked. - - The callable needs to accept a single instance variable for the Arbiter. - """ - - -class PreRequest(Setting): - name = "pre_request" - section = "Server Hooks" - validator = validate_callable(2) - type = callable - - def pre_request(worker, req): - worker.log.debug("%s %s", req.method, req.path) - default = staticmethod(pre_request) - desc = """\ - Called just before a worker processes the request. - - The callable needs to accept two instance variables for the Worker and - the Request. 
- """ - - -class PostRequest(Setting): - name = "post_request" - section = "Server Hooks" - validator = validate_post_request - type = callable - - def post_request(worker, req, environ, resp): - pass - default = staticmethod(post_request) - desc = """\ - Called after a worker processes the request. - - The callable needs to accept two instance variables for the Worker and - the Request. - """ - - -class ChildExit(Setting): - name = "child_exit" - section = "Server Hooks" - validator = validate_callable(2) - type = callable - - def child_exit(server, worker): - pass - default = staticmethod(child_exit) - desc = """\ - Called just after a worker has been exited, in the master process. - - The callable needs to accept two instance variables for the Arbiter and - the just-exited Worker. - - .. versionadded:: 19.7 - """ - - -class WorkerExit(Setting): - name = "worker_exit" - section = "Server Hooks" - validator = validate_callable(2) - type = callable - - def worker_exit(server, worker): - pass - default = staticmethod(worker_exit) - desc = """\ - Called just after a worker has been exited, in the worker process. - - The callable needs to accept two instance variables for the Arbiter and - the just-exited Worker. - """ - - -class NumWorkersChanged(Setting): - name = "nworkers_changed" - section = "Server Hooks" - validator = validate_callable(3) - type = callable - - def nworkers_changed(server, new_value, old_value): - pass - default = staticmethod(nworkers_changed) - desc = """\ - Called just after *num_workers* has been changed. - - The callable needs to accept an instance variable of the Arbiter and - two integers of number of workers after and before change. - - If the number of workers is set for the first time, *old_value* would - be ``None``. - """ - - -class OnExit(Setting): - name = "on_exit" - section = "Server Hooks" - validator = validate_callable(1) - - def on_exit(server): - pass - - default = staticmethod(on_exit) - desc = """\ - Called just before exiting Gunicorn. - - The callable needs to accept a single instance variable for the Arbiter. - """ - - -class NewSSLContext(Setting): - name = "ssl_context" - section = "Server Hooks" - validator = validate_callable(2) - type = callable - - def ssl_context(config, default_ssl_context_factory): - return default_ssl_context_factory() - - default = staticmethod(ssl_context) - desc = """\ - Called when SSLContext is needed. - - Allows customizing SSL context. - - The callable needs to accept an instance variable for the Config and - a factory function that returns default SSLContext which is initialized - with certificates, private key, cert_reqs, and ciphers according to - config and can be further customized by the callable. - The callable needs to return SSLContext object. - - Following example shows a configuration file that sets the minimum TLS version to 1.3: - - .. code-block:: python - - def ssl_context(conf, default_ssl_context_factory): - import ssl - context = default_ssl_context_factory() - context.minimum_version = ssl.TLSVersion.TLSv1_3 - return context - - .. versionadded:: 21.0 - """ - - -class ProxyProtocol(Setting): - name = "proxy_protocol" - section = "Server Mechanics" - cli = ["--proxy-protocol"] - validator = validate_bool - default = False - action = "store_true" - desc = """\ - Enable detect PROXY protocol (PROXY mode). - - Allow using HTTP and Proxy together. It may be useful for work with - stunnel as HTTPS frontend and Gunicorn as HTTP server. 
- - PROXY protocol: http://haproxy.1wt.eu/download/1.5/doc/proxy-protocol.txt - - Example for stunnel config:: - - [https] - protocol = proxy - accept = 443 - connect = 80 - cert = /etc/ssl/certs/stunnel.pem - key = /etc/ssl/certs/stunnel.key - """ - - -class ProxyAllowFrom(Setting): - name = "proxy_allow_ips" - section = "Server Mechanics" - cli = ["--proxy-allow-from"] - validator = validate_string_to_addr_list - default = "127.0.0.1,::1" - desc = """\ - Front-end's IPs from which allowed accept proxy requests (comma separated). - - Set to ``*`` to disable checking of front-end IPs. This is useful for setups - where you don't know in advance the IP address of front-end, but - instead have ensured via other means that only your - authorized front-ends can access Gunicorn. - - .. note:: - - This option does not affect UNIX socket connections. Connections not associated with - an IP address are treated as allowed, unconditionally. - """ - - -class KeyFile(Setting): - name = "keyfile" - section = "SSL" - cli = ["--keyfile"] - meta = "FILE" - validator = validate_string - default = None - desc = """\ - SSL key file - """ - - -class CertFile(Setting): - name = "certfile" - section = "SSL" - cli = ["--certfile"] - meta = "FILE" - validator = validate_string - default = None - desc = """\ - SSL certificate file - """ - - -class SSLVersion(Setting): - name = "ssl_version" - section = "SSL" - cli = ["--ssl-version"] - validator = validate_ssl_version - - if hasattr(ssl, "PROTOCOL_TLS"): - default = ssl.PROTOCOL_TLS - else: - default = ssl.PROTOCOL_SSLv23 - - default = ssl.PROTOCOL_SSLv23 - desc = """\ - SSL version to use (see stdlib ssl module's). - - .. deprecated:: 21.0 - The option is deprecated and it is currently ignored. Use :ref:`ssl-context` instead. - - ============= ============ - --ssl-version Description - ============= ============ - SSLv3 SSLv3 is not-secure and is strongly discouraged. - SSLv23 Alias for TLS. Deprecated in Python 3.6, use TLS. - TLS Negotiate highest possible version between client/server. - Can yield SSL. (Python 3.6+) - TLSv1 TLS 1.0 - TLSv1_1 TLS 1.1 (Python 3.4+) - TLSv1_2 TLS 1.2 (Python 3.4+) - TLS_SERVER Auto-negotiate the highest protocol version like TLS, - but only support server-side SSLSocket connections. - (Python 3.6+) - ============= ============ - - .. versionchanged:: 19.7 - The default value has been changed from ``ssl.PROTOCOL_TLSv1`` to - ``ssl.PROTOCOL_SSLv23``. - .. versionchanged:: 20.0 - This setting now accepts string names based on ``ssl.PROTOCOL_`` - constants. - .. versionchanged:: 20.0.1 - The default value has been changed from ``ssl.PROTOCOL_SSLv23`` to - ``ssl.PROTOCOL_TLS`` when Python >= 3.6 . 
- """ - - -class CertReqs(Setting): - name = "cert_reqs" - section = "SSL" - cli = ["--cert-reqs"] - validator = validate_pos_int - default = ssl.CERT_NONE - desc = """\ - Whether client certificate is required (see stdlib ssl module's) - - =========== =========================== - --cert-reqs Description - =========== =========================== - `0` no client verification - `1` ssl.CERT_OPTIONAL - `2` ssl.CERT_REQUIRED - =========== =========================== - """ - - -class CACerts(Setting): - name = "ca_certs" - section = "SSL" - cli = ["--ca-certs"] - meta = "FILE" - validator = validate_string - default = None - desc = """\ - CA certificates file - """ - - -class SuppressRaggedEOFs(Setting): - name = "suppress_ragged_eofs" - section = "SSL" - cli = ["--suppress-ragged-eofs"] - action = "store_true" - default = True - validator = validate_bool - desc = """\ - Suppress ragged EOFs (see stdlib ssl module's) - """ - - -class DoHandshakeOnConnect(Setting): - name = "do_handshake_on_connect" - section = "SSL" - cli = ["--do-handshake-on-connect"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Whether to perform SSL handshake on socket connect (see stdlib ssl module's) - """ - - -class Ciphers(Setting): - name = "ciphers" - section = "SSL" - cli = ["--ciphers"] - validator = validate_string - default = None - desc = """\ - SSL Cipher suite to use, in the format of an OpenSSL cipher list. - - By default we use the default cipher list from Python's ``ssl`` module, - which contains ciphers considered strong at the time of each Python - release. - - As a recommended alternative, the Open Web App Security Project (OWASP) - offers `a vetted set of strong cipher strings rated A+ to C- - `_. - OWASP provides details on user-agent compatibility at each security level. - - See the `OpenSSL Cipher List Format Documentation - `_ - for details on the format of an OpenSSL cipher list. - """ - - -class PasteGlobalConf(Setting): - name = "raw_paste_global_conf" - action = "append" - section = "Server Mechanics" - cli = ["--paste-global"] - meta = "CONF" - validator = validate_list_string - default = [] - - desc = """\ - Set a PasteDeploy global config variable in ``key=value`` form. - - The option can be specified multiple times. - - The variables are passed to the PasteDeploy entrypoint. Example:: - - $ gunicorn -b 127.0.0.1:8000 --paste development.ini --paste-global FOO=1 --paste-global BAR=2 - - .. versionadded:: 19.7 - """ - - -class PermitObsoleteFolding(Setting): - name = "permit_obsolete_folding" - section = "Server Mechanics" - cli = ["--permit-obsolete-folding"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Permit requests employing obsolete HTTP line folding mechanism - - The folding mechanism was deprecated by rfc7230 Section 3.2.4 and will not be - employed in HTTP request headers from standards-compliant HTTP clients. - - This option is provided to diagnose backwards-incompatible changes. - Use with care and only if necessary. Temporary; the precise effect of this option may - change in a future version, or it may be removed altogether. - - .. versionadded:: 23.0.0 - """ - - -class StripHeaderSpaces(Setting): - name = "strip_header_spaces" - section = "Server Mechanics" - cli = ["--strip-header-spaces"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Strip spaces present between the header name and the the ``:``. 
- - This is known to induce vulnerabilities and is not compliant with the HTTP/1.1 standard. - See https://portswigger.net/research/http-desync-attacks-request-smuggling-reborn. - - Use with care and only if necessary. Deprecated; scheduled for removal in 25.0.0 - - .. versionadded:: 20.0.1 - """ - - -class PermitUnconventionalHTTPMethod(Setting): - name = "permit_unconventional_http_method" - section = "Server Mechanics" - cli = ["--permit-unconventional-http-method"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Permit HTTP methods not matching conventions, such as IANA registration guidelines - - This permits request methods of length less than 3 or more than 20, - methods with lowercase characters or methods containing the # character. - HTTP methods are case sensitive by definition, and merely uppercase by convention. - - If unset, Gunicorn will apply nonstandard restrictions and cause 400 response status - in cases where otherwise 501 status is expected. While this option does modify that - behaviour, it should not be depended upon to guarantee standards-compliant behaviour. - Rather, it is provided temporarily, to assist in diagnosing backwards-incompatible - changes around the incomplete application of those restrictions. - - Use with care and only if necessary. Temporary; scheduled for removal in 24.0.0 - - .. versionadded:: 22.0.0 - """ - - -class PermitUnconventionalHTTPVersion(Setting): - name = "permit_unconventional_http_version" - section = "Server Mechanics" - cli = ["--permit-unconventional-http-version"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Permit HTTP version not matching conventions of 2023 - - This disables the refusal of likely malformed request lines. - It is unusual to specify HTTP 1 versions other than 1.0 and 1.1. - - This option is provided to diagnose backwards-incompatible changes. - Use with care and only if necessary. Temporary; the precise effect of this option may - change in a future version, or it may be removed altogether. - - .. versionadded:: 22.0.0 - """ - - -class CasefoldHTTPMethod(Setting): - name = "casefold_http_method" - section = "Server Mechanics" - cli = ["--casefold-http-method"] - validator = validate_bool - action = "store_true" - default = False - desc = """\ - Transform received HTTP methods to uppercase - - HTTP methods are case sensitive by definition, and merely uppercase by convention. - - This option is provided because previous versions of gunicorn defaulted to this behaviour. - - Use with care and only if necessary. Deprecated; scheduled for removal in 24.0.0 - - .. versionadded:: 22.0.0 - """ - - -def validate_header_map_behaviour(val): - # FIXME: refactor all of this subclassing stdlib argparse - - if val is None: - return - - if not isinstance(val, str): - raise TypeError("Invalid type for casting: %s" % val) - if val.lower().strip() == "drop": - return "drop" - elif val.lower().strip() == "refuse": - return "refuse" - elif val.lower().strip() == "dangerous": - return "dangerous" - else: - raise ValueError("Invalid header map behaviour: %s" % val) - - -class ForwarderHeaders(Setting): - name = "forwarder_headers" - section = "Server Mechanics" - cli = ["--forwarder-headers"] - validator = validate_string_to_list - default = "SCRIPT_NAME,PATH_INFO" - desc = """\ - - A list containing upper-case header field names that the front-end proxy - (see :ref:`forwarded-allow-ips`) sets, to be used in WSGI environment. 
- - This option has no effect for headers not present in the request. - - This option can be used to transfer ``SCRIPT_NAME``, ``PATH_INFO`` - and ``REMOTE_USER``. - - It is important that your front-end proxy configuration ensures that - the headers defined here can not be passed directly from the client. - """ - - -class HeaderMap(Setting): - name = "header_map" - section = "Server Mechanics" - cli = ["--header-map"] - validator = validate_header_map_behaviour - default = "drop" - desc = """\ - Configure how header field names are mapped into environ - - Headers containing underscores are permitted by RFC9110, - but gunicorn joining headers of different names into - the same environment variable will dangerously confuse applications as to which is which. - - The safe default ``drop`` is to silently drop headers that cannot be unambiguously mapped. - The value ``refuse`` will return an error if a request contains *any* such header. - The value ``dangerous`` matches the previous, not advisable, behaviour of mapping different - header field names into the same environ name. - - If the source is permitted as explained in :ref:`forwarded-allow-ips`, *and* the header name is - present in :ref:`forwarder-headers`, the header is mapped into environment regardless of - the state of this setting. - - Use with care and only if necessary and after considering if your problem could - instead be solved by specifically renaming or rewriting only the intended headers - on a proxy in front of Gunicorn. - - .. versionadded:: 22.0.0 - """ diff --git a/venv/lib/python3.10/site-packages/gunicorn/debug.py b/venv/lib/python3.10/site-packages/gunicorn/debug.py deleted file mode 100644 index 5fae0b4..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/debug.py +++ /dev/null @@ -1,68 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -"""The debug module contains utilities and functions for better -debugging Gunicorn.""" - -import sys -import linecache -import re -import inspect - -__all__ = ['spew', 'unspew'] - -_token_spliter = re.compile(r'\W+') - - -class Spew: - - def __init__(self, trace_names=None, show_values=True): - self.trace_names = trace_names - self.show_values = show_values - - def __call__(self, frame, event, arg): - if event == 'line': - lineno = frame.f_lineno - if '__file__' in frame.f_globals: - filename = frame.f_globals['__file__'] - if (filename.endswith('.pyc') or - filename.endswith('.pyo')): - filename = filename[:-1] - name = frame.f_globals['__name__'] - line = linecache.getline(filename, lineno) - else: - name = '[unknown]' - try: - src = inspect.getsourcelines(frame) - line = src[lineno] - except OSError: - line = 'Unknown code named [%s]. VM instruction #%d' % ( - frame.f_code.co_name, frame.f_lasti) - if self.trace_names is None or name in self.trace_names: - print('%s:%s: %s' % (name, lineno, line.rstrip())) - if not self.show_values: - return self - details = [] - tokens = _token_spliter.split(line) - for tok in tokens: - if tok in frame.f_globals: - details.append('%s=%r' % (tok, frame.f_globals[tok])) - if tok in frame.f_locals: - details.append('%s=%r' % (tok, frame.f_locals[tok])) - if details: - print("\t%s" % ' '.join(details)) - return self - - -def spew(trace_names=None, show_values=False): - """Install a trace hook which writes incredibly detailed logs - about what code is being executed to stdout. 
- """ - sys.settrace(Spew(trace_names, show_values)) - - -def unspew(): - """Remove the trace hook installed by spew. - """ - sys.settrace(None) diff --git a/venv/lib/python3.10/site-packages/gunicorn/errors.py b/venv/lib/python3.10/site-packages/gunicorn/errors.py deleted file mode 100644 index 1128380..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/errors.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -# We don't need to call super() in __init__ methods of our -# BaseException and Exception classes because we also define -# our own __str__ methods so there is no need to pass 'message' -# to the base class to get a meaningful output from 'str(exc)'. -# pylint: disable=super-init-not-called - - -# we inherit from BaseException here to make sure to not be caught -# at application level -class HaltServer(BaseException): - def __init__(self, reason, exit_status=1): - self.reason = reason - self.exit_status = exit_status - - def __str__(self): - return "" % (self.reason, self.exit_status) - - -class ConfigError(Exception): - """ Exception raised on config error """ - - -class AppImportError(Exception): - """ Exception raised when loading an application """ diff --git a/venv/lib/python3.10/site-packages/gunicorn/glogging.py b/venv/lib/python3.10/site-packages/gunicorn/glogging.py deleted file mode 100644 index e34fcd5..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/glogging.py +++ /dev/null @@ -1,473 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -import base64 -import binascii -import json -import time -import logging -logging.Logger.manager.emittedNoHandlerWarning = 1 # noqa -from logging.config import dictConfig -from logging.config import fileConfig -import os -import socket -import sys -import threading -import traceback - -from gunicorn import util - - -# syslog facility codes -SYSLOG_FACILITIES = { - "auth": 4, - "authpriv": 10, - "cron": 9, - "daemon": 3, - "ftp": 11, - "kern": 0, - "lpr": 6, - "mail": 2, - "news": 7, - "security": 4, # DEPRECATED - "syslog": 5, - "user": 1, - "uucp": 8, - "local0": 16, - "local1": 17, - "local2": 18, - "local3": 19, - "local4": 20, - "local5": 21, - "local6": 22, - "local7": 23 -} - -CONFIG_DEFAULTS = { - "version": 1, - "disable_existing_loggers": False, - "root": {"level": "INFO", "handlers": ["console"]}, - "loggers": { - "gunicorn.error": { - "level": "INFO", - "handlers": ["error_console"], - "propagate": True, - "qualname": "gunicorn.error" - }, - - "gunicorn.access": { - "level": "INFO", - "handlers": ["console"], - "propagate": True, - "qualname": "gunicorn.access" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "formatter": "generic", - "stream": "ext://sys.stdout" - }, - "error_console": { - "class": "logging.StreamHandler", - "formatter": "generic", - "stream": "ext://sys.stderr" - }, - }, - "formatters": { - "generic": { - "format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s", - "datefmt": "[%Y-%m-%d %H:%M:%S %z]", - "class": "logging.Formatter" - } - } -} - - -def loggers(): - """ get list of all loggers """ - root = logging.root - existing = list(root.manager.loggerDict.keys()) - return [logging.getLogger(name) for name in existing] - - -class SafeAtoms(dict): - - def __init__(self, atoms): - dict.__init__(self) - for key, value in atoms.items(): - if isinstance(value, str): - self[key] = value.replace('"', 
'\\"') - else: - self[key] = value - - def __getitem__(self, k): - if k.startswith("{"): - kl = k.lower() - if kl in self: - return super().__getitem__(kl) - else: - return "-" - if k in self: - return super().__getitem__(k) - else: - return '-' - - -def parse_syslog_address(addr): - - # unix domain socket type depends on backend - # SysLogHandler will try both when given None - if addr.startswith("unix://"): - sock_type = None - - # set socket type only if explicitly requested - parts = addr.split("#", 1) - if len(parts) == 2: - addr = parts[0] - if parts[1] == "dgram": - sock_type = socket.SOCK_DGRAM - - return (sock_type, addr.split("unix://")[1]) - - if addr.startswith("udp://"): - addr = addr.split("udp://")[1] - socktype = socket.SOCK_DGRAM - elif addr.startswith("tcp://"): - addr = addr.split("tcp://")[1] - socktype = socket.SOCK_STREAM - else: - raise RuntimeError("invalid syslog address") - - if '[' in addr and ']' in addr: - host = addr.split(']')[0][1:].lower() - elif ':' in addr: - host = addr.split(':')[0].lower() - elif addr == "": - host = "localhost" - else: - host = addr.lower() - - addr = addr.split(']')[-1] - if ":" in addr: - port = addr.split(':', 1)[1] - if not port.isdigit(): - raise RuntimeError("%r is not a valid port number." % port) - port = int(port) - else: - port = 514 - - return (socktype, (host, port)) - - -class Logger: - - LOG_LEVELS = { - "critical": logging.CRITICAL, - "error": logging.ERROR, - "warning": logging.WARNING, - "info": logging.INFO, - "debug": logging.DEBUG - } - loglevel = logging.INFO - - error_fmt = r"%(asctime)s [%(process)d] [%(levelname)s] %(message)s" - datefmt = r"[%Y-%m-%d %H:%M:%S %z]" - - access_fmt = "%(message)s" - syslog_fmt = "[%(process)d] %(message)s" - - atoms_wrapper_class = SafeAtoms - - def __init__(self, cfg): - self.error_log = logging.getLogger("gunicorn.error") - self.error_log.propagate = False - self.access_log = logging.getLogger("gunicorn.access") - self.access_log.propagate = False - self.error_handlers = [] - self.access_handlers = [] - self.logfile = None - self.lock = threading.Lock() - self.cfg = cfg - self.setup(cfg) - - def setup(self, cfg): - self.loglevel = self.LOG_LEVELS.get(cfg.loglevel.lower(), logging.INFO) - self.error_log.setLevel(self.loglevel) - self.access_log.setLevel(logging.INFO) - - # set gunicorn.error handler - if self.cfg.capture_output and cfg.errorlog != "-": - for stream in sys.stdout, sys.stderr: - stream.flush() - - self.logfile = open(cfg.errorlog, 'a+') - os.dup2(self.logfile.fileno(), sys.stdout.fileno()) - os.dup2(self.logfile.fileno(), sys.stderr.fileno()) - - self._set_handler(self.error_log, cfg.errorlog, - logging.Formatter(self.error_fmt, self.datefmt)) - - # set gunicorn.access handler - if cfg.accesslog is not None: - self._set_handler( - self.access_log, cfg.accesslog, - fmt=logging.Formatter(self.access_fmt), stream=sys.stdout - ) - - # set syslog handler - if cfg.syslog: - self._set_syslog_handler( - self.error_log, cfg, self.syslog_fmt, "error" - ) - if not cfg.disable_redirect_access_to_syslog: - self._set_syslog_handler( - self.access_log, cfg, self.syslog_fmt, "access" - ) - - if cfg.logconfig_dict: - config = CONFIG_DEFAULTS.copy() - config.update(cfg.logconfig_dict) - try: - dictConfig(config) - except ( - AttributeError, - ImportError, - ValueError, - TypeError - ) as exc: - raise RuntimeError(str(exc)) - elif cfg.logconfig_json: - config = CONFIG_DEFAULTS.copy() - if os.path.exists(cfg.logconfig_json): - try: - config_json = json.load(open(cfg.logconfig_json)) 
- config.update(config_json) - dictConfig(config) - except ( - json.JSONDecodeError, - AttributeError, - ImportError, - ValueError, - TypeError - ) as exc: - raise RuntimeError(str(exc)) - elif cfg.logconfig: - if os.path.exists(cfg.logconfig): - defaults = CONFIG_DEFAULTS.copy() - defaults['__file__'] = cfg.logconfig - defaults['here'] = os.path.dirname(cfg.logconfig) - fileConfig(cfg.logconfig, defaults=defaults, - disable_existing_loggers=False) - else: - msg = "Error: log config '%s' not found" - raise RuntimeError(msg % cfg.logconfig) - - def critical(self, msg, *args, **kwargs): - self.error_log.critical(msg, *args, **kwargs) - - def error(self, msg, *args, **kwargs): - self.error_log.error(msg, *args, **kwargs) - - def warning(self, msg, *args, **kwargs): - self.error_log.warning(msg, *args, **kwargs) - - def info(self, msg, *args, **kwargs): - self.error_log.info(msg, *args, **kwargs) - - def debug(self, msg, *args, **kwargs): - self.error_log.debug(msg, *args, **kwargs) - - def exception(self, msg, *args, **kwargs): - self.error_log.exception(msg, *args, **kwargs) - - def log(self, lvl, msg, *args, **kwargs): - if isinstance(lvl, str): - lvl = self.LOG_LEVELS.get(lvl.lower(), logging.INFO) - self.error_log.log(lvl, msg, *args, **kwargs) - - def atoms(self, resp, req, environ, request_time): - """ Gets atoms for log formatting. - """ - status = resp.status - if isinstance(status, str): - status = status.split(None, 1)[0] - atoms = { - 'h': environ.get('REMOTE_ADDR', '-'), - 'l': '-', - 'u': self._get_user(environ) or '-', - 't': self.now(), - 'r': "%s %s %s" % (environ['REQUEST_METHOD'], - environ['RAW_URI'], - environ["SERVER_PROTOCOL"]), - 's': status, - 'm': environ.get('REQUEST_METHOD'), - 'U': environ.get('PATH_INFO'), - 'q': environ.get('QUERY_STRING'), - 'H': environ.get('SERVER_PROTOCOL'), - 'b': getattr(resp, 'sent', None) is not None and str(resp.sent) or '-', - 'B': getattr(resp, 'sent', None), - 'f': environ.get('HTTP_REFERER', '-'), - 'a': environ.get('HTTP_USER_AGENT', '-'), - 'T': request_time.seconds, - 'D': (request_time.seconds * 1000000) + request_time.microseconds, - 'M': (request_time.seconds * 1000) + int(request_time.microseconds / 1000), - 'L': "%d.%06d" % (request_time.seconds, request_time.microseconds), - 'p': "<%s>" % os.getpid() - } - - # add request headers - if hasattr(req, 'headers'): - req_headers = req.headers - else: - req_headers = req - - if hasattr(req_headers, "items"): - req_headers = req_headers.items() - - atoms.update({"{%s}i" % k.lower(): v for k, v in req_headers}) - - resp_headers = resp.headers - if hasattr(resp_headers, "items"): - resp_headers = resp_headers.items() - - # add response headers - atoms.update({"{%s}o" % k.lower(): v for k, v in resp_headers}) - - # add environ variables - environ_variables = environ.items() - atoms.update({"{%s}e" % k.lower(): v for k, v in environ_variables}) - - return atoms - - def access(self, resp, req, environ, request_time): - """ See http://httpd.apache.org/docs/2.0/logs.html#combined - for format details - """ - - if not (self.cfg.accesslog or self.cfg.logconfig or - self.cfg.logconfig_dict or self.cfg.logconfig_json or - (self.cfg.syslog and not self.cfg.disable_redirect_access_to_syslog)): - return - - # wrap atoms: - # - make sure atoms will be test case insensitively - # - if atom doesn't exist replace it by '-' - safe_atoms = self.atoms_wrapper_class( - self.atoms(resp, req, environ, request_time) - ) - - try: - self.access_log.info(self.cfg.access_log_format, safe_atoms) - except 
Exception: - self.error(traceback.format_exc()) - - def now(self): - """ return date in Apache Common Log Format """ - return time.strftime('[%d/%b/%Y:%H:%M:%S %z]') - - def reopen_files(self): - if self.cfg.capture_output and self.cfg.errorlog != "-": - for stream in sys.stdout, sys.stderr: - stream.flush() - - with self.lock: - if self.logfile is not None: - self.logfile.close() - self.logfile = open(self.cfg.errorlog, 'a+') - os.dup2(self.logfile.fileno(), sys.stdout.fileno()) - os.dup2(self.logfile.fileno(), sys.stderr.fileno()) - - for log in loggers(): - for handler in log.handlers: - if isinstance(handler, logging.FileHandler): - handler.acquire() - try: - if handler.stream: - handler.close() - handler.stream = handler._open() - finally: - handler.release() - - def close_on_exec(self): - for log in loggers(): - for handler in log.handlers: - if isinstance(handler, logging.FileHandler): - handler.acquire() - try: - if handler.stream: - util.close_on_exec(handler.stream.fileno()) - finally: - handler.release() - - def _get_gunicorn_handler(self, log): - for h in log.handlers: - if getattr(h, "_gunicorn", False): - return h - - def _set_handler(self, log, output, fmt, stream=None): - # remove previous gunicorn log handler - h = self._get_gunicorn_handler(log) - if h: - log.handlers.remove(h) - - if output is not None: - if output == "-": - h = logging.StreamHandler(stream) - else: - util.check_is_writable(output) - h = logging.FileHandler(output) - # make sure the user can reopen the file - try: - os.chown(h.baseFilename, self.cfg.user, self.cfg.group) - except OSError: - # it's probably OK there, we assume the user has given - # /dev/null as a parameter. - pass - - h.setFormatter(fmt) - h._gunicorn = True - log.addHandler(h) - - def _set_syslog_handler(self, log, cfg, fmt, name): - # setup format - prefix = cfg.syslog_prefix or cfg.proc_name.replace(":", ".") - - prefix = "gunicorn.%s.%s" % (prefix, name) - - # set format - fmt = logging.Formatter(r"%s: %s" % (prefix, fmt)) - - # syslog facility - try: - facility = SYSLOG_FACILITIES[cfg.syslog_facility.lower()] - except KeyError: - raise RuntimeError("unknown facility name") - - # parse syslog address - socktype, addr = parse_syslog_address(cfg.syslog_addr) - - # finally setup the syslog handler - h = logging.handlers.SysLogHandler(address=addr, - facility=facility, socktype=socktype) - - h.setFormatter(fmt) - h._gunicorn = True - log.addHandler(h) - - def _get_user(self, environ): - user = None - http_auth = environ.get("HTTP_AUTHORIZATION") - if http_auth and http_auth.lower().startswith('basic'): - auth = http_auth.split(" ", 1) - if len(auth) == 2: - try: - # b64decode doesn't accept unicode in Python < 3.3 - # so we need to convert it to a byte string - auth = base64.b64decode(auth[1].strip().encode('utf-8')) - # b64decode returns a byte string - user = auth.split(b":", 1)[0].decode("UTF-8") - except (TypeError, binascii.Error, UnicodeDecodeError) as exc: - self.debug("Couldn't get username: %s", exc) - return user diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/__init__.py b/venv/lib/python3.10/site-packages/gunicorn/http/__init__.py deleted file mode 100644 index 11473bb..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/http/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. 
- -from gunicorn.http.message import Message, Request -from gunicorn.http.parser import RequestParser - -__all__ = ['Message', 'Request', 'RequestParser'] diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index eddf9db..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/body.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/body.cpython-310.pyc deleted file mode 100644 index 3b46e8b..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/body.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/errors.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/errors.cpython-310.pyc deleted file mode 100644 index fec622a..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/errors.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/message.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/message.cpython-310.pyc deleted file mode 100644 index be70529..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/message.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/parser.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/parser.cpython-310.pyc deleted file mode 100644 index 93f1016..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/parser.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/unreader.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/unreader.cpython-310.pyc deleted file mode 100644 index 7857b27..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/unreader.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/wsgi.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/wsgi.cpython-310.pyc deleted file mode 100644 index 015f37e..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/http/__pycache__/wsgi.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/body.py b/venv/lib/python3.10/site-packages/gunicorn/http/body.py deleted file mode 100644 index d7ee29e..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/http/body.py +++ /dev/null @@ -1,268 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. 
- -import io -import sys - -from gunicorn.http.errors import (NoMoreData, ChunkMissingTerminator, - InvalidChunkSize) - - -class ChunkedReader: - def __init__(self, req, unreader): - self.req = req - self.parser = self.parse_chunked(unreader) - self.buf = io.BytesIO() - - def read(self, size): - if not isinstance(size, int): - raise TypeError("size must be an integer type") - if size < 0: - raise ValueError("Size must be positive.") - if size == 0: - return b"" - - if self.parser: - while self.buf.tell() < size: - try: - self.buf.write(next(self.parser)) - except StopIteration: - self.parser = None - break - - data = self.buf.getvalue() - ret, rest = data[:size], data[size:] - self.buf = io.BytesIO() - self.buf.write(rest) - return ret - - def parse_trailers(self, unreader, data): - buf = io.BytesIO() - buf.write(data) - - idx = buf.getvalue().find(b"\r\n\r\n") - done = buf.getvalue()[:2] == b"\r\n" - while idx < 0 and not done: - self.get_data(unreader, buf) - idx = buf.getvalue().find(b"\r\n\r\n") - done = buf.getvalue()[:2] == b"\r\n" - if done: - unreader.unread(buf.getvalue()[2:]) - return b"" - self.req.trailers = self.req.parse_headers(buf.getvalue()[:idx], from_trailer=True) - unreader.unread(buf.getvalue()[idx + 4:]) - - def parse_chunked(self, unreader): - (size, rest) = self.parse_chunk_size(unreader) - while size > 0: - while size > len(rest): - size -= len(rest) - yield rest - rest = unreader.read() - if not rest: - raise NoMoreData() - yield rest[:size] - # Remove \r\n after chunk - rest = rest[size:] - while len(rest) < 2: - new_data = unreader.read() - if not new_data: - break - rest += new_data - if rest[:2] != b'\r\n': - raise ChunkMissingTerminator(rest[:2]) - (size, rest) = self.parse_chunk_size(unreader, data=rest[2:]) - - def parse_chunk_size(self, unreader, data=None): - buf = io.BytesIO() - if data is not None: - buf.write(data) - - idx = buf.getvalue().find(b"\r\n") - while idx < 0: - self.get_data(unreader, buf) - idx = buf.getvalue().find(b"\r\n") - - data = buf.getvalue() - line, rest_chunk = data[:idx], data[idx + 2:] - - # RFC9112 7.1.1: BWS before chunk-ext - but ONLY then - chunk_size, *chunk_ext = line.split(b";", 1) - if chunk_ext: - chunk_size = chunk_size.rstrip(b" \t") - if any(n not in b"0123456789abcdefABCDEF" for n in chunk_size): - raise InvalidChunkSize(chunk_size) - if len(chunk_size) == 0: - raise InvalidChunkSize(chunk_size) - chunk_size = int(chunk_size, 16) - - if chunk_size == 0: - try: - self.parse_trailers(unreader, rest_chunk) - except NoMoreData: - pass - return (0, None) - return (chunk_size, rest_chunk) - - def get_data(self, unreader, buf): - data = unreader.read() - if not data: - raise NoMoreData() - buf.write(data) - - -class LengthReader: - def __init__(self, unreader, length): - self.unreader = unreader - self.length = length - - def read(self, size): - if not isinstance(size, int): - raise TypeError("size must be an integral type") - - size = min(self.length, size) - if size < 0: - raise ValueError("Size must be positive.") - if size == 0: - return b"" - - buf = io.BytesIO() - data = self.unreader.read() - while data: - buf.write(data) - if buf.tell() >= size: - break - data = self.unreader.read() - - buf = buf.getvalue() - ret, rest = buf[:size], buf[size:] - self.unreader.unread(rest) - self.length -= size - return ret - - -class EOFReader: - def __init__(self, unreader): - self.unreader = unreader - self.buf = io.BytesIO() - self.finished = False - - def read(self, size): - if not isinstance(size, int): - raise TypeError("size 
must be an integral type") - if size < 0: - raise ValueError("Size must be positive.") - if size == 0: - return b"" - - if self.finished: - data = self.buf.getvalue() - ret, rest = data[:size], data[size:] - self.buf = io.BytesIO() - self.buf.write(rest) - return ret - - data = self.unreader.read() - while data: - self.buf.write(data) - if self.buf.tell() > size: - break - data = self.unreader.read() - - if not data: - self.finished = True - - data = self.buf.getvalue() - ret, rest = data[:size], data[size:] - self.buf = io.BytesIO() - self.buf.write(rest) - return ret - - -class Body: - def __init__(self, reader): - self.reader = reader - self.buf = io.BytesIO() - - def __iter__(self): - return self - - def __next__(self): - ret = self.readline() - if not ret: - raise StopIteration() - return ret - - next = __next__ - - def getsize(self, size): - if size is None: - return sys.maxsize - elif not isinstance(size, int): - raise TypeError("size must be an integral type") - elif size < 0: - return sys.maxsize - return size - - def read(self, size=None): - size = self.getsize(size) - if size == 0: - return b"" - - if size < self.buf.tell(): - data = self.buf.getvalue() - ret, rest = data[:size], data[size:] - self.buf = io.BytesIO() - self.buf.write(rest) - return ret - - while size > self.buf.tell(): - data = self.reader.read(1024) - if not data: - break - self.buf.write(data) - - data = self.buf.getvalue() - ret, rest = data[:size], data[size:] - self.buf = io.BytesIO() - self.buf.write(rest) - return ret - - def readline(self, size=None): - size = self.getsize(size) - if size == 0: - return b"" - - data = self.buf.getvalue() - self.buf = io.BytesIO() - - ret = [] - while 1: - idx = data.find(b"\n", 0, size) - idx = idx + 1 if idx >= 0 else size if len(data) >= size else 0 - if idx: - ret.append(data[:idx]) - self.buf.write(data[idx:]) - break - - ret.append(data) - size -= len(data) - data = self.reader.read(min(1024, size)) - if not data: - break - - return b"".join(ret) - - def readlines(self, size=None): - ret = [] - data = self.read() - while data: - pos = data.find(b"\n") - if pos < 0: - ret.append(data) - data = b"" - else: - line, data = data[:pos + 1], data[pos + 1:] - ret.append(line) - return ret diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/errors.py b/venv/lib/python3.10/site-packages/gunicorn/http/errors.py deleted file mode 100644 index bcb9700..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/http/errors.py +++ /dev/null @@ -1,145 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -# We don't need to call super() in __init__ methods of our -# BaseException and Exception classes because we also define -# our own __str__ methods so there is no need to pass 'message' -# to the base class to get a meaningful output from 'str(exc)'. 
-# pylint: disable=super-init-not-called - - -class ParseException(Exception): - pass - - -class NoMoreData(IOError): - def __init__(self, buf=None): - self.buf = buf - - def __str__(self): - return "No more data after: %r" % self.buf - - -class ConfigurationProblem(ParseException): - def __init__(self, info): - self.info = info - self.code = 500 - - def __str__(self): - return "Configuration problem: %s" % self.info - - -class InvalidRequestLine(ParseException): - def __init__(self, req): - self.req = req - self.code = 400 - - def __str__(self): - return "Invalid HTTP request line: %r" % self.req - - -class InvalidRequestMethod(ParseException): - def __init__(self, method): - self.method = method - - def __str__(self): - return "Invalid HTTP method: %r" % self.method - - -class InvalidHTTPVersion(ParseException): - def __init__(self, version): - self.version = version - - def __str__(self): - return "Invalid HTTP Version: %r" % (self.version,) - - -class InvalidHeader(ParseException): - def __init__(self, hdr, req=None): - self.hdr = hdr - self.req = req - - def __str__(self): - return "Invalid HTTP Header: %r" % self.hdr - - -class ObsoleteFolding(ParseException): - def __init__(self, hdr): - self.hdr = hdr - - def __str__(self): - return "Obsolete line folding is unacceptable: %r" % (self.hdr, ) - - -class InvalidHeaderName(ParseException): - def __init__(self, hdr): - self.hdr = hdr - - def __str__(self): - return "Invalid HTTP header name: %r" % self.hdr - - -class UnsupportedTransferCoding(ParseException): - def __init__(self, hdr): - self.hdr = hdr - self.code = 501 - - def __str__(self): - return "Unsupported transfer coding: %r" % self.hdr - - -class InvalidChunkSize(IOError): - def __init__(self, data): - self.data = data - - def __str__(self): - return "Invalid chunk size: %r" % self.data - - -class ChunkMissingTerminator(IOError): - def __init__(self, term): - self.term = term - - def __str__(self): - return "Invalid chunk terminator is not '\\r\\n': %r" % self.term - - -class LimitRequestLine(ParseException): - def __init__(self, size, max_size): - self.size = size - self.max_size = max_size - - def __str__(self): - return "Request Line is too large (%s > %s)" % (self.size, self.max_size) - - -class LimitRequestHeaders(ParseException): - def __init__(self, msg): - self.msg = msg - - def __str__(self): - return self.msg - - -class InvalidProxyLine(ParseException): - def __init__(self, line): - self.line = line - self.code = 400 - - def __str__(self): - return "Invalid PROXY line: %r" % self.line - - -class ForbiddenProxyRequest(ParseException): - def __init__(self, host): - self.host = host - self.code = 403 - - def __str__(self): - return "Proxy request from %r not allowed" % self.host - - -class InvalidSchemeHeaders(ParseException): - def __str__(self): - return "Contradictory scheme headers" diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/message.py b/venv/lib/python3.10/site-packages/gunicorn/http/message.py deleted file mode 100644 index 59ce0bf..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/http/message.py +++ /dev/null @@ -1,463 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. 
- -import io -import re -import socket - -from gunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body -from gunicorn.http.errors import ( - InvalidHeader, InvalidHeaderName, NoMoreData, - InvalidRequestLine, InvalidRequestMethod, InvalidHTTPVersion, - LimitRequestLine, LimitRequestHeaders, - UnsupportedTransferCoding, ObsoleteFolding, -) -from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest -from gunicorn.http.errors import InvalidSchemeHeaders -from gunicorn.util import bytes_to_str, split_request_uri - -MAX_REQUEST_LINE = 8190 -MAX_HEADERS = 32768 -DEFAULT_MAX_HEADERFIELD_SIZE = 8190 - -# verbosely on purpose, avoid backslash ambiguity -RFC9110_5_6_2_TOKEN_SPECIALS = r"!#$%&'*+-.^_`|~" -TOKEN_RE = re.compile(r"[%s0-9a-zA-Z]+" % (re.escape(RFC9110_5_6_2_TOKEN_SPECIALS))) -METHOD_BADCHAR_RE = re.compile("[a-z#]") -# usually 1.0 or 1.1 - RFC9112 permits restricting to single-digit versions -VERSION_RE = re.compile(r"HTTP/(\d)\.(\d)") -RFC9110_5_5_INVALID_AND_DANGEROUS = re.compile(r"[\0\r\n]") - - -class Message: - def __init__(self, cfg, unreader, peer_addr): - self.cfg = cfg - self.unreader = unreader - self.peer_addr = peer_addr - self.remote_addr = peer_addr - self.version = None - self.headers = [] - self.trailers = [] - self.body = None - self.scheme = "https" if cfg.is_ssl else "http" - self.must_close = False - - # set headers limits - self.limit_request_fields = cfg.limit_request_fields - if (self.limit_request_fields <= 0 - or self.limit_request_fields > MAX_HEADERS): - self.limit_request_fields = MAX_HEADERS - self.limit_request_field_size = cfg.limit_request_field_size - if self.limit_request_field_size < 0: - self.limit_request_field_size = DEFAULT_MAX_HEADERFIELD_SIZE - - # set max header buffer size - max_header_field_size = self.limit_request_field_size or DEFAULT_MAX_HEADERFIELD_SIZE - self.max_buffer_headers = self.limit_request_fields * \ - (max_header_field_size + 2) + 4 - - unused = self.parse(self.unreader) - self.unreader.unread(unused) - self.set_body_reader() - - def force_close(self): - self.must_close = True - - def parse(self, unreader): - raise NotImplementedError() - - def parse_headers(self, data, from_trailer=False): - cfg = self.cfg - headers = [] - - # Split lines on \r\n - lines = [bytes_to_str(line) for line in data.split(b"\r\n")] - - # handle scheme headers - scheme_header = False - secure_scheme_headers = {} - forwarder_headers = [] - if from_trailer: - # nonsense. either a request is https from the beginning - # .. or we are just behind a proxy who does not remove conflicting trailers - pass - elif ('*' in cfg.forwarded_allow_ips or - not isinstance(self.peer_addr, tuple) - or self.peer_addr[0] in cfg.forwarded_allow_ips): - secure_scheme_headers = cfg.secure_scheme_headers - forwarder_headers = cfg.forwarder_headers - - # Parse headers into key/value pairs paying attention - # to continuation lines. - while lines: - if len(headers) >= self.limit_request_fields: - raise LimitRequestHeaders("limit request headers fields") - - # Parse initial header name: value pair. 
- curr = lines.pop(0) - header_length = len(curr) + len("\r\n") - if curr.find(":") <= 0: - raise InvalidHeader(curr) - name, value = curr.split(":", 1) - if self.cfg.strip_header_spaces: - name = name.rstrip(" \t") - if not TOKEN_RE.fullmatch(name): - raise InvalidHeaderName(name) - - # this is still a dangerous place to do this - # but it is more correct than doing it before the pattern match: - # after we entered Unicode wonderland, 8bits could case-shift into ASCII: - # b"\xDF".decode("latin-1").upper().encode("ascii") == b"SS" - name = name.upper() - - value = [value.strip(" \t")] - - # Consume value continuation lines.. - while lines and lines[0].startswith((" ", "\t")): - # .. which is obsolete here, and no longer done by default - if not self.cfg.permit_obsolete_folding: - raise ObsoleteFolding(name) - curr = lines.pop(0) - header_length += len(curr) + len("\r\n") - if header_length > self.limit_request_field_size > 0: - raise LimitRequestHeaders("limit request headers " - "fields size") - value.append(curr.strip("\t ")) - value = " ".join(value) - - if RFC9110_5_5_INVALID_AND_DANGEROUS.search(value): - raise InvalidHeader(name) - - if header_length > self.limit_request_field_size > 0: - raise LimitRequestHeaders("limit request headers fields size") - - if name in secure_scheme_headers: - secure = value == secure_scheme_headers[name] - scheme = "https" if secure else "http" - if scheme_header: - if scheme != self.scheme: - raise InvalidSchemeHeaders() - else: - scheme_header = True - self.scheme = scheme - - # ambiguous mapping allows fooling downstream, e.g. merging non-identical headers: - # X-Forwarded-For: 2001:db8::ha:cc:ed - # X_Forwarded_For: 127.0.0.1,::1 - # HTTP_X_FORWARDED_FOR = 2001:db8::ha:cc:ed,127.0.0.1,::1 - # Only modify after fixing *ALL* header transformations; network to wsgi env - if "_" in name: - if name in forwarder_headers or "*" in forwarder_headers: - # This forwarder may override our environment - pass - elif self.cfg.header_map == "dangerous": - # as if we did not know we cannot safely map this - pass - elif self.cfg.header_map == "drop": - # almost as if it never had been there - # but still counts against resource limits - continue - else: - # fail-safe fallthrough: refuse - raise InvalidHeaderName(name) - - headers.append((name, value)) - - return headers - - def set_body_reader(self): - chunked = False - content_length = None - - for (name, value) in self.headers: - if name == "CONTENT-LENGTH": - if content_length is not None: - raise InvalidHeader("CONTENT-LENGTH", req=self) - content_length = value - elif name == "TRANSFER-ENCODING": - # T-E can be a list - # https://datatracker.ietf.org/doc/html/rfc9112#name-transfer-encoding - vals = [v.strip() for v in value.split(',')] - for val in vals: - if val.lower() == "chunked": - # DANGER: transfer codings stack, and stacked chunking is never intended - if chunked: - raise InvalidHeader("TRANSFER-ENCODING", req=self) - chunked = True - elif val.lower() == "identity": - # does not do much, could still plausibly desync from what the proxy does - # safe option: nuke it, its never needed - if chunked: - raise InvalidHeader("TRANSFER-ENCODING", req=self) - elif val.lower() in ('compress', 'deflate', 'gzip'): - # chunked should be the last one - if chunked: - raise InvalidHeader("TRANSFER-ENCODING", req=self) - self.force_close() - else: - raise UnsupportedTransferCoding(value) - - if chunked: - # two potentially dangerous cases: - # a) CL + TE (TE overrides CL.. 
only safe if the recipient sees it that way too) - # b) chunked HTTP/1.0 (always faulty) - if self.version < (1, 1): - # framing wonky, see RFC 9112 Section 6.1 - raise InvalidHeader("TRANSFER-ENCODING", req=self) - if content_length is not None: - # we cannot be certain the message framing we understood matches proxy intent - # -> whatever happens next, remaining input must not be trusted - raise InvalidHeader("CONTENT-LENGTH", req=self) - self.body = Body(ChunkedReader(self, self.unreader)) - elif content_length is not None: - try: - if str(content_length).isnumeric(): - content_length = int(content_length) - else: - raise InvalidHeader("CONTENT-LENGTH", req=self) - except ValueError: - raise InvalidHeader("CONTENT-LENGTH", req=self) - - if content_length < 0: - raise InvalidHeader("CONTENT-LENGTH", req=self) - - self.body = Body(LengthReader(self.unreader, content_length)) - else: - self.body = Body(EOFReader(self.unreader)) - - def should_close(self): - if self.must_close: - return True - for (h, v) in self.headers: - if h == "CONNECTION": - v = v.lower().strip(" \t") - if v == "close": - return True - elif v == "keep-alive": - return False - break - return self.version <= (1, 0) - - -class Request(Message): - def __init__(self, cfg, unreader, peer_addr, req_number=1): - self.method = None - self.uri = None - self.path = None - self.query = None - self.fragment = None - - # get max request line size - self.limit_request_line = cfg.limit_request_line - if (self.limit_request_line < 0 - or self.limit_request_line >= MAX_REQUEST_LINE): - self.limit_request_line = MAX_REQUEST_LINE - - self.req_number = req_number - self.proxy_protocol_info = None - super().__init__(cfg, unreader, peer_addr) - - def get_data(self, unreader, buf, stop=False): - data = unreader.read() - if not data: - if stop: - raise StopIteration() - raise NoMoreData(buf.getvalue()) - buf.write(data) - - def parse(self, unreader): - buf = io.BytesIO() - self.get_data(unreader, buf, stop=True) - - # get request line - line, rbuf = self.read_line(unreader, buf, self.limit_request_line) - - # proxy protocol - if self.proxy_protocol(bytes_to_str(line)): - # get next request line - buf = io.BytesIO() - buf.write(rbuf) - line, rbuf = self.read_line(unreader, buf, self.limit_request_line) - - self.parse_request_line(line) - buf = io.BytesIO() - buf.write(rbuf) - - # Headers - data = buf.getvalue() - idx = data.find(b"\r\n\r\n") - - done = data[:2] == b"\r\n" - while True: - idx = data.find(b"\r\n\r\n") - done = data[:2] == b"\r\n" - - if idx < 0 and not done: - self.get_data(unreader, buf) - data = buf.getvalue() - if len(data) > self.max_buffer_headers: - raise LimitRequestHeaders("max buffer headers") - else: - break - - if done: - self.unreader.unread(data[2:]) - return b"" - - self.headers = self.parse_headers(data[:idx], from_trailer=False) - - ret = data[idx + 4:] - buf = None - return ret - - def read_line(self, unreader, buf, limit=0): - data = buf.getvalue() - - while True: - idx = data.find(b"\r\n") - if idx >= 0: - # check if the request line is too large - if idx > limit > 0: - raise LimitRequestLine(idx, limit) - break - if len(data) - 2 > limit > 0: - raise LimitRequestLine(len(data), limit) - self.get_data(unreader, buf) - data = buf.getvalue() - - return (data[:idx], # request line, - data[idx + 2:]) # residue in the buffer, skip \r\n - - def proxy_protocol(self, line): - """\ - Detect, check and parse proxy protocol. - - :raises: ForbiddenProxyRequest, InvalidProxyLine. 
- :return: True for proxy protocol line else False - """ - if not self.cfg.proxy_protocol: - return False - - if self.req_number != 1: - return False - - if not line.startswith("PROXY"): - return False - - self.proxy_protocol_access_check() - self.parse_proxy_protocol(line) - - return True - - def proxy_protocol_access_check(self): - # check in allow list - if ("*" not in self.cfg.proxy_allow_ips and - isinstance(self.peer_addr, tuple) and - self.peer_addr[0] not in self.cfg.proxy_allow_ips): - raise ForbiddenProxyRequest(self.peer_addr[0]) - - def parse_proxy_protocol(self, line): - bits = line.split(" ") - - if len(bits) != 6: - raise InvalidProxyLine(line) - - # Extract data - proto = bits[1] - s_addr = bits[2] - d_addr = bits[3] - - # Validation - if proto not in ["TCP4", "TCP6"]: - raise InvalidProxyLine("protocol '%s' not supported" % proto) - if proto == "TCP4": - try: - socket.inet_pton(socket.AF_INET, s_addr) - socket.inet_pton(socket.AF_INET, d_addr) - except OSError: - raise InvalidProxyLine(line) - elif proto == "TCP6": - try: - socket.inet_pton(socket.AF_INET6, s_addr) - socket.inet_pton(socket.AF_INET6, d_addr) - except OSError: - raise InvalidProxyLine(line) - - try: - s_port = int(bits[4]) - d_port = int(bits[5]) - except ValueError: - raise InvalidProxyLine("invalid port %s" % line) - - if not ((0 <= s_port <= 65535) and (0 <= d_port <= 65535)): - raise InvalidProxyLine("invalid port %s" % line) - - # Set data - self.proxy_protocol_info = { - "proxy_protocol": proto, - "client_addr": s_addr, - "client_port": s_port, - "proxy_addr": d_addr, - "proxy_port": d_port - } - - def parse_request_line(self, line_bytes): - bits = [bytes_to_str(bit) for bit in line_bytes.split(b" ", 2)] - if len(bits) != 3: - raise InvalidRequestLine(bytes_to_str(line_bytes)) - - # Method: RFC9110 Section 9 - self.method = bits[0] - - # nonstandard restriction, suitable for all IANA registered methods - # partially enforced in previous gunicorn versions - if not self.cfg.permit_unconventional_http_method: - if METHOD_BADCHAR_RE.search(self.method): - raise InvalidRequestMethod(self.method) - if not 3 <= len(bits[0]) <= 20: - raise InvalidRequestMethod(self.method) - # standard restriction: RFC9110 token - if not TOKEN_RE.fullmatch(self.method): - raise InvalidRequestMethod(self.method) - # nonstandard and dangerous - # methods are merely uppercase by convention, no case-insensitive treatment is intended - if self.cfg.casefold_http_method: - self.method = self.method.upper() - - # URI - self.uri = bits[1] - - # Python stdlib explicitly tells us it will not perform validation. - # https://docs.python.org/3/library/urllib.parse.html#url-parsing-security - # There are *four* `request-target` forms in rfc9112, none of them can be empty: - # 1. origin-form, which starts with a slash - # 2. absolute-form, which starts with a non-empty scheme - # 3. authority-form, (for CONNECT) which contains a colon after the host - # 4. 
asterisk-form, which is an asterisk (`\x2A`) - # => manually reject one always invalid URI: empty - if len(self.uri) == 0: - raise InvalidRequestLine(bytes_to_str(line_bytes)) - - try: - parts = split_request_uri(self.uri) - except ValueError: - raise InvalidRequestLine(bytes_to_str(line_bytes)) - self.path = parts.path or "" - self.query = parts.query or "" - self.fragment = parts.fragment or "" - - # Version - match = VERSION_RE.fullmatch(bits[2]) - if match is None: - raise InvalidHTTPVersion(bits[2]) - self.version = (int(match.group(1)), int(match.group(2))) - if not (1, 0) <= self.version < (2, 0): - # if ever relaxing this, carefully review Content-Encoding processing - if not self.cfg.permit_unconventional_http_version: - raise InvalidHTTPVersion(self.version) - - def set_body_reader(self): - super().set_body_reader() - if isinstance(self.body.reader, EOFReader): - self.body = Body(LengthReader(self.unreader, 0)) diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/parser.py b/venv/lib/python3.10/site-packages/gunicorn/http/parser.py deleted file mode 100644 index 88da17a..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/http/parser.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -from gunicorn.http.message import Request -from gunicorn.http.unreader import SocketUnreader, IterUnreader - - -class Parser: - - mesg_class = None - - def __init__(self, cfg, source, source_addr): - self.cfg = cfg - if hasattr(source, "recv"): - self.unreader = SocketUnreader(source) - else: - self.unreader = IterUnreader(source) - self.mesg = None - self.source_addr = source_addr - - # request counter (for keepalive connetions) - self.req_count = 0 - - def __iter__(self): - return self - - def __next__(self): - # Stop if HTTP dictates a stop. - if self.mesg and self.mesg.should_close(): - raise StopIteration() - - # Discard any unread body of the previous message - if self.mesg: - data = self.mesg.body.read(8192) - while data: - data = self.mesg.body.read(8192) - - # Parse the next request - self.req_count += 1 - self.mesg = self.mesg_class(self.cfg, self.unreader, self.source_addr, self.req_count) - if not self.mesg: - raise StopIteration() - return self.mesg - - next = __next__ - - -class RequestParser(Parser): - - mesg_class = Request diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/unreader.py b/venv/lib/python3.10/site-packages/gunicorn/http/unreader.py deleted file mode 100644 index 9aadfbc..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/http/unreader.py +++ /dev/null @@ -1,78 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -import io -import os - -# Classes that can undo reading data from -# a given type of data source. 
- - -class Unreader: - def __init__(self): - self.buf = io.BytesIO() - - def chunk(self): - raise NotImplementedError() - - def read(self, size=None): - if size is not None and not isinstance(size, int): - raise TypeError("size parameter must be an int or long.") - - if size is not None: - if size == 0: - return b"" - if size < 0: - size = None - - self.buf.seek(0, os.SEEK_END) - - if size is None and self.buf.tell(): - ret = self.buf.getvalue() - self.buf = io.BytesIO() - return ret - if size is None: - d = self.chunk() - return d - - while self.buf.tell() < size: - chunk = self.chunk() - if not chunk: - ret = self.buf.getvalue() - self.buf = io.BytesIO() - return ret - self.buf.write(chunk) - data = self.buf.getvalue() - self.buf = io.BytesIO() - self.buf.write(data[size:]) - return data[:size] - - def unread(self, data): - self.buf.seek(0, os.SEEK_END) - self.buf.write(data) - - -class SocketUnreader(Unreader): - def __init__(self, sock, max_chunk=8192): - super().__init__() - self.sock = sock - self.mxchunk = max_chunk - - def chunk(self): - return self.sock.recv(self.mxchunk) - - -class IterUnreader(Unreader): - def __init__(self, iterable): - super().__init__() - self.iter = iter(iterable) - - def chunk(self): - if not self.iter: - return b"" - try: - return next(self.iter) - except StopIteration: - self.iter = None - return b"" diff --git a/venv/lib/python3.10/site-packages/gunicorn/http/wsgi.py b/venv/lib/python3.10/site-packages/gunicorn/http/wsgi.py deleted file mode 100644 index 419ac50..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/http/wsgi.py +++ /dev/null @@ -1,401 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -import io -import logging -import os -import re -import sys - -from gunicorn.http.message import TOKEN_RE -from gunicorn.http.errors import ConfigurationProblem, InvalidHeader, InvalidHeaderName -from gunicorn import SERVER_SOFTWARE, SERVER -from gunicorn import util - -# Send files in at most 1GB blocks as some operating systems can have problems -# with sending files in blocks over 2GB. -BLKSIZE = 0x3FFFFFFF - -# RFC9110 5.5: field-vchar = VCHAR / obs-text -# RFC4234 B.1: VCHAR = 0x21-x07E = printable ASCII -HEADER_VALUE_RE = re.compile(r'[ \t\x21-\x7e\x80-\xff]*') - -log = logging.getLogger(__name__) - - -class FileWrapper: - - def __init__(self, filelike, blksize=8192): - self.filelike = filelike - self.blksize = blksize - if hasattr(filelike, 'close'): - self.close = filelike.close - - def __getitem__(self, key): - data = self.filelike.read(self.blksize) - if data: - return data - raise IndexError - - -class WSGIErrorsWrapper(io.RawIOBase): - - def __init__(self, cfg): - # There is no public __init__ method for RawIOBase so - # we don't need to call super() in the __init__ method. 
- # pylint: disable=super-init-not-called - errorlog = logging.getLogger("gunicorn.error") - handlers = errorlog.handlers - self.streams = [] - - if cfg.errorlog == "-": - self.streams.append(sys.stderr) - handlers = handlers[1:] - - for h in handlers: - if hasattr(h, "stream"): - self.streams.append(h.stream) - - def write(self, data): - for stream in self.streams: - try: - stream.write(data) - except UnicodeError: - stream.write(data.encode("UTF-8")) - stream.flush() - - -def base_environ(cfg): - return { - "wsgi.errors": WSGIErrorsWrapper(cfg), - "wsgi.version": (1, 0), - "wsgi.multithread": False, - "wsgi.multiprocess": (cfg.workers > 1), - "wsgi.run_once": False, - "wsgi.file_wrapper": FileWrapper, - "wsgi.input_terminated": True, - "SERVER_SOFTWARE": SERVER_SOFTWARE, - } - - -def default_environ(req, sock, cfg): - env = base_environ(cfg) - env.update({ - "wsgi.input": req.body, - "gunicorn.socket": sock, - "REQUEST_METHOD": req.method, - "QUERY_STRING": req.query, - "RAW_URI": req.uri, - "SERVER_PROTOCOL": "HTTP/%s" % ".".join([str(v) for v in req.version]) - }) - return env - - -def proxy_environ(req): - info = req.proxy_protocol_info - - if not info: - return {} - - return { - "PROXY_PROTOCOL": info["proxy_protocol"], - "REMOTE_ADDR": info["client_addr"], - "REMOTE_PORT": str(info["client_port"]), - "PROXY_ADDR": info["proxy_addr"], - "PROXY_PORT": str(info["proxy_port"]), - } - - -def create(req, sock, client, server, cfg): - resp = Response(req, sock, cfg) - - # set initial environ - environ = default_environ(req, sock, cfg) - - # default variables - host = None - script_name = os.environ.get("SCRIPT_NAME", "") - - # add the headers to the environ - for hdr_name, hdr_value in req.headers: - if hdr_name == "EXPECT": - # handle expect - if hdr_value.lower() == "100-continue": - sock.send(b"HTTP/1.1 100 Continue\r\n\r\n") - elif hdr_name == 'HOST': - host = hdr_value - elif hdr_name == "SCRIPT_NAME": - script_name = hdr_value - elif hdr_name == "CONTENT-TYPE": - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == "CONTENT-LENGTH": - environ['CONTENT_LENGTH'] = hdr_value - continue - - # do not change lightly, this is a common source of security problems - # RFC9110 Section 17.10 discourages ambiguous or incomplete mappings - key = 'HTTP_' + hdr_name.replace('-', '_') - if key in environ: - hdr_value = "%s,%s" % (environ[key], hdr_value) - environ[key] = hdr_value - - # set the url scheme - environ['wsgi.url_scheme'] = req.scheme - - # set the REMOTE_* keys in environ - # authors should be aware that REMOTE_HOST and REMOTE_ADDR - # may not qualify the remote addr: - # http://www.ietf.org/rfc/rfc3875 - if isinstance(client, str): - environ['REMOTE_ADDR'] = client - elif isinstance(client, bytes): - environ['REMOTE_ADDR'] = client.decode() - else: - environ['REMOTE_ADDR'] = client[0] - environ['REMOTE_PORT'] = str(client[1]) - - # handle the SERVER_* - # Normally only the application should use the Host header but since the - # WSGI spec doesn't support unix sockets, we are using it to create - # viable SERVER_* if possible. - if isinstance(server, str): - server = server.split(":") - if len(server) == 1: - # unix socket - if host: - server = host.split(':') - if len(server) == 1: - if req.scheme == "http": - server.append(80) - elif req.scheme == "https": - server.append(443) - else: - server.append('') - else: - # no host header given which means that we are not behind a - # proxy, so append an empty port. 
- server.append('') - environ['SERVER_NAME'] = server[0] - environ['SERVER_PORT'] = str(server[1]) - - # set the path and script name - path_info = req.path - if script_name: - if not path_info.startswith(script_name): - raise ConfigurationProblem( - "Request path %r does not start with SCRIPT_NAME %r" % - (path_info, script_name)) - path_info = path_info[len(script_name):] - environ['PATH_INFO'] = util.unquote_to_wsgi_str(path_info) - environ['SCRIPT_NAME'] = script_name - - # override the environ with the correct remote and server address if - # we are behind a proxy using the proxy protocol. - environ.update(proxy_environ(req)) - return resp, environ - - -class Response: - - def __init__(self, req, sock, cfg): - self.req = req - self.sock = sock - self.version = SERVER - self.status = None - self.chunked = False - self.must_close = False - self.headers = [] - self.headers_sent = False - self.response_length = None - self.sent = 0 - self.upgrade = False - self.cfg = cfg - - def force_close(self): - self.must_close = True - - def should_close(self): - if self.must_close or self.req.should_close(): - return True - if self.response_length is not None or self.chunked: - return False - if self.req.method == 'HEAD': - return False - if self.status_code < 200 or self.status_code in (204, 304): - return False - return True - - def start_response(self, status, headers, exc_info=None): - if exc_info: - try: - if self.status and self.headers_sent: - util.reraise(exc_info[0], exc_info[1], exc_info[2]) - finally: - exc_info = None - elif self.status is not None: - raise AssertionError("Response headers already set!") - - self.status = status - - # get the status code from the response here so we can use it to check - # the need for the connection header later without parsing the string - # each time. - try: - self.status_code = int(self.status.split()[0]) - except ValueError: - self.status_code = None - - self.process_headers(headers) - self.chunked = self.is_chunked() - return self.write - - def process_headers(self, headers): - for name, value in headers: - if not isinstance(name, str): - raise TypeError('%r is not a string' % name) - - if not TOKEN_RE.fullmatch(name): - raise InvalidHeaderName('%r' % name) - - if not isinstance(value, str): - raise TypeError('%r is not a string' % value) - - if not HEADER_VALUE_RE.fullmatch(value): - raise InvalidHeader('%r' % value) - - # RFC9110 5.5 - value = value.strip(" \t") - lname = name.lower() - if lname == "content-length": - self.response_length = int(value) - elif util.is_hoppish(name): - if lname == "connection": - # handle websocket - if value.lower() == "upgrade": - self.upgrade = True - elif lname == "upgrade": - if value.lower() == "websocket": - self.headers.append((name, value)) - - # ignore hopbyhop headers - continue - self.headers.append((name, value)) - - def is_chunked(self): - # Only use chunked responses when the client is - # speaking HTTP/1.1 or newer and there was - # no Content-Length header set. - if self.response_length is not None: - return False - elif self.req.version <= (1, 0): - return False - elif self.req.method == 'HEAD': - # Responses to a HEAD request MUST NOT contain a response body. - return False - elif self.status_code in (204, 304): - # Do not use chunked responses when the response is guaranteed to - # not have a response body. 
- return False - return True - - def default_headers(self): - # set the connection header - if self.upgrade: - connection = "upgrade" - elif self.should_close(): - connection = "close" - else: - connection = "keep-alive" - - headers = [ - "HTTP/%s.%s %s\r\n" % (self.req.version[0], - self.req.version[1], self.status), - "Server: %s\r\n" % self.version, - "Date: %s\r\n" % util.http_date(), - "Connection: %s\r\n" % connection - ] - if self.chunked: - headers.append("Transfer-Encoding: chunked\r\n") - return headers - - def send_headers(self): - if self.headers_sent: - return - tosend = self.default_headers() - tosend.extend(["%s: %s\r\n" % (k, v) for k, v in self.headers]) - - header_str = "%s\r\n" % "".join(tosend) - util.write(self.sock, util.to_bytestring(header_str, "latin-1")) - self.headers_sent = True - - def write(self, arg): - self.send_headers() - if not isinstance(arg, bytes): - raise TypeError('%r is not a byte' % arg) - arglen = len(arg) - tosend = arglen - if self.response_length is not None: - if self.sent >= self.response_length: - # Never write more than self.response_length bytes - return - - tosend = min(self.response_length - self.sent, tosend) - if tosend < arglen: - arg = arg[:tosend] - - # Sending an empty chunk signals the end of the - # response and prematurely closes the response - if self.chunked and tosend == 0: - return - - self.sent += tosend - util.write(self.sock, arg, self.chunked) - - def can_sendfile(self): - return self.cfg.sendfile is not False - - def sendfile(self, respiter): - if self.cfg.is_ssl or not self.can_sendfile(): - return False - - if not util.has_fileno(respiter.filelike): - return False - - fileno = respiter.filelike.fileno() - try: - offset = os.lseek(fileno, 0, os.SEEK_CUR) - if self.response_length is None: - filesize = os.fstat(fileno).st_size - nbytes = filesize - offset - else: - nbytes = self.response_length - except (OSError, io.UnsupportedOperation): - return False - - self.send_headers() - - if self.is_chunked(): - chunk_size = "%X\r\n" % nbytes - self.sock.sendall(chunk_size.encode('utf-8')) - if nbytes > 0: - self.sock.sendfile(respiter.filelike, offset=offset, count=nbytes) - - if self.is_chunked(): - self.sock.sendall(b"\r\n") - - os.lseek(fileno, offset, os.SEEK_SET) - - return True - - def write_file(self, respiter): - if not self.sendfile(respiter): - for item in respiter: - self.write(item) - - def close(self): - if not self.headers_sent: - self.send_headers() - if self.chunked: - util.write_chunk(self.sock, b"") diff --git a/venv/lib/python3.10/site-packages/gunicorn/instrument/__init__.py b/venv/lib/python3.10/site-packages/gunicorn/instrument/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/gunicorn/instrument/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/instrument/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 9653521..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/instrument/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/instrument/__pycache__/statsd.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/instrument/__pycache__/statsd.cpython-310.pyc deleted file mode 100644 index 5dede3f..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/instrument/__pycache__/statsd.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/instrument/statsd.py 
b/venv/lib/python3.10/site-packages/gunicorn/instrument/statsd.py deleted file mode 100644 index 7bc4e6f..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/instrument/statsd.py +++ /dev/null @@ -1,134 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -"Bare-bones implementation of statsD's protocol, client-side" - -import logging -import socket -from re import sub - -from gunicorn.glogging import Logger - -# Instrumentation constants -METRIC_VAR = "metric" -VALUE_VAR = "value" -MTYPE_VAR = "mtype" -GAUGE_TYPE = "gauge" -COUNTER_TYPE = "counter" -HISTOGRAM_TYPE = "histogram" - - -class Statsd(Logger): - """statsD-based instrumentation, that passes as a logger - """ - def __init__(self, cfg): - Logger.__init__(self, cfg) - self.prefix = sub(r"^(.+[^.]+)\.*$", "\\g<1>.", cfg.statsd_prefix) - - if isinstance(cfg.statsd_host, str): - address_family = socket.AF_UNIX - else: - address_family = socket.AF_INET - - try: - self.sock = socket.socket(address_family, socket.SOCK_DGRAM) - self.sock.connect(cfg.statsd_host) - except Exception: - self.sock = None - - self.dogstatsd_tags = cfg.dogstatsd_tags - - # Log errors and warnings - def critical(self, msg, *args, **kwargs): - Logger.critical(self, msg, *args, **kwargs) - self.increment("gunicorn.log.critical", 1) - - def error(self, msg, *args, **kwargs): - Logger.error(self, msg, *args, **kwargs) - self.increment("gunicorn.log.error", 1) - - def warning(self, msg, *args, **kwargs): - Logger.warning(self, msg, *args, **kwargs) - self.increment("gunicorn.log.warning", 1) - - def exception(self, msg, *args, **kwargs): - Logger.exception(self, msg, *args, **kwargs) - self.increment("gunicorn.log.exception", 1) - - # Special treatment for info, the most common log level - def info(self, msg, *args, **kwargs): - self.log(logging.INFO, msg, *args, **kwargs) - - # skip the run-of-the-mill logs - def debug(self, msg, *args, **kwargs): - self.log(logging.DEBUG, msg, *args, **kwargs) - - def log(self, lvl, msg, *args, **kwargs): - """Log a given statistic if metric, value and type are present - """ - try: - extra = kwargs.get("extra", None) - if extra is not None: - metric = extra.get(METRIC_VAR, None) - value = extra.get(VALUE_VAR, None) - typ = extra.get(MTYPE_VAR, None) - if metric and value and typ: - if typ == GAUGE_TYPE: - self.gauge(metric, value) - elif typ == COUNTER_TYPE: - self.increment(metric, value) - elif typ == HISTOGRAM_TYPE: - self.histogram(metric, value) - else: - pass - - # Log to parent logger only if there is something to say - if msg: - Logger.log(self, lvl, msg, *args, **kwargs) - except Exception: - Logger.warning(self, "Failed to log to statsd", exc_info=True) - - # access logging - def access(self, resp, req, environ, request_time): - """Measure request duration - request_time is a datetime.timedelta - """ - Logger.access(self, resp, req, environ, request_time) - duration_in_ms = request_time.seconds * 1000 + float(request_time.microseconds) / 10 ** 3 - status = resp.status - if isinstance(status, bytes): - status = status.decode('utf-8') - if isinstance(status, str): - status = int(status.split(None, 1)[0]) - self.histogram("gunicorn.request.duration", duration_in_ms) - self.increment("gunicorn.requests", 1) - self.increment("gunicorn.request.status.%d" % status, 1) - - # statsD methods - # you can use those directly if you want - def gauge(self, name, value): - self._sock_send("{0}{1}:{2}|g".format(self.prefix, name, value)) - - def increment(self, name, 
value, sampling_rate=1.0): - self._sock_send("{0}{1}:{2}|c|@{3}".format(self.prefix, name, value, sampling_rate)) - - def decrement(self, name, value, sampling_rate=1.0): - self._sock_send("{0}{1}:-{2}|c|@{3}".format(self.prefix, name, value, sampling_rate)) - - def histogram(self, name, value): - self._sock_send("{0}{1}:{2}|ms".format(self.prefix, name, value)) - - def _sock_send(self, msg): - try: - if isinstance(msg, str): - msg = msg.encode("ascii") - - # http://docs.datadoghq.com/guides/dogstatsd/#datagram-format - if self.dogstatsd_tags: - msg = msg + b"|#" + self.dogstatsd_tags.encode('ascii') - - if self.sock: - self.sock.send(msg) - except Exception: - Logger.warning(self, "Error sending message to statsd", exc_info=True) diff --git a/venv/lib/python3.10/site-packages/gunicorn/pidfile.py b/venv/lib/python3.10/site-packages/gunicorn/pidfile.py deleted file mode 100644 index b171f7d..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/pidfile.py +++ /dev/null @@ -1,85 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -import errno -import os -import tempfile - - -class Pidfile: - """\ - Manage a PID file. If a specific name is provided - it and '"%s.oldpid" % name' will be used. Otherwise - we create a temp file using os.mkstemp. - """ - - def __init__(self, fname): - self.fname = fname - self.pid = None - - def create(self, pid): - oldpid = self.validate() - if oldpid: - if oldpid == os.getpid(): - return - msg = "Already running on PID %s (or pid file '%s' is stale)" - raise RuntimeError(msg % (oldpid, self.fname)) - - self.pid = pid - - # Write pidfile - fdir = os.path.dirname(self.fname) - if fdir and not os.path.isdir(fdir): - raise RuntimeError("%s doesn't exist. Can't create pidfile." % fdir) - fd, fname = tempfile.mkstemp(dir=fdir) - os.write(fd, ("%s\n" % self.pid).encode('utf-8')) - if self.fname: - os.rename(fname, self.fname) - else: - self.fname = fname - os.close(fd) - - # set permissions to -rw-r--r-- - os.chmod(self.fname, 420) - - def rename(self, path): - self.unlink() - self.fname = path - self.create(self.pid) - - def unlink(self): - """ delete pidfile""" - try: - with open(self.fname) as f: - pid1 = int(f.read() or 0) - - if pid1 == self.pid: - os.unlink(self.fname) - except Exception: - pass - - def validate(self): - """ Validate pidfile and make it stale if needed""" - if not self.fname: - return - try: - with open(self.fname) as f: - try: - wpid = int(f.read()) - except ValueError: - return - - try: - os.kill(wpid, 0) - return wpid - except OSError as e: - if e.args[0] == errno.EPERM: - return wpid - if e.args[0] == errno.ESRCH: - return - raise - except OSError as e: - if e.args[0] == errno.ENOENT: - return - raise diff --git a/venv/lib/python3.10/site-packages/gunicorn/reloader.py b/venv/lib/python3.10/site-packages/gunicorn/reloader.py deleted file mode 100644 index 1c67f2a..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/reloader.py +++ /dev/null @@ -1,131 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. 
-# pylint: disable=no-else-continue - -import os -import os.path -import re -import sys -import time -import threading - -COMPILED_EXT_RE = re.compile(r'py[co]$') - - -class Reloader(threading.Thread): - def __init__(self, extra_files=None, interval=1, callback=None): - super().__init__() - self.daemon = True - self._extra_files = set(extra_files or ()) - self._interval = interval - self._callback = callback - - def add_extra_file(self, filename): - self._extra_files.add(filename) - - def get_files(self): - fnames = [ - COMPILED_EXT_RE.sub('py', module.__file__) - for module in tuple(sys.modules.values()) - if getattr(module, '__file__', None) - ] - - fnames.extend(self._extra_files) - - return fnames - - def run(self): - mtimes = {} - while True: - for filename in self.get_files(): - try: - mtime = os.stat(filename).st_mtime - except OSError: - continue - old_time = mtimes.get(filename) - if old_time is None: - mtimes[filename] = mtime - continue - elif mtime > old_time: - if self._callback: - self._callback(filename) - time.sleep(self._interval) - - -has_inotify = False -if sys.platform.startswith('linux'): - try: - from inotify.adapters import Inotify - import inotify.constants - has_inotify = True - except ImportError: - pass - - -if has_inotify: - - class InotifyReloader(threading.Thread): - event_mask = (inotify.constants.IN_CREATE | inotify.constants.IN_DELETE - | inotify.constants.IN_DELETE_SELF | inotify.constants.IN_MODIFY - | inotify.constants.IN_MOVE_SELF | inotify.constants.IN_MOVED_FROM - | inotify.constants.IN_MOVED_TO) - - def __init__(self, extra_files=None, callback=None): - super().__init__() - self.daemon = True - self._callback = callback - self._dirs = set() - self._watcher = Inotify() - - for extra_file in extra_files: - self.add_extra_file(extra_file) - - def add_extra_file(self, filename): - dirname = os.path.dirname(filename) - - if dirname in self._dirs: - return - - self._watcher.add_watch(dirname, mask=self.event_mask) - self._dirs.add(dirname) - - def get_dirs(self): - fnames = [ - os.path.dirname(os.path.abspath(COMPILED_EXT_RE.sub('py', module.__file__))) - for module in tuple(sys.modules.values()) - if getattr(module, '__file__', None) - ] - - return set(fnames) - - def run(self): - self._dirs = self.get_dirs() - - for dirname in self._dirs: - if os.path.isdir(dirname): - self._watcher.add_watch(dirname, mask=self.event_mask) - - for event in self._watcher.event_gen(): - if event is None: - continue - - filename = event[3] - - self._callback(filename) - -else: - - class InotifyReloader: - def __init__(self, extra_files=None, callback=None): - raise ImportError('You must have the inotify module installed to ' - 'use the inotify reloader') - - -preferred_reloader = InotifyReloader if has_inotify else Reloader - -reloader_engines = { - 'auto': preferred_reloader, - 'poll': Reloader, - 'inotify': InotifyReloader, -} diff --git a/venv/lib/python3.10/site-packages/gunicorn/sock.py b/venv/lib/python3.10/site-packages/gunicorn/sock.py deleted file mode 100644 index eb2b6fa..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/sock.py +++ /dev/null @@ -1,231 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. 
- -import errno -import os -import socket -import ssl -import stat -import sys -import time - -from gunicorn import util - - -class BaseSocket: - - def __init__(self, address, conf, log, fd=None): - self.log = log - self.conf = conf - - self.cfg_addr = address - if fd is None: - sock = socket.socket(self.FAMILY, socket.SOCK_STREAM) - bound = False - else: - sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM) - os.close(fd) - bound = True - - self.sock = self.set_options(sock, bound=bound) - - def __str__(self): - return "" % self.sock.fileno() - - def __getattr__(self, name): - return getattr(self.sock, name) - - def set_options(self, sock, bound=False): - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if (self.conf.reuse_port - and hasattr(socket, 'SO_REUSEPORT')): # pragma: no cover - try: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - except OSError as err: - if err.errno not in (errno.ENOPROTOOPT, errno.EINVAL): - raise - if not bound: - self.bind(sock) - sock.setblocking(0) - - # make sure that the socket can be inherited - if hasattr(sock, "set_inheritable"): - sock.set_inheritable(True) - - sock.listen(self.conf.backlog) - return sock - - def bind(self, sock): - sock.bind(self.cfg_addr) - - def close(self): - if self.sock is None: - return - - try: - self.sock.close() - except OSError as e: - self.log.info("Error while closing socket %s", str(e)) - - self.sock = None - - -class TCPSocket(BaseSocket): - - FAMILY = socket.AF_INET - - def __str__(self): - if self.conf.is_ssl: - scheme = "https" - else: - scheme = "http" - - addr = self.sock.getsockname() - return "%s://%s:%d" % (scheme, addr[0], addr[1]) - - def set_options(self, sock, bound=False): - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - return super().set_options(sock, bound=bound) - - -class TCP6Socket(TCPSocket): - - FAMILY = socket.AF_INET6 - - def __str__(self): - (host, port, _, _) = self.sock.getsockname() - return "http://[%s]:%d" % (host, port) - - -class UnixSocket(BaseSocket): - - FAMILY = socket.AF_UNIX - - def __init__(self, addr, conf, log, fd=None): - if fd is None: - try: - st = os.stat(addr) - except OSError as e: - if e.args[0] != errno.ENOENT: - raise - else: - if stat.S_ISSOCK(st.st_mode): - os.remove(addr) - else: - raise ValueError("%r is not a socket" % addr) - super().__init__(addr, conf, log, fd=fd) - - def __str__(self): - return "unix:%s" % self.cfg_addr - - def bind(self, sock): - old_umask = os.umask(self.conf.umask) - sock.bind(self.cfg_addr) - util.chown(self.cfg_addr, self.conf.uid, self.conf.gid) - os.umask(old_umask) - - -def _sock_type(addr): - if isinstance(addr, tuple): - if util.is_ipv6(addr[0]): - sock_type = TCP6Socket - else: - sock_type = TCPSocket - elif isinstance(addr, (str, bytes)): - sock_type = UnixSocket - else: - raise TypeError("Unable to create socket from: %r" % addr) - return sock_type - - -def create_sockets(conf, log, fds=None): - """ - Create a new socket for the configured addresses or file descriptors. - - If a configured address is a tuple then a TCP socket is created. - If it is a string, a Unix socket is created. Otherwise, a TypeError is - raised. 
- """ - listeners = [] - - # get it only once - addr = conf.address - fdaddr = [bind for bind in addr if isinstance(bind, int)] - if fds: - fdaddr += list(fds) - laddr = [bind for bind in addr if not isinstance(bind, int)] - - # check ssl config early to raise the error on startup - # only the certfile is needed since it can contains the keyfile - if conf.certfile and not os.path.exists(conf.certfile): - raise ValueError('certfile "%s" does not exist' % conf.certfile) - - if conf.keyfile and not os.path.exists(conf.keyfile): - raise ValueError('keyfile "%s" does not exist' % conf.keyfile) - - # sockets are already bound - if fdaddr: - for fd in fdaddr: - sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM) - sock_name = sock.getsockname() - sock_type = _sock_type(sock_name) - listener = sock_type(sock_name, conf, log, fd=fd) - listeners.append(listener) - - return listeners - - # no sockets is bound, first initialization of gunicorn in this env. - for addr in laddr: - sock_type = _sock_type(addr) - sock = None - for i in range(5): - try: - sock = sock_type(addr, conf, log) - except OSError as e: - if e.args[0] == errno.EADDRINUSE: - log.error("Connection in use: %s", str(addr)) - if e.args[0] == errno.EADDRNOTAVAIL: - log.error("Invalid address: %s", str(addr)) - msg = "connection to {addr} failed: {error}" - log.error(msg.format(addr=str(addr), error=str(e))) - if i < 5: - log.debug("Retrying in 1 second.") - time.sleep(1) - else: - break - - if sock is None: - log.error("Can't connect to %s", str(addr)) - sys.exit(1) - - listeners.append(sock) - - return listeners - - -def close_sockets(listeners, unlink=True): - for sock in listeners: - sock_name = sock.getsockname() - sock.close() - if unlink and _sock_type(sock_name) is UnixSocket: - os.unlink(sock_name) - - -def ssl_context(conf): - def default_ssl_context_factory(): - context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH, cafile=conf.ca_certs) - context.load_cert_chain(certfile=conf.certfile, keyfile=conf.keyfile) - context.verify_mode = conf.cert_reqs - if conf.ciphers: - context.set_ciphers(conf.ciphers) - return context - - return conf.ssl_context(conf, default_ssl_context_factory) - - -def ssl_wrap_socket(sock, conf): - return ssl_context(conf).wrap_socket(sock, - server_side=True, - suppress_ragged_eofs=conf.suppress_ragged_eofs, - do_handshake_on_connect=conf.do_handshake_on_connect) diff --git a/venv/lib/python3.10/site-packages/gunicorn/systemd.py b/venv/lib/python3.10/site-packages/gunicorn/systemd.py deleted file mode 100644 index 9b18550..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/systemd.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -import os -import socket - -SD_LISTEN_FDS_START = 3 - - -def listen_fds(unset_environment=True): - """ - Get the number of sockets inherited from systemd socket activation. - - :param unset_environment: clear systemd environment variables unless False - :type unset_environment: bool - :return: the number of sockets to inherit from systemd socket activation - :rtype: int - - Returns zero immediately if $LISTEN_PID is not set to the current pid. - Otherwise, returns the number of systemd activation sockets specified by - $LISTEN_FDS. - - When $LISTEN_PID matches the current pid, unsets the environment variables - unless the ``unset_environment`` flag is ``False``. - - .. 
note:: - Unlike the sd_listen_fds C function, this implementation does not set - the FD_CLOEXEC flag because the gunicorn arbiter never needs to do this. - - .. seealso:: - ``_ - - """ - fds = int(os.environ.get('LISTEN_FDS', 0)) - listen_pid = int(os.environ.get('LISTEN_PID', 0)) - - if listen_pid != os.getpid(): - return 0 - - if unset_environment: - os.environ.pop('LISTEN_PID', None) - os.environ.pop('LISTEN_FDS', None) - - return fds - - -def sd_notify(state, logger, unset_environment=False): - """Send a notification to systemd. state is a string; see - the man page of sd_notify (http://www.freedesktop.org/software/systemd/man/sd_notify.html) - for a description of the allowable values. - - If the unset_environment parameter is True, sd_notify() will unset - the $NOTIFY_SOCKET environment variable before returning (regardless of - whether the function call itself succeeded or not). Further calls to - sd_notify() will then fail, but the variable is no longer inherited by - child processes. - """ - - addr = os.environ.get('NOTIFY_SOCKET') - if addr is None: - # not run in a service, just a noop - return - try: - sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM | socket.SOCK_CLOEXEC) - if addr[0] == '@': - addr = '\0' + addr[1:] - sock.connect(addr) - sock.sendall(state.encode('utf-8')) - except Exception: - logger.debug("Exception while invoking sd_notify()", exc_info=True) - finally: - if unset_environment: - os.environ.pop('NOTIFY_SOCKET') - sock.close() diff --git a/venv/lib/python3.10/site-packages/gunicorn/util.py b/venv/lib/python3.10/site-packages/gunicorn/util.py deleted file mode 100644 index ecd8174..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/util.py +++ /dev/null @@ -1,653 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. -import ast -import email.utils -import errno -import fcntl -import html -import importlib -import inspect -import io -import logging -import os -import pwd -import random -import re -import socket -import sys -import textwrap -import time -import traceback -import warnings - -try: - import importlib.metadata as importlib_metadata -except (ModuleNotFoundError, ImportError): - import importlib_metadata - -from gunicorn.errors import AppImportError -from gunicorn.workers import SUPPORTED_WORKERS -import urllib.parse - -REDIRECT_TO = getattr(os, 'devnull', '/dev/null') - -# Server and Date aren't technically hop-by-hop -# headers, but they are in the purview of the -# origin server which the WSGI spec says we should -# act like. So we drop them and add our own. -# -# In the future, concatenation server header values -# might be better, but nothing else does it and -# dropping them is easier. 
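A minimal sketch (illustrative only, not part of this diff) of how the hop_headers set defined just below is typically applied when filtering an application's response headers; strip_hop_headers is a hypothetical helper name, not gunicorn API.

def strip_hop_headers(headers):
    # headers: iterable of (name, value) tuples from a WSGI application.
    # hop_headers is the module-level set defined immediately below.
    return [(name, value) for name, value in headers
            if name.lower().strip() not in hop_headers]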
-hop_headers = set(""" - connection keep-alive proxy-authenticate proxy-authorization - te trailers transfer-encoding upgrade - server date - """.split()) - -try: - from setproctitle import setproctitle - - def _setproctitle(title): - setproctitle("gunicorn: %s" % title) -except ImportError: - def _setproctitle(title): - pass - - -def load_entry_point(distribution, group, name): - dist_obj = importlib_metadata.distribution(distribution) - eps = [ep for ep in dist_obj.entry_points - if ep.group == group and ep.name == name] - if not eps: - raise ImportError("Entry point %r not found" % ((group, name),)) - return eps[0].load() - - -def load_class(uri, default="gunicorn.workers.sync.SyncWorker", - section="gunicorn.workers"): - if inspect.isclass(uri): - return uri - if uri.startswith("egg:"): - # uses entry points - entry_str = uri.split("egg:")[1] - try: - dist, name = entry_str.rsplit("#", 1) - except ValueError: - dist = entry_str - name = default - - try: - return load_entry_point(dist, section, name) - except Exception: - exc = traceback.format_exc() - msg = "class uri %r invalid or not found: \n\n[%s]" - raise RuntimeError(msg % (uri, exc)) - else: - components = uri.split('.') - if len(components) == 1: - while True: - if uri.startswith("#"): - uri = uri[1:] - - if uri in SUPPORTED_WORKERS: - components = SUPPORTED_WORKERS[uri].split(".") - break - - try: - return load_entry_point( - "gunicorn", section, uri - ) - except Exception: - exc = traceback.format_exc() - msg = "class uri %r invalid or not found: \n\n[%s]" - raise RuntimeError(msg % (uri, exc)) - - klass = components.pop(-1) - - try: - mod = importlib.import_module('.'.join(components)) - except Exception: - exc = traceback.format_exc() - msg = "class uri %r invalid or not found: \n\n[%s]" - raise RuntimeError(msg % (uri, exc)) - return getattr(mod, klass) - - -positionals = ( - inspect.Parameter.POSITIONAL_ONLY, - inspect.Parameter.POSITIONAL_OR_KEYWORD, -) - - -def get_arity(f): - sig = inspect.signature(f) - arity = 0 - - for param in sig.parameters.values(): - if param.kind in positionals: - arity += 1 - - return arity - - -def get_username(uid): - """ get the username for a user id""" - return pwd.getpwuid(uid).pw_name - - -def set_owner_process(uid, gid, initgroups=False): - """ set user and group of workers processes """ - - if gid: - if uid: - try: - username = get_username(uid) - except KeyError: - initgroups = False - - # versions of python < 2.6.2 don't manage unsigned int for - # groups like on osx or fedora - gid = abs(gid) & 0x7FFFFFFF - - if initgroups: - os.initgroups(username, gid) - elif gid != os.getgid(): - os.setgid(gid) - - if uid and uid != os.getuid(): - os.setuid(uid) - - -def chown(path, uid, gid): - os.chown(path, uid, gid) - - -if sys.platform.startswith("win"): - def _waitfor(func, pathname, waitall=False): - # Perform the operation - func(pathname) - # Now setup the wait loop - if waitall: - dirname = pathname - else: - dirname, name = os.path.split(pathname) - dirname = dirname or '.' - # Check for `pathname` to be removed from the filesystem. - # The exponential backoff of the timeout amounts to a total - # of ~1 second after which the deletion is probably an error - # anyway. - # Testing on a i7@4.3GHz shows that usually only 1 iteration is - # required when contention occurs. - timeout = 0.001 - while timeout < 1.0: - # Note we are only testing for the existence of the file(s) in - # the contents of the directory regardless of any security or - # access rights. 
If we have made it this far, we have sufficient - # permissions to do that much using Python's equivalent of the - # Windows API FindFirstFile. - # Other Windows APIs can fail or give incorrect results when - # dealing with files that are pending deletion. - L = os.listdir(dirname) - if not L if waitall else name in L: - return - # Increase the timeout and try again - time.sleep(timeout) - timeout *= 2 - warnings.warn('tests may fail, delete still pending for ' + pathname, - RuntimeWarning, stacklevel=4) - - def _unlink(filename): - _waitfor(os.unlink, filename) -else: - _unlink = os.unlink - - -def unlink(filename): - try: - _unlink(filename) - except OSError as error: - # The filename need not exist. - if error.errno not in (errno.ENOENT, errno.ENOTDIR): - raise - - -def is_ipv6(addr): - try: - socket.inet_pton(socket.AF_INET6, addr) - except OSError: # not a valid address - return False - except ValueError: # ipv6 not supported on this platform - return False - return True - - -def parse_address(netloc, default_port='8000'): - if re.match(r'unix:(//)?', netloc): - return re.split(r'unix:(//)?', netloc)[-1] - - if netloc.startswith("fd://"): - fd = netloc[5:] - try: - return int(fd) - except ValueError: - raise RuntimeError("%r is not a valid file descriptor." % fd) from None - - if netloc.startswith("tcp://"): - netloc = netloc.split("tcp://")[1] - host, port = netloc, default_port - - if '[' in netloc and ']' in netloc: - host = netloc.split(']')[0][1:] - port = (netloc.split(']:') + [default_port])[1] - elif ':' in netloc: - host, port = (netloc.split(':') + [default_port])[:2] - elif netloc == "": - host, port = "0.0.0.0", default_port - - try: - port = int(port) - except ValueError: - raise RuntimeError("%r is not a valid port number." % port) - - return host.lower(), port - - -def close_on_exec(fd): - flags = fcntl.fcntl(fd, fcntl.F_GETFD) - flags |= fcntl.FD_CLOEXEC - fcntl.fcntl(fd, fcntl.F_SETFD, flags) - - -def set_non_blocking(fd): - flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK - fcntl.fcntl(fd, fcntl.F_SETFL, flags) - - -def close(sock): - try: - sock.close() - except OSError: - pass - - -try: - from os import closerange -except ImportError: - def closerange(fd_low, fd_high): - # Iterate through and close all file descriptors. - for fd in range(fd_low, fd_high): - try: - os.close(fd) - except OSError: # ERROR, fd wasn't open to begin with (ignored) - pass - - -def write_chunk(sock, data): - if isinstance(data, str): - data = data.encode('utf-8') - chunk_size = "%X\r\n" % len(data) - chunk = b"".join([chunk_size.encode('utf-8'), data, b"\r\n"]) - sock.sendall(chunk) - - -def write(sock, data, chunked=False): - if chunked: - return write_chunk(sock, data) - sock.sendall(data) - - -def write_nonblock(sock, data, chunked=False): - timeout = sock.gettimeout() - if timeout != 0.0: - try: - sock.setblocking(0) - return write(sock, data, chunked) - finally: - sock.setblocking(1) - else: - return write(sock, data, chunked) - - -def write_error(sock, status_int, reason, mesg): - html_error = textwrap.dedent("""\ - - - %(reason)s - - -

- <h1><p>%(reason)s</p></h1>

- %(mesg)s - - - """) % {"reason": reason, "mesg": html.escape(mesg)} - - http = textwrap.dedent("""\ - HTTP/1.1 %s %s\r - Connection: close\r - Content-Type: text/html\r - Content-Length: %d\r - \r - %s""") % (str(status_int), reason, len(html_error), html_error) - write_nonblock(sock, http.encode('latin1')) - - -def _called_with_wrong_args(f): - """Check whether calling a function raised a ``TypeError`` because - the call failed or because something in the function raised the - error. - - :param f: The function that was called. - :return: ``True`` if the call failed. - """ - tb = sys.exc_info()[2] - - try: - while tb is not None: - if tb.tb_frame.f_code is f.__code__: - # In the function, it was called successfully. - return False - - tb = tb.tb_next - - # Didn't reach the function. - return True - finally: - # Delete tb to break a circular reference in Python 2. - # https://docs.python.org/2/library/sys.html#sys.exc_info - del tb - - -def import_app(module): - parts = module.split(":", 1) - if len(parts) == 1: - obj = "application" - else: - module, obj = parts[0], parts[1] - - try: - mod = importlib.import_module(module) - except ImportError: - if module.endswith(".py") and os.path.exists(module): - msg = "Failed to find application, did you mean '%s:%s'?" - raise ImportError(msg % (module.rsplit(".", 1)[0], obj)) - raise - - # Parse obj as a single expression to determine if it's a valid - # attribute name or function call. - try: - expression = ast.parse(obj, mode="eval").body - except SyntaxError: - raise AppImportError( - "Failed to parse %r as an attribute name or function call." % obj - ) - - if isinstance(expression, ast.Name): - name = expression.id - args = kwargs = None - elif isinstance(expression, ast.Call): - # Ensure the function name is an attribute name only. - if not isinstance(expression.func, ast.Name): - raise AppImportError("Function reference must be a simple name: %r" % obj) - - name = expression.func.id - - # Parse the positional and keyword arguments as literals. - try: - args = [ast.literal_eval(arg) for arg in expression.args] - kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expression.keywords} - except ValueError: - # literal_eval gives cryptic error messages, show a generic - # message with the full expression instead. - raise AppImportError( - "Failed to parse arguments as literal values: %r" % obj - ) - else: - raise AppImportError( - "Failed to parse %r as an attribute name or function call." % obj - ) - - is_debug = logging.root.level == logging.DEBUG - try: - app = getattr(mod, name) - except AttributeError: - if is_debug: - traceback.print_exception(*sys.exc_info()) - raise AppImportError("Failed to find attribute %r in %r." % (name, module)) - - # If the expression was a function call, call the retrieved object - # to get the real application. - if args is not None: - try: - app = app(*args, **kwargs) - except TypeError as e: - # If the TypeError was due to bad arguments to the factory - # function, show Python's nice error message without a - # traceback. - if _called_with_wrong_args(app): - raise AppImportError( - "".join(traceback.format_exception_only(TypeError, e)).strip() - ) - - # Otherwise it was raised from within the function, show the - # full traceback. 
- raise - - if app is None: - raise AppImportError("Failed to find application object: %r" % obj) - - if not callable(app): - raise AppImportError("Application object must be callable.") - return app - - -def getcwd(): - # get current path, try to use PWD env first - try: - a = os.stat(os.environ['PWD']) - b = os.stat(os.getcwd()) - if a.st_ino == b.st_ino and a.st_dev == b.st_dev: - cwd = os.environ['PWD'] - else: - cwd = os.getcwd() - except Exception: - cwd = os.getcwd() - return cwd - - -def http_date(timestamp=None): - """Return the current date and time formatted for a message header.""" - if timestamp is None: - timestamp = time.time() - s = email.utils.formatdate(timestamp, localtime=False, usegmt=True) - return s - - -def is_hoppish(header): - return header.lower().strip() in hop_headers - - -def daemonize(enable_stdio_inheritance=False): - """\ - Standard daemonization of a process. - http://www.faqs.org/faqs/unix-faq/programmer/faq/ section 1.7 - """ - if 'GUNICORN_FD' not in os.environ: - if os.fork(): - os._exit(0) - os.setsid() - - if os.fork(): - os._exit(0) - - os.umask(0o22) - - # In both the following any file descriptors above stdin - # stdout and stderr are left untouched. The inheritance - # option simply allows one to have output go to a file - # specified by way of shell redirection when not wanting - # to use --error-log option. - - if not enable_stdio_inheritance: - # Remap all of stdin, stdout and stderr on to - # /dev/null. The expectation is that users have - # specified the --error-log option. - - closerange(0, 3) - - fd_null = os.open(REDIRECT_TO, os.O_RDWR) - # PEP 446, make fd for /dev/null inheritable - os.set_inheritable(fd_null, True) - - # expect fd_null to be always 0 here, but in-case not ... - if fd_null != 0: - os.dup2(fd_null, 0) - - os.dup2(fd_null, 1) - os.dup2(fd_null, 2) - - else: - fd_null = os.open(REDIRECT_TO, os.O_RDWR) - - # Always redirect stdin to /dev/null as we would - # never expect to need to read interactive input. - - if fd_null != 0: - os.close(0) - os.dup2(fd_null, 0) - - # If stdout and stderr are still connected to - # their original file descriptors we check to see - # if they are associated with terminal devices. - # When they are we map them to /dev/null so that - # are still detached from any controlling terminal - # properly. If not we preserve them as they are. - # - # If stdin and stdout were not hooked up to the - # original file descriptors, then all bets are - # off and all we can really do is leave them as - # they were. - # - # This will allow 'gunicorn ... > output.log 2>&1' - # to work with stdout/stderr going to the file - # as expected. - # - # Note that if using --error-log option, the log - # file specified through shell redirection will - # only be used up until the log file specified - # by the option takes over. As it replaces stdout - # and stderr at the file descriptor level, then - # anything using stdout or stderr, including having - # cached a reference to them, will still work. 
- - def redirect(stream, fd_expect): - try: - fd = stream.fileno() - if fd == fd_expect and stream.isatty(): - os.close(fd) - os.dup2(fd_null, fd) - except AttributeError: - pass - - redirect(sys.stdout, 1) - redirect(sys.stderr, 2) - - -def seed(): - try: - random.seed(os.urandom(64)) - except NotImplementedError: - random.seed('%s.%s' % (time.time(), os.getpid())) - - -def check_is_writable(path): - try: - with open(path, 'a') as f: - f.close() - except OSError as e: - raise RuntimeError("Error: '%s' isn't writable [%r]" % (path, e)) - - -def to_bytestring(value, encoding="utf8"): - """Converts a string argument to a byte string""" - if isinstance(value, bytes): - return value - if not isinstance(value, str): - raise TypeError('%r is not a string' % value) - - return value.encode(encoding) - - -def has_fileno(obj): - if not hasattr(obj, "fileno"): - return False - - # check BytesIO case and maybe others - try: - obj.fileno() - except (AttributeError, OSError, io.UnsupportedOperation): - return False - - return True - - -def warn(msg): - print("!!!", file=sys.stderr) - - lines = msg.splitlines() - for i, line in enumerate(lines): - if i == 0: - line = "WARNING: %s" % line - print("!!! %s" % line, file=sys.stderr) - - print("!!!\n", file=sys.stderr) - sys.stderr.flush() - - -def make_fail_app(msg): - msg = to_bytestring(msg) - - def app(environ, start_response): - start_response("500 Internal Server Error", [ - ("Content-Type", "text/plain"), - ("Content-Length", str(len(msg))) - ]) - return [msg] - - return app - - -def split_request_uri(uri): - if uri.startswith("//"): - # When the path starts with //, urlsplit considers it as a - # relative uri while the RFC says we should consider it as abs_path - # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2 - # We use temporary dot prefix to workaround this behaviour - parts = urllib.parse.urlsplit("." + uri) - return parts._replace(path=parts.path[1:]) - - return urllib.parse.urlsplit(uri) - - -# From six.reraise -def reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - - -def bytes_to_str(b): - if isinstance(b, str): - return b - return str(b, 'latin1') - - -def unquote_to_wsgi_str(string): - return urllib.parse.unquote_to_bytes(string).decode('latin-1') diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__init__.py b/venv/lib/python3.10/site-packages/gunicorn/workers/__init__.py deleted file mode 100644 index 3da5f85..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/workers/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -# supported gunicorn workers. 
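A minimal sketch (illustrative only, not part of this diff) of how a bare worker name resolves through the SUPPORTED_WORKERS table defined just below; resolve_worker is a hypothetical helper, a simplified stand-in for the util.load_class() logic shown earlier in this diff.

import importlib

def resolve_worker(name):
    # e.g. "gthread" -> "gunicorn.workers.gthread.ThreadWorker"
    path = SUPPORTED_WORKERS[name]
    module_name, _, class_name = path.rpartition(".")
    return getattr(importlib.import_module(module_name), class_name)

# resolve_worker("sync") would return gunicorn.workers.sync.SyncWorker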
-SUPPORTED_WORKERS = { - "sync": "gunicorn.workers.sync.SyncWorker", - "eventlet": "gunicorn.workers.geventlet.EventletWorker", - "gevent": "gunicorn.workers.ggevent.GeventWorker", - "gevent_wsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker", - "gevent_pywsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker", - "tornado": "gunicorn.workers.gtornado.TornadoWorker", - "gthread": "gunicorn.workers.gthread.ThreadWorker", -} diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 575f855..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/base.cpython-310.pyc deleted file mode 100644 index 4963b7a..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/base_async.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/base_async.cpython-310.pyc deleted file mode 100644 index 25ccac3..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/base_async.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/geventlet.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/geventlet.cpython-310.pyc deleted file mode 100644 index deafe4e..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/geventlet.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/ggevent.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/ggevent.cpython-310.pyc deleted file mode 100644 index 7fcdcae..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/ggevent.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/gthread.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/gthread.cpython-310.pyc deleted file mode 100644 index 2c2b545..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/gthread.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/gtornado.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/gtornado.cpython-310.pyc deleted file mode 100644 index 77c5ffe..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/gtornado.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/sync.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/sync.cpython-310.pyc deleted file mode 100644 index d9f4524..0000000 Binary files a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/sync.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/workertmp.cpython-310.pyc b/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/workertmp.cpython-310.pyc deleted file mode 100644 index f77a955..0000000 Binary 
files a/venv/lib/python3.10/site-packages/gunicorn/workers/__pycache__/workertmp.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/base.py b/venv/lib/python3.10/site-packages/gunicorn/workers/base.py deleted file mode 100644 index 93c465c..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/workers/base.py +++ /dev/null @@ -1,287 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -import io -import os -import signal -import sys -import time -import traceback -from datetime import datetime -from random import randint -from ssl import SSLError - -from gunicorn import util -from gunicorn.http.errors import ( - ForbiddenProxyRequest, InvalidHeader, - InvalidHeaderName, InvalidHTTPVersion, - InvalidProxyLine, InvalidRequestLine, - InvalidRequestMethod, InvalidSchemeHeaders, - LimitRequestHeaders, LimitRequestLine, - UnsupportedTransferCoding, - ConfigurationProblem, ObsoleteFolding, -) -from gunicorn.http.wsgi import Response, default_environ -from gunicorn.reloader import reloader_engines -from gunicorn.workers.workertmp import WorkerTmp - - -class Worker: - - SIGNALS = [getattr(signal, "SIG%s" % x) for x in ( - "ABRT HUP QUIT INT TERM USR1 USR2 WINCH CHLD".split() - )] - - PIPE = [] - - def __init__(self, age, ppid, sockets, app, timeout, cfg, log): - """\ - This is called pre-fork so it shouldn't do anything to the - current process. If there's a need to make process wide - changes you'll want to do that in ``self.init_process()``. - """ - self.age = age - self.pid = "[booting]" - self.ppid = ppid - self.sockets = sockets - self.app = app - self.timeout = timeout - self.cfg = cfg - self.booted = False - self.aborted = False - self.reloader = None - - self.nr = 0 - - if cfg.max_requests > 0: - jitter = randint(0, cfg.max_requests_jitter) - self.max_requests = cfg.max_requests + jitter - else: - self.max_requests = sys.maxsize - - self.alive = True - self.log = log - self.tmp = WorkerTmp(cfg) - - def __str__(self): - return "" % self.pid - - def notify(self): - """\ - Your worker subclass must arrange to have this method called - once every ``self.timeout`` seconds. If you fail in accomplishing - this task, the master process will murder your workers. - """ - self.tmp.notify() - - def run(self): - """\ - This is the mainloop of a worker process. You should override - this method in a subclass to provide the intended behaviour - for your particular evil schemes. - """ - raise NotImplementedError() - - def init_process(self): - """\ - If you override this method in a subclass, the last statement - in the function should be to call this method with - super().init_process() so that the ``run()`` loop is initiated. 
- """ - - # set environment' variables - if self.cfg.env: - for k, v in self.cfg.env.items(): - os.environ[k] = v - - util.set_owner_process(self.cfg.uid, self.cfg.gid, - initgroups=self.cfg.initgroups) - - # Reseed the random number generator - util.seed() - - # For waking ourselves up - self.PIPE = os.pipe() - for p in self.PIPE: - util.set_non_blocking(p) - util.close_on_exec(p) - - # Prevent fd inheritance - for s in self.sockets: - util.close_on_exec(s) - util.close_on_exec(self.tmp.fileno()) - - self.wait_fds = self.sockets + [self.PIPE[0]] - - self.log.close_on_exec() - - self.init_signals() - - # start the reloader - if self.cfg.reload: - def changed(fname): - self.log.info("Worker reloading: %s modified", fname) - self.alive = False - os.write(self.PIPE[1], b"1") - self.cfg.worker_int(self) - time.sleep(0.1) - sys.exit(0) - - reloader_cls = reloader_engines[self.cfg.reload_engine] - self.reloader = reloader_cls(extra_files=self.cfg.reload_extra_files, - callback=changed) - - self.load_wsgi() - if self.reloader: - self.reloader.start() - - self.cfg.post_worker_init(self) - - # Enter main run loop - self.booted = True - self.run() - - def load_wsgi(self): - try: - self.wsgi = self.app.wsgi() - except SyntaxError as e: - if not self.cfg.reload: - raise - - self.log.exception(e) - - # fix from PR #1228 - # storing the traceback into exc_tb will create a circular reference. - # per https://docs.python.org/2/library/sys.html#sys.exc_info warning, - # delete the traceback after use. - try: - _, exc_val, exc_tb = sys.exc_info() - self.reloader.add_extra_file(exc_val.filename) - - tb_string = io.StringIO() - traceback.print_tb(exc_tb, file=tb_string) - self.wsgi = util.make_fail_app(tb_string.getvalue()) - finally: - del exc_tb - - def init_signals(self): - # reset signaling - for s in self.SIGNALS: - signal.signal(s, signal.SIG_DFL) - # init new signaling - signal.signal(signal.SIGQUIT, self.handle_quit) - signal.signal(signal.SIGTERM, self.handle_exit) - signal.signal(signal.SIGINT, self.handle_quit) - signal.signal(signal.SIGWINCH, self.handle_winch) - signal.signal(signal.SIGUSR1, self.handle_usr1) - signal.signal(signal.SIGABRT, self.handle_abort) - - # Don't let SIGTERM and SIGUSR1 disturb active requests - # by interrupting system calls - signal.siginterrupt(signal.SIGTERM, False) - signal.siginterrupt(signal.SIGUSR1, False) - - if hasattr(signal, 'set_wakeup_fd'): - signal.set_wakeup_fd(self.PIPE[1]) - - def handle_usr1(self, sig, frame): - self.log.reopen_files() - - def handle_exit(self, sig, frame): - self.alive = False - - def handle_quit(self, sig, frame): - self.alive = False - # worker_int callback - self.cfg.worker_int(self) - time.sleep(0.1) - sys.exit(0) - - def handle_abort(self, sig, frame): - self.alive = False - self.cfg.worker_abort(self) - sys.exit(1) - - def handle_error(self, req, client, addr, exc): - request_start = datetime.now() - addr = addr or ('', -1) # unix socket case - if isinstance(exc, ( - InvalidRequestLine, InvalidRequestMethod, - InvalidHTTPVersion, InvalidHeader, InvalidHeaderName, - LimitRequestLine, LimitRequestHeaders, - InvalidProxyLine, ForbiddenProxyRequest, - InvalidSchemeHeaders, UnsupportedTransferCoding, - ConfigurationProblem, ObsoleteFolding, - SSLError, - )): - - status_int = 400 - reason = "Bad Request" - - if isinstance(exc, InvalidRequestLine): - mesg = "Invalid Request Line '%s'" % str(exc) - elif isinstance(exc, InvalidRequestMethod): - mesg = "Invalid Method '%s'" % str(exc) - elif isinstance(exc, InvalidHTTPVersion): - mesg = 
"Invalid HTTP Version '%s'" % str(exc) - elif isinstance(exc, UnsupportedTransferCoding): - mesg = "%s" % str(exc) - status_int = 501 - elif isinstance(exc, ConfigurationProblem): - mesg = "%s" % str(exc) - status_int = 500 - elif isinstance(exc, ObsoleteFolding): - mesg = "%s" % str(exc) - elif isinstance(exc, (InvalidHeaderName, InvalidHeader,)): - mesg = "%s" % str(exc) - if not req and hasattr(exc, "req"): - req = exc.req # for access log - elif isinstance(exc, LimitRequestLine): - mesg = "%s" % str(exc) - elif isinstance(exc, LimitRequestHeaders): - reason = "Request Header Fields Too Large" - mesg = "Error parsing headers: '%s'" % str(exc) - status_int = 431 - elif isinstance(exc, InvalidProxyLine): - mesg = "'%s'" % str(exc) - elif isinstance(exc, ForbiddenProxyRequest): - reason = "Forbidden" - mesg = "Request forbidden" - status_int = 403 - elif isinstance(exc, InvalidSchemeHeaders): - mesg = "%s" % str(exc) - elif isinstance(exc, SSLError): - reason = "Forbidden" - mesg = "'%s'" % str(exc) - status_int = 403 - - msg = "Invalid request from ip={ip}: {error}" - self.log.warning(msg.format(ip=addr[0], error=str(exc))) - else: - if hasattr(req, "uri"): - self.log.exception("Error handling request %s", req.uri) - else: - self.log.exception("Error handling request (no URI read)") - status_int = 500 - reason = "Internal Server Error" - mesg = "" - - if req is not None: - request_time = datetime.now() - request_start - environ = default_environ(req, client, self.cfg) - environ['REMOTE_ADDR'] = addr[0] - environ['REMOTE_PORT'] = str(addr[1]) - resp = Response(req, client, self.cfg) - resp.status = "%s %s" % (status_int, reason) - resp.response_length = len(mesg) - self.log.access(resp, req, environ, request_time) - - try: - util.write_error(client, status_int, reason, mesg) - except Exception: - self.log.debug("Failed to send error message.") - - def handle_winch(self, sig, fname): - # Ignore SIGWINCH in worker. Fixes a crash on OpenBSD. - self.log.debug("worker: SIGWINCH ignored.") diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/base_async.py b/venv/lib/python3.10/site-packages/gunicorn/workers/base_async.py deleted file mode 100644 index 9466d6a..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/workers/base_async.py +++ /dev/null @@ -1,147 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. 
- -from datetime import datetime -import errno -import socket -import ssl -import sys - -from gunicorn import http -from gunicorn.http import wsgi -from gunicorn import util -from gunicorn.workers import base - -ALREADY_HANDLED = object() - - -class AsyncWorker(base.Worker): - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.worker_connections = self.cfg.worker_connections - - def timeout_ctx(self): - raise NotImplementedError() - - def is_already_handled(self, respiter): - # some workers will need to overload this function to raise a StopIteration - return respiter == ALREADY_HANDLED - - def handle(self, listener, client, addr): - req = None - try: - parser = http.RequestParser(self.cfg, client, addr) - try: - listener_name = listener.getsockname() - if not self.cfg.keepalive: - req = next(parser) - self.handle_request(listener_name, req, client, addr) - else: - # keepalive loop - proxy_protocol_info = {} - while True: - req = None - with self.timeout_ctx(): - req = next(parser) - if not req: - break - if req.proxy_protocol_info: - proxy_protocol_info = req.proxy_protocol_info - else: - req.proxy_protocol_info = proxy_protocol_info - self.handle_request(listener_name, req, client, addr) - except http.errors.NoMoreData as e: - self.log.debug("Ignored premature client disconnection. %s", e) - except StopIteration as e: - self.log.debug("Closing connection. %s", e) - except ssl.SSLError: - # pass to next try-except level - util.reraise(*sys.exc_info()) - except OSError: - # pass to next try-except level - util.reraise(*sys.exc_info()) - except Exception as e: - self.handle_error(req, client, addr, e) - except ssl.SSLError as e: - if e.args[0] == ssl.SSL_ERROR_EOF: - self.log.debug("ssl connection closed") - client.close() - else: - self.log.debug("Error processing SSL request.") - self.handle_error(req, client, addr, e) - except OSError as e: - if e.errno not in (errno.EPIPE, errno.ECONNRESET, errno.ENOTCONN): - self.log.exception("Socket error processing request.") - else: - if e.errno == errno.ECONNRESET: - self.log.debug("Ignoring connection reset") - elif e.errno == errno.ENOTCONN: - self.log.debug("Ignoring socket not connected") - else: - self.log.debug("Ignoring EPIPE") - except BaseException as e: - self.handle_error(req, client, addr, e) - finally: - util.close(client) - - def handle_request(self, listener_name, req, sock, addr): - request_start = datetime.now() - environ = {} - resp = None - try: - self.cfg.pre_request(self, req) - resp, environ = wsgi.create(req, sock, addr, - listener_name, self.cfg) - environ["wsgi.multithread"] = True - self.nr += 1 - if self.nr >= self.max_requests: - if self.alive: - self.log.info("Autorestarting worker after current request.") - self.alive = False - - if not self.alive or not self.cfg.keepalive: - resp.force_close() - - respiter = self.wsgi(environ, resp.start_response) - if self.is_already_handled(respiter): - return False - try: - if isinstance(respiter, environ['wsgi.file_wrapper']): - resp.write_file(respiter) - else: - for item in respiter: - resp.write(item) - resp.close() - finally: - request_time = datetime.now() - request_start - self.log.access(resp, req, environ, request_time) - if hasattr(respiter, "close"): - respiter.close() - if resp.should_close(): - raise StopIteration() - except StopIteration: - raise - except OSError: - # If the original exception was a socket.error we delegate - # handling it to the caller (where handle() might ignore it) - util.reraise(*sys.exc_info()) - except Exception: 
- if resp and resp.headers_sent: - # If the requests have already been sent, we should close the - # connection to indicate the error. - self.log.exception("Error handling request") - try: - sock.shutdown(socket.SHUT_RDWR) - sock.close() - except OSError: - pass - raise StopIteration() - raise - finally: - try: - self.cfg.post_request(self, req, environ, resp) - except Exception: - self.log.exception("Exception in post_request hook") - return True diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/geventlet.py b/venv/lib/python3.10/site-packages/gunicorn/workers/geventlet.py deleted file mode 100644 index 087eb61..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/workers/geventlet.py +++ /dev/null @@ -1,186 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -from functools import partial -import sys - -try: - import eventlet -except ImportError: - raise RuntimeError("eventlet worker requires eventlet 0.24.1 or higher") -else: - from packaging.version import parse as parse_version - if parse_version(eventlet.__version__) < parse_version('0.24.1'): - raise RuntimeError("eventlet worker requires eventlet 0.24.1 or higher") - -from eventlet import hubs, greenthread -from eventlet.greenio import GreenSocket -import eventlet.wsgi -import greenlet - -from gunicorn.workers.base_async import AsyncWorker -from gunicorn.sock import ssl_wrap_socket - -# ALREADY_HANDLED is removed in 0.30.3+ now it's `WSGI_LOCAL.already_handled: bool` -# https://github.com/eventlet/eventlet/pull/544 -EVENTLET_WSGI_LOCAL = getattr(eventlet.wsgi, "WSGI_LOCAL", None) -EVENTLET_ALREADY_HANDLED = getattr(eventlet.wsgi, "ALREADY_HANDLED", None) - - -def _eventlet_socket_sendfile(self, file, offset=0, count=None): - # Based on the implementation in gevent which in turn is slightly - # modified from the standard library implementation. - if self.gettimeout() == 0: - raise ValueError("non-blocking sockets are not supported") - if offset: - file.seek(offset) - blocksize = min(count, 8192) if count else 8192 - total_sent = 0 - # localize variable access to minimize overhead - file_read = file.read - sock_send = self.send - try: - while True: - if count: - blocksize = min(count - total_sent, blocksize) - if blocksize <= 0: - break - data = memoryview(file_read(blocksize)) - if not data: - break # EOF - while True: - try: - sent = sock_send(data) - except BlockingIOError: - continue - else: - total_sent += sent - if sent < len(data): - data = data[sent:] - else: - break - return total_sent - finally: - if total_sent > 0 and hasattr(file, 'seek'): - file.seek(offset + total_sent) - - -def _eventlet_serve(sock, handle, concurrency): - """ - Serve requests forever. - - This code is nearly identical to ``eventlet.convenience.serve`` except - that it attempts to join the pool at the end, which allows for gunicorn - graceful shutdowns. - """ - pool = eventlet.greenpool.GreenPool(concurrency) - server_gt = eventlet.greenthread.getcurrent() - - while True: - try: - conn, addr = sock.accept() - gt = pool.spawn(handle, conn, addr) - gt.link(_eventlet_stop, server_gt, conn) - conn, addr, gt = None, None, None - except eventlet.StopServe: - sock.close() - pool.waitall() - return - - -def _eventlet_stop(client, server, conn): - """ - Stop a greenlet handling a request and close its connection. - - This code is lifted from eventlet so as not to depend on undocumented - functions in the library. 
- """ - try: - try: - client.wait() - finally: - conn.close() - except greenlet.GreenletExit: - pass - except Exception: - greenthread.kill(server, *sys.exc_info()) - - -def patch_sendfile(): - # As of eventlet 0.25.1, GreenSocket.sendfile doesn't exist, - # meaning the native implementations of socket.sendfile will be used. - # If os.sendfile exists, it will attempt to use that, failing explicitly - # if the socket is in non-blocking mode, which the underlying - # socket object /is/. Even the regular _sendfile_use_send will - # fail in that way; plus, it would use the underlying socket.send which isn't - # properly cooperative. So we have to monkey-patch a working socket.sendfile() - # into GreenSocket; in this method, `self.send` will be the GreenSocket's - # send method which is properly cooperative. - if not hasattr(GreenSocket, 'sendfile'): - GreenSocket.sendfile = _eventlet_socket_sendfile - - -class EventletWorker(AsyncWorker): - - def patch(self): - hubs.use_hub() - eventlet.monkey_patch() - patch_sendfile() - - def is_already_handled(self, respiter): - # eventlet >= 0.30.3 - if getattr(EVENTLET_WSGI_LOCAL, "already_handled", None): - raise StopIteration() - # eventlet < 0.30.3 - if respiter == EVENTLET_ALREADY_HANDLED: - raise StopIteration() - return super().is_already_handled(respiter) - - def init_process(self): - self.patch() - super().init_process() - - def handle_quit(self, sig, frame): - eventlet.spawn(super().handle_quit, sig, frame) - - def handle_usr1(self, sig, frame): - eventlet.spawn(super().handle_usr1, sig, frame) - - def timeout_ctx(self): - return eventlet.Timeout(self.cfg.keepalive or None, False) - - def handle(self, listener, client, addr): - if self.cfg.is_ssl: - client = ssl_wrap_socket(client, self.cfg) - super().handle(listener, client, addr) - - def run(self): - acceptors = [] - for sock in self.sockets: - gsock = GreenSocket(sock) - gsock.setblocking(1) - hfun = partial(self.handle, gsock) - acceptor = eventlet.spawn(_eventlet_serve, gsock, hfun, - self.worker_connections) - - acceptors.append(acceptor) - eventlet.sleep(0.0) - - while self.alive: - self.notify() - eventlet.sleep(1.0) - - self.notify() - t = None - try: - with eventlet.Timeout(self.cfg.graceful_timeout) as t: - for a in acceptors: - a.kill(eventlet.StopServe()) - for a in acceptors: - a.wait() - except eventlet.Timeout as te: - if te != t: - raise - for a in acceptors: - a.kill() diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/ggevent.py b/venv/lib/python3.10/site-packages/gunicorn/workers/ggevent.py deleted file mode 100644 index b9b9b44..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/workers/ggevent.py +++ /dev/null @@ -1,193 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. 
- -import os -import sys -from datetime import datetime -from functools import partial -import time - -try: - import gevent -except ImportError: - raise RuntimeError("gevent worker requires gevent 1.4 or higher") -else: - from packaging.version import parse as parse_version - if parse_version(gevent.__version__) < parse_version('1.4'): - raise RuntimeError("gevent worker requires gevent 1.4 or higher") - -from gevent.pool import Pool -from gevent.server import StreamServer -from gevent import hub, monkey, socket, pywsgi - -import gunicorn -from gunicorn.http.wsgi import base_environ -from gunicorn.sock import ssl_context -from gunicorn.workers.base_async import AsyncWorker - -VERSION = "gevent/%s gunicorn/%s" % (gevent.__version__, gunicorn.__version__) - - -class GeventWorker(AsyncWorker): - - server_class = None - wsgi_handler = None - - def patch(self): - monkey.patch_all() - - # patch sockets - sockets = [] - for s in self.sockets: - sockets.append(socket.socket(s.FAMILY, socket.SOCK_STREAM, - fileno=s.sock.fileno())) - self.sockets = sockets - - def notify(self): - super().notify() - if self.ppid != os.getppid(): - self.log.info("Parent changed, shutting down: %s", self) - sys.exit(0) - - def timeout_ctx(self): - return gevent.Timeout(self.cfg.keepalive, False) - - def run(self): - servers = [] - ssl_args = {} - - if self.cfg.is_ssl: - ssl_args = {"ssl_context": ssl_context(self.cfg)} - - for s in self.sockets: - s.setblocking(1) - pool = Pool(self.worker_connections) - if self.server_class is not None: - environ = base_environ(self.cfg) - environ.update({ - "wsgi.multithread": True, - "SERVER_SOFTWARE": VERSION, - }) - server = self.server_class( - s, application=self.wsgi, spawn=pool, log=self.log, - handler_class=self.wsgi_handler, environ=environ, - **ssl_args) - else: - hfun = partial(self.handle, s) - server = StreamServer(s, handle=hfun, spawn=pool, **ssl_args) - if self.cfg.workers > 1: - server.max_accept = 1 - - server.start() - servers.append(server) - - while self.alive: - self.notify() - gevent.sleep(1.0) - - try: - # Stop accepting requests - for server in servers: - if hasattr(server, 'close'): # gevent 1.0 - server.close() - if hasattr(server, 'kill'): # gevent < 1.0 - server.kill() - - # Handle current requests until graceful_timeout - ts = time.time() - while time.time() - ts <= self.cfg.graceful_timeout: - accepting = 0 - for server in servers: - if server.pool.free_count() != server.pool.size: - accepting += 1 - - # if no server is accepting a connection, we can exit - if not accepting: - return - - self.notify() - gevent.sleep(1.0) - - # Force kill all active the handlers - self.log.warning("Worker graceful timeout (pid:%s)", self.pid) - for server in servers: - server.stop(timeout=1) - except Exception: - pass - - def handle(self, listener, client, addr): - # Connected socket timeout defaults to socket.getdefaulttimeout(). - # This forces to blocking mode. - client.setblocking(1) - super().handle(listener, client, addr) - - def handle_request(self, listener_name, req, sock, addr): - try: - super().handle_request(listener_name, req, sock, addr) - except gevent.GreenletExit: - pass - except SystemExit: - pass - - def handle_quit(self, sig, frame): - # Move this out of the signal handler so we can use - # blocking calls. See #1126 - gevent.spawn(super().handle_quit, sig, frame) - - def handle_usr1(self, sig, frame): - # Make the gevent workers handle the usr1 signal - # by deferring to a new greenlet. 
See #1645 - gevent.spawn(super().handle_usr1, sig, frame) - - def init_process(self): - self.patch() - hub.reinit() - super().init_process() - - -class GeventResponse: - - status = None - headers = None - sent = None - - def __init__(self, status, headers, clength): - self.status = status - self.headers = headers - self.sent = clength - - -class PyWSGIHandler(pywsgi.WSGIHandler): - - def log_request(self): - start = datetime.fromtimestamp(self.time_start) - finish = datetime.fromtimestamp(self.time_finish) - response_time = finish - start - resp_headers = getattr(self, 'response_headers', {}) - - # Status is expected to be a string but is encoded to bytes in gevent for PY3 - # Except when it isn't because gevent uses hardcoded strings for network errors. - status = self.status.decode() if isinstance(self.status, bytes) else self.status - resp = GeventResponse(status, resp_headers, self.response_length) - if hasattr(self, 'headers'): - req_headers = self.headers.items() - else: - req_headers = [] - self.server.log.access(resp, req_headers, self.environ, response_time) - - def get_environ(self): - env = super().get_environ() - env['gunicorn.sock'] = self.socket - env['RAW_URI'] = self.path - return env - - -class PyWSGIServer(pywsgi.WSGIServer): - pass - - -class GeventPyWSGIWorker(GeventWorker): - "The Gevent StreamServer based workers." - server_class = PyWSGIServer - wsgi_handler = PyWSGIHandler diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/gthread.py b/venv/lib/python3.10/site-packages/gunicorn/workers/gthread.py deleted file mode 100644 index 7a23228..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/workers/gthread.py +++ /dev/null @@ -1,372 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -# design: -# A threaded worker accepts connections in the main loop, accepted -# connections are added to the thread pool as a connection job. -# Keepalive connections are put back in the loop waiting for an event. -# If no event happen after the keep alive timeout, the connection is -# closed. -# pylint: disable=no-else-break - -from concurrent import futures -import errno -import os -import selectors -import socket -import ssl -import sys -import time -from collections import deque -from datetime import datetime -from functools import partial -from threading import RLock - -from . import base -from .. import http -from .. import util -from .. 
import sock -from ..http import wsgi - - -class TConn: - - def __init__(self, cfg, sock, client, server): - self.cfg = cfg - self.sock = sock - self.client = client - self.server = server - - self.timeout = None - self.parser = None - self.initialized = False - - # set the socket to non blocking - self.sock.setblocking(False) - - def init(self): - self.initialized = True - self.sock.setblocking(True) - - if self.parser is None: - # wrap the socket if needed - if self.cfg.is_ssl: - self.sock = sock.ssl_wrap_socket(self.sock, self.cfg) - - # initialize the parser - self.parser = http.RequestParser(self.cfg, self.sock, self.client) - - def set_timeout(self): - # set the timeout - self.timeout = time.time() + self.cfg.keepalive - - def close(self): - util.close(self.sock) - - -class ThreadWorker(base.Worker): - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.worker_connections = self.cfg.worker_connections - self.max_keepalived = self.cfg.worker_connections - self.cfg.threads - # initialise the pool - self.tpool = None - self.poller = None - self._lock = None - self.futures = deque() - self._keep = deque() - self.nr_conns = 0 - - @classmethod - def check_config(cls, cfg, log): - max_keepalived = cfg.worker_connections - cfg.threads - - if max_keepalived <= 0 and cfg.keepalive: - log.warning("No keepalived connections can be handled. " + - "Check the number of worker connections and threads.") - - def init_process(self): - self.tpool = self.get_thread_pool() - self.poller = selectors.DefaultSelector() - self._lock = RLock() - super().init_process() - - def get_thread_pool(self): - """Override this method to customize how the thread pool is created""" - return futures.ThreadPoolExecutor(max_workers=self.cfg.threads) - - def handle_quit(self, sig, frame): - self.alive = False - # worker_int callback - self.cfg.worker_int(self) - self.tpool.shutdown(False) - time.sleep(0.1) - sys.exit(0) - - def _wrap_future(self, fs, conn): - fs.conn = conn - self.futures.append(fs) - fs.add_done_callback(self.finish_request) - - def enqueue_req(self, conn): - conn.init() - # submit the connection to a worker - fs = self.tpool.submit(self.handle, conn) - self._wrap_future(fs, conn) - - def accept(self, server, listener): - try: - sock, client = listener.accept() - # initialize the connection object - conn = TConn(self.cfg, sock, client, server) - - self.nr_conns += 1 - # wait until socket is readable - with self._lock: - self.poller.register(conn.sock, selectors.EVENT_READ, - partial(self.on_client_socket_readable, conn)) - except OSError as e: - if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, - errno.EWOULDBLOCK): - raise - - def on_client_socket_readable(self, conn, client): - with self._lock: - # unregister the client from the poller - self.poller.unregister(client) - - if conn.initialized: - # remove the connection from keepalive - try: - self._keep.remove(conn) - except ValueError: - # race condition - return - - # submit the connection to a worker - self.enqueue_req(conn) - - def murder_keepalived(self): - now = time.time() - while True: - with self._lock: - try: - # remove the connection from the queue - conn = self._keep.popleft() - except IndexError: - break - - delta = conn.timeout - now - if delta > 0: - # add the connection back to the queue - with self._lock: - self._keep.appendleft(conn) - break - else: - self.nr_conns -= 1 - # remove the socket from the poller - with self._lock: - try: - self.poller.unregister(conn.sock) - except OSError as e: - if e.errno != 
errno.EBADF: - raise - except KeyError: - # already removed by the system, continue - pass - except ValueError: - # already removed by the system continue - pass - - # close the socket - conn.close() - - def is_parent_alive(self): - # If our parent changed then we shut down. - if self.ppid != os.getppid(): - self.log.info("Parent changed, shutting down: %s", self) - return False - return True - - def run(self): - # init listeners, add them to the event loop - for sock in self.sockets: - sock.setblocking(False) - # a race condition during graceful shutdown may make the listener - # name unavailable in the request handler so capture it once here - server = sock.getsockname() - acceptor = partial(self.accept, server) - self.poller.register(sock, selectors.EVENT_READ, acceptor) - - while self.alive: - # notify the arbiter we are alive - self.notify() - - # can we accept more connections? - if self.nr_conns < self.worker_connections: - # wait for an event - events = self.poller.select(1.0) - for key, _ in events: - callback = key.data - callback(key.fileobj) - - # check (but do not wait) for finished requests - result = futures.wait(self.futures, timeout=0, - return_when=futures.FIRST_COMPLETED) - else: - # wait for a request to finish - result = futures.wait(self.futures, timeout=1.0, - return_when=futures.FIRST_COMPLETED) - - # clean up finished requests - for fut in result.done: - self.futures.remove(fut) - - if not self.is_parent_alive(): - break - - # handle keepalive timeouts - self.murder_keepalived() - - self.tpool.shutdown(False) - self.poller.close() - - for s in self.sockets: - s.close() - - futures.wait(self.futures, timeout=self.cfg.graceful_timeout) - - def finish_request(self, fs): - if fs.cancelled(): - self.nr_conns -= 1 - fs.conn.close() - return - - try: - (keepalive, conn) = fs.result() - # if the connection should be kept alived add it - # to the eventloop and record it - if keepalive and self.alive: - # flag the socket as non blocked - conn.sock.setblocking(False) - - # register the connection - conn.set_timeout() - with self._lock: - self._keep.append(conn) - - # add the socket to the event loop - self.poller.register(conn.sock, selectors.EVENT_READ, - partial(self.on_client_socket_readable, conn)) - else: - self.nr_conns -= 1 - conn.close() - except Exception: - # an exception happened, make sure to close the - # socket. - self.nr_conns -= 1 - fs.conn.close() - - def handle(self, conn): - keepalive = False - req = None - try: - req = next(conn.parser) - if not req: - return (False, conn) - - # handle the request - keepalive = self.handle_request(req, conn) - if keepalive: - return (keepalive, conn) - except http.errors.NoMoreData as e: - self.log.debug("Ignored premature client disconnection. %s", e) - - except StopIteration as e: - self.log.debug("Closing connection. 
%s", e) - except ssl.SSLError as e: - if e.args[0] == ssl.SSL_ERROR_EOF: - self.log.debug("ssl connection closed") - conn.sock.close() - else: - self.log.debug("Error processing SSL request.") - self.handle_error(req, conn.sock, conn.client, e) - - except OSError as e: - if e.errno not in (errno.EPIPE, errno.ECONNRESET, errno.ENOTCONN): - self.log.exception("Socket error processing request.") - else: - if e.errno == errno.ECONNRESET: - self.log.debug("Ignoring connection reset") - elif e.errno == errno.ENOTCONN: - self.log.debug("Ignoring socket not connected") - else: - self.log.debug("Ignoring connection epipe") - except Exception as e: - self.handle_error(req, conn.sock, conn.client, e) - - return (False, conn) - - def handle_request(self, req, conn): - environ = {} - resp = None - try: - self.cfg.pre_request(self, req) - request_start = datetime.now() - resp, environ = wsgi.create(req, conn.sock, conn.client, - conn.server, self.cfg) - environ["wsgi.multithread"] = True - self.nr += 1 - if self.nr >= self.max_requests: - if self.alive: - self.log.info("Autorestarting worker after current request.") - self.alive = False - resp.force_close() - - if not self.alive or not self.cfg.keepalive: - resp.force_close() - elif len(self._keep) >= self.max_keepalived: - resp.force_close() - - respiter = self.wsgi(environ, resp.start_response) - try: - if isinstance(respiter, environ['wsgi.file_wrapper']): - resp.write_file(respiter) - else: - for item in respiter: - resp.write(item) - - resp.close() - finally: - request_time = datetime.now() - request_start - self.log.access(resp, req, environ, request_time) - if hasattr(respiter, "close"): - respiter.close() - - if resp.should_close(): - self.log.debug("Closing connection.") - return False - except OSError: - # pass to next try-except level - util.reraise(*sys.exc_info()) - except Exception: - if resp and resp.headers_sent: - # If the requests have already been sent, we should close the - # connection to indicate the error. - self.log.exception("Error handling request") - try: - conn.sock.shutdown(socket.SHUT_RDWR) - conn.sock.close() - except OSError: - pass - raise StopIteration() - raise - finally: - try: - self.cfg.post_request(self, req, environ, resp) - except Exception: - self.log.exception("Exception in post_request hook") - - return True diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/gtornado.py b/venv/lib/python3.10/site-packages/gunicorn/workers/gtornado.py deleted file mode 100644 index 544af7d..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/workers/gtornado.py +++ /dev/null @@ -1,166 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -import os -import sys - -try: - import tornado -except ImportError: - raise RuntimeError("You need tornado installed to use this worker.") -import tornado.web -import tornado.httpserver -from tornado.ioloop import IOLoop, PeriodicCallback -from tornado.wsgi import WSGIContainer -from gunicorn.workers.base import Worker -from gunicorn import __version__ as gversion -from gunicorn.sock import ssl_context - - -# Tornado 5.0 updated its IOLoop, and the `io_loop` arguments to many -# Tornado functions have been removed in Tornado 5.0. Also, they no -# longer store PeriodCallbacks in ioloop._callbacks. Instead we store -# them on our side, and use stop() on them when stopping the worker. -# See https://www.tornadoweb.org/en/stable/releases/v5.0.0.html#backwards-compatibility-notes -# for more details. 
-TORNADO5 = tornado.version_info >= (5, 0, 0) - - -class TornadoWorker(Worker): - - @classmethod - def setup(cls): - web = sys.modules.pop("tornado.web") - old_clear = web.RequestHandler.clear - - def clear(self): - old_clear(self) - if "Gunicorn" not in self._headers["Server"]: - self._headers["Server"] += " (Gunicorn/%s)" % gversion - web.RequestHandler.clear = clear - sys.modules["tornado.web"] = web - - def handle_exit(self, sig, frame): - if self.alive: - super().handle_exit(sig, frame) - - def handle_request(self): - self.nr += 1 - if self.alive and self.nr >= self.max_requests: - self.log.info("Autorestarting worker after current request.") - self.alive = False - - def watchdog(self): - if self.alive: - self.notify() - - if self.ppid != os.getppid(): - self.log.info("Parent changed, shutting down: %s", self) - self.alive = False - - def heartbeat(self): - if not self.alive: - if self.server_alive: - if hasattr(self, 'server'): - try: - self.server.stop() - except Exception: - pass - self.server_alive = False - else: - if TORNADO5: - for callback in self.callbacks: - callback.stop() - self.ioloop.stop() - else: - if not self.ioloop._callbacks: - self.ioloop.stop() - - def init_process(self): - # IOLoop cannot survive a fork or be shared across processes - # in any way. When multiple processes are being used, each process - # should create its own IOLoop. We should clear current IOLoop - # if exists before os.fork. - IOLoop.clear_current() - super().init_process() - - def run(self): - self.ioloop = IOLoop.instance() - self.alive = True - self.server_alive = False - - if TORNADO5: - self.callbacks = [] - self.callbacks.append(PeriodicCallback(self.watchdog, 1000)) - self.callbacks.append(PeriodicCallback(self.heartbeat, 1000)) - for callback in self.callbacks: - callback.start() - else: - PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start() - PeriodicCallback(self.heartbeat, 1000, io_loop=self.ioloop).start() - - # Assume the app is a WSGI callable if its not an - # instance of tornado.web.Application or is an - # instance of tornado.wsgi.WSGIApplication - app = self.wsgi - - if tornado.version_info[0] < 6: - if not isinstance(app, tornado.web.Application) or \ - isinstance(app, tornado.wsgi.WSGIApplication): - app = WSGIContainer(app) - elif not isinstance(app, WSGIContainer) and \ - not isinstance(app, tornado.web.Application): - app = WSGIContainer(app) - - # Monkey-patching HTTPConnection.finish to count the - # number of requests being handled by Tornado. This - # will help gunicorn shutdown the worker if max_requests - # is exceeded. 
- httpserver = sys.modules["tornado.httpserver"] - if hasattr(httpserver, 'HTTPConnection'): - old_connection_finish = httpserver.HTTPConnection.finish - - def finish(other): - self.handle_request() - old_connection_finish(other) - httpserver.HTTPConnection.finish = finish - sys.modules["tornado.httpserver"] = httpserver - - server_class = tornado.httpserver.HTTPServer - else: - - class _HTTPServer(tornado.httpserver.HTTPServer): - - def on_close(instance, server_conn): - self.handle_request() - super().on_close(server_conn) - - server_class = _HTTPServer - - if self.cfg.is_ssl: - if TORNADO5: - server = server_class(app, ssl_options=ssl_context(self.cfg)) - else: - server = server_class(app, io_loop=self.ioloop, - ssl_options=ssl_context(self.cfg)) - else: - if TORNADO5: - server = server_class(app) - else: - server = server_class(app, io_loop=self.ioloop) - - self.server = server - self.server_alive = True - - for s in self.sockets: - s.setblocking(0) - if hasattr(server, "add_socket"): # tornado > 2.0 - server.add_socket(s) - elif hasattr(server, "_sockets"): # tornado 2.0 - server._sockets[s.fileno()] = s - - server.no_keep_alive = self.cfg.keepalive <= 0 - server.start(num_processes=1) - - self.ioloop.start() diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/sync.py b/venv/lib/python3.10/site-packages/gunicorn/workers/sync.py deleted file mode 100644 index 4c029f9..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/workers/sync.py +++ /dev/null @@ -1,209 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. -# - -from datetime import datetime -import errno -import os -import select -import socket -import ssl -import sys - -from gunicorn import http -from gunicorn.http import wsgi -from gunicorn import sock -from gunicorn import util -from gunicorn.workers import base - - -class StopWaiting(Exception): - """ exception raised to stop waiting for a connection """ - - -class SyncWorker(base.Worker): - - def accept(self, listener): - client, addr = listener.accept() - client.setblocking(1) - util.close_on_exec(client) - self.handle(listener, client, addr) - - def wait(self, timeout): - try: - self.notify() - ret = select.select(self.wait_fds, [], [], timeout) - if ret[0]: - if self.PIPE[0] in ret[0]: - os.read(self.PIPE[0], 1) - return ret[0] - - except OSError as e: - if e.args[0] == errno.EINTR: - return self.sockets - if e.args[0] == errno.EBADF: - if self.nr < 0: - return self.sockets - else: - raise StopWaiting - raise - - def is_parent_alive(self): - # If our parent changed then we shut down. - if self.ppid != os.getppid(): - self.log.info("Parent changed, shutting down: %s", self) - return False - return True - - def run_for_one(self, timeout): - listener = self.sockets[0] - while self.alive: - self.notify() - - # Accept a connection. If we get an error telling us - # that no connection is waiting we fall down to the - # select which is where we'll wait for a bit for new - # workers to come give us some love. - try: - self.accept(listener) - # Keep processing clients until no one is waiting. This - # prevents the need to select() for every client that we - # process. 
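Outside the removed module, the same accept-until-empty idea looks roughly like this (the loopback listener and timeout are illustrative only): drain every pending client from a non-blocking listener, and fall back to select() only once accept() reports that nothing is waiting.

import errno
import select
import socket

listener = socket.socket()
listener.bind(("127.0.0.1", 0))
listener.listen(8)
listener.setblocking(False)

def drain(listener):
    while True:
        try:
            client, _addr = listener.accept()   # keep accepting while clients are queued
        except OSError as e:
            if e.errno in (errno.EAGAIN, errno.EWOULDBLOCK, errno.ECONNABORTED):
                break                           # queue is empty, stop accepting
            raise
        client.close()                          # a real worker would handle the request here

drain(listener)                                 # returns immediately: nobody is connected
select.select([listener], [], [], 0.1)          # only now wait for new clients to arrive
listener.close()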
- continue - - except OSError as e: - if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, - errno.EWOULDBLOCK): - raise - - if not self.is_parent_alive(): - return - - try: - self.wait(timeout) - except StopWaiting: - return - - def run_for_multiple(self, timeout): - while self.alive: - self.notify() - - try: - ready = self.wait(timeout) - except StopWaiting: - return - - if ready is not None: - for listener in ready: - if listener == self.PIPE[0]: - continue - - try: - self.accept(listener) - except OSError as e: - if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, - errno.EWOULDBLOCK): - raise - - if not self.is_parent_alive(): - return - - def run(self): - # if no timeout is given the worker will never wait and will - # use the CPU for nothing. This minimal timeout prevent it. - timeout = self.timeout or 0.5 - - # self.socket appears to lose its blocking status after - # we fork in the arbiter. Reset it here. - for s in self.sockets: - s.setblocking(0) - - if len(self.sockets) > 1: - self.run_for_multiple(timeout) - else: - self.run_for_one(timeout) - - def handle(self, listener, client, addr): - req = None - try: - if self.cfg.is_ssl: - client = sock.ssl_wrap_socket(client, self.cfg) - parser = http.RequestParser(self.cfg, client, addr) - req = next(parser) - self.handle_request(listener, req, client, addr) - except http.errors.NoMoreData as e: - self.log.debug("Ignored premature client disconnection. %s", e) - except StopIteration as e: - self.log.debug("Closing connection. %s", e) - except ssl.SSLError as e: - if e.args[0] == ssl.SSL_ERROR_EOF: - self.log.debug("ssl connection closed") - client.close() - else: - self.log.debug("Error processing SSL request.") - self.handle_error(req, client, addr, e) - except OSError as e: - if e.errno not in (errno.EPIPE, errno.ECONNRESET, errno.ENOTCONN): - self.log.exception("Socket error processing request.") - else: - if e.errno == errno.ECONNRESET: - self.log.debug("Ignoring connection reset") - elif e.errno == errno.ENOTCONN: - self.log.debug("Ignoring socket not connected") - else: - self.log.debug("Ignoring EPIPE") - except BaseException as e: - self.handle_error(req, client, addr, e) - finally: - util.close(client) - - def handle_request(self, listener, req, client, addr): - environ = {} - resp = None - try: - self.cfg.pre_request(self, req) - request_start = datetime.now() - resp, environ = wsgi.create(req, client, addr, - listener.getsockname(), self.cfg) - # Force the connection closed until someone shows - # a buffering proxy that supports Keep-Alive to - # the backend. - resp.force_close() - self.nr += 1 - if self.nr >= self.max_requests: - self.log.info("Autorestarting worker after current request.") - self.alive = False - respiter = self.wsgi(environ, resp.start_response) - try: - if isinstance(respiter, environ['wsgi.file_wrapper']): - resp.write_file(respiter) - else: - for item in respiter: - resp.write(item) - resp.close() - finally: - request_time = datetime.now() - request_start - self.log.access(resp, req, environ, request_time) - if hasattr(respiter, "close"): - respiter.close() - except OSError: - # pass to next try-except level - util.reraise(*sys.exc_info()) - except Exception: - if resp and resp.headers_sent: - # If the requests have already been sent, we should close the - # connection to indicate the error. 
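The decision that comment describes can be sketched on its own (the `Response` class and the socketpair are stand-ins, not gunicorn objects): once headers have gone out there is no way to report the failure in-band, so the only remaining signal is to tear the connection down.

import socket

class Response:
    def __init__(self):
        self.headers_sent = False

def handle_failure(resp, client_sock):
    if resp is not None and resp.headers_sent:
        # Too late for an error page: hard-close so the peer sees a broken reply.
        try:
            client_sock.shutdown(socket.SHUT_RDWR)
        finally:
            client_sock.close()
        return "connection closed"
    return "can still send a 500 response"

a, b = socket.socketpair()
resp = Response()
resp.headers_sent = True
print(handle_failure(resp, a))   # -> connection closed
b.close()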
- self.log.exception("Error handling request") - try: - client.shutdown(socket.SHUT_RDWR) - client.close() - except OSError: - pass - raise StopIteration() - raise - finally: - try: - self.cfg.post_request(self, req, environ, resp) - except Exception: - self.log.exception("Exception in post_request hook") diff --git a/venv/lib/python3.10/site-packages/gunicorn/workers/workertmp.py b/venv/lib/python3.10/site-packages/gunicorn/workers/workertmp.py deleted file mode 100644 index 8ef00a5..0000000 --- a/venv/lib/python3.10/site-packages/gunicorn/workers/workertmp.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# This file is part of gunicorn released under the MIT license. -# See the NOTICE for more information. - -import os -import time -import platform -import tempfile - -from gunicorn import util - -PLATFORM = platform.system() -IS_CYGWIN = PLATFORM.startswith('CYGWIN') - - -class WorkerTmp: - - def __init__(self, cfg): - old_umask = os.umask(cfg.umask) - fdir = cfg.worker_tmp_dir - if fdir and not os.path.isdir(fdir): - raise RuntimeError("%s doesn't exist. Can't create workertmp." % fdir) - fd, name = tempfile.mkstemp(prefix="wgunicorn-", dir=fdir) - os.umask(old_umask) - - # change the owner and group of the file if the worker will run as - # a different user or group, so that the worker can modify the file - if cfg.uid != os.geteuid() or cfg.gid != os.getegid(): - util.chown(name, cfg.uid, cfg.gid) - - # unlink the file so we don't leak temporary files - try: - if not IS_CYGWIN: - util.unlink(name) - # In Python 3.8, open() emits RuntimeWarning if buffering=1 for binary mode. - # Because we never write to this file, pass 0 to switch buffering off. - self._tmp = os.fdopen(fd, 'w+b', 0) - except Exception: - os.close(fd) - raise - - def notify(self): - new_time = time.monotonic() - os.utime(self._tmp.fileno(), (new_time, new_time)) - - def last_update(self): - return os.fstat(self._tmp.fileno()).st_mtime - - def fileno(self): - return self._tmp.fileno() - - def close(self): - return self._tmp.close() diff --git a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/idna-3.10.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/LICENSE.md b/venv/lib/python3.10/site-packages/idna-3.10.dist-info/LICENSE.md deleted file mode 100644 index 19b6b45..0000000 --- a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/LICENSE.md +++ /dev/null @@ -1,31 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2013-2024, Kim Davies and contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/METADATA b/venv/lib/python3.10/site-packages/idna-3.10.dist-info/METADATA deleted file mode 100644 index c42623e..0000000 --- a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/METADATA +++ /dev/null @@ -1,250 +0,0 @@ -Metadata-Version: 2.1 -Name: idna -Version: 3.10 -Summary: Internationalized Domain Names in Applications (IDNA) -Author-email: Kim Davies -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: Name Service (DNS) -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Utilities -Requires-Dist: ruff >= 0.6.2 ; extra == "all" -Requires-Dist: mypy >= 1.11.2 ; extra == "all" -Requires-Dist: pytest >= 8.3.2 ; extra == "all" -Requires-Dist: flake8 >= 7.1.1 ; extra == "all" -Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst -Project-URL: Issue tracker, https://github.com/kjd/idna/issues -Project-URL: Source, https://github.com/kjd/idna -Provides-Extra: all - -Internationalized Domain Names in Applications (IDNA) -===================================================== - -Support for the Internationalized Domain Names in -Applications (IDNA) protocol as specified in `RFC 5891 -`_. This is the latest version of -the protocol and is sometimes referred to as “IDNA 2008”. - -This library also provides support for Unicode Technical -Standard 46, `Unicode IDNA Compatibility Processing -`_. - -This acts as a suitable replacement for the “encodings.idna” -module that comes with the Python standard library, but which -only supports the older superseded IDNA specification (`RFC 3490 -`_). - -Basic functions are simply executed: - -.. 
code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - - -Installation ------------- - -This package is available for installation from PyPI: - -.. code-block:: bash - - $ python3 -m pip install idna - - -Usage ------ - -For typical usage, the ``encode`` and ``decode`` functions will take a -domain name argument and perform a conversion to A-labels or U-labels -respectively. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - -You may use the codec encoding and decoding methods using the -``idna.codec`` module: - -.. code-block:: pycon - - >>> import idna.codec - >>> print('домен.испытание'.encode('idna2008')) - b'xn--d1acufc.xn--80akhbyknj4f' - >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna2008')) - домен.испытание - -Conversions can be applied at a per-label basis using the ``ulabel`` or -``alabel`` functions if necessary: - -.. code-block:: pycon - - >>> idna.alabel('测试') - b'xn--0zwm56d' - -Compatibility Mapping (UTS #46) -+++++++++++++++++++++++++++++++ - -As described in `RFC 5895 `_, the -IDNA specification does not normalize input from different potential -ways a user may input a domain name. This functionality, known as -a “mapping”, is considered by the specification to be a local -user-interface issue distinct from IDNA conversion functionality. - -This library provides one such mapping that was developed by the -Unicode Consortium. Known as `Unicode IDNA Compatibility Processing -`_, it provides for both a regular -mapping for typical applications, as well as a transitional mapping to -help migrate from older IDNA 2003 applications. Strings are -preprocessed according to Section 4.4 “Preprocessing for IDNA2008” -prior to the IDNA operations. - -For example, “Königsgäßchen” is not a permissible label as *LATIN -CAPITAL LETTER K* is not allowed (nor are capital letters in general). -UTS 46 will convert this into lower case prior to applying the IDNA -conversion. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('Königsgäßchen') - ... - idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed - >>> idna.encode('Königsgäßchen', uts46=True) - b'xn--knigsgchen-b4a3dun' - >>> print(idna.decode('xn--knigsgchen-b4a3dun')) - königsgäßchen - -Transitional processing provides conversions to help transition from -the older 2003 standard to the current standard. For example, in the -original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was -converted into two *LATIN SMALL LETTER S* (ss), whereas in the current -IDNA specification this conversion is not performed. - -.. code-block:: pycon - - >>> idna.encode('Königsgäßchen', uts46=True, transitional=True) - 'xn--knigsgsschen-lcb0w' - -Implementers should use transitional processing with caution, only in -rare cases where conversion from legacy labels to current labels must be -performed (i.e. IDNA implementations that pre-date 2008). For typical -applications that just need to convert labels, transitional processing -is unlikely to be beneficial and could produce unexpected incompatible -results. - -``encodings.idna`` Compatibility -++++++++++++++++++++++++++++++++ - -Function calls from the Python built-in ``encodings.idna`` module are -mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. 
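As a quick, hedged illustration of that mapping (expected values taken from the encode/decode examples earlier in this README), the compat shims behave like the old helpers:

import idna.compat

assert idna.compat.ToASCII("ドメイン") == b"xn--eckwd4c7c"
assert idna.compat.ToUnicode(b"xn--eckwd4c7c") == "ドメイン"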
-Simply substitute the ``import`` clause in your code to refer to the new -module name. - -Exceptions ----------- - -All errors raised during the conversion following the specification -should raise an exception derived from the ``idna.IDNAError`` base -class. - -More specific exceptions that may be generated as ``idna.IDNABidiError`` -when the error reflects an illegal combination of left-to-right and -right-to-left characters in a label; ``idna.InvalidCodepoint`` when -a specific codepoint is an illegal character in an IDN label (i.e. -INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is -illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ -but the contextual requirements are not satisfied.) - -Building and Diagnostics ------------------------- - -The IDNA and UTS 46 functionality relies upon pre-calculated lookup -tables for performance. These tables are derived from computing against -eligibility criteria in the respective standards. These tables are -computed using the command-line script ``tools/idna-data``. - -This tool will fetch relevant codepoint data from the Unicode repository -and perform the required calculations to identify eligibility. There are -three main modes: - -* ``idna-data make-libdata``. Generates ``idnadata.py`` and - ``uts46data.py``, the pre-calculated lookup tables used for IDNA and - UTS 46 conversions. Implementers who wish to track this library against - a different Unicode version may use this tool to manually generate a - different version of the ``idnadata.py`` and ``uts46data.py`` files. - -* ``idna-data make-table``. Generate a table of the IDNA disposition - (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix - B.1 of RFC 5892 and the pre-computed tables published by `IANA - `_. - -* ``idna-data U+0061``. Prints debugging output on the various - properties associated with an individual Unicode codepoint (in this - case, U+0061), that are used to assess the IDNA and UTS 46 status of a - codepoint. This is helpful in debugging or analysis. - -The tool accepts a number of arguments, described using ``idna-data --h``. Most notably, the ``--version`` argument allows the specification -of the version of Unicode to be used in computing the table data. For -example, ``idna-data --version 9.0.0 make-libdata`` will generate -library data against Unicode 9.0.0. - - -Additional Notes ----------------- - -* **Packages**. The latest tagged release version is published in the - `Python Package Index `_. - -* **Version support**. This library supports Python 3.6 and higher. - As this library serves as a low-level toolkit for a variety of - applications, many of which strive for broad compatibility with older - Python versions, there is no rush to remove older interpreter support. - Removing support for older versions should be well justified in that the - maintenance burden has become too high. - -* **Python 2**. Python 2 is supported by version 2.x of this library. - Use "idna<3" in your requirements file if you need this library for - a Python 2 application. Be advised that these versions are no longer - actively developed. - -* **Testing**. The library has a test suite based on each rule of the - IDNA specification, as well as tests that are provided as part of the - Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing - `_. - -* **Emoji**. It is an occasional request to support emoji domains in - this library. 
Encoding of symbols like emoji is expressly prohibited by - the technical standard IDNA 2008 and emoji domains are broadly phased - out across the domain industry due to associated security risks. For - now, applications that need to support these non-compliant labels - may wish to consider trying the encode/decode operation in this library - first, and then falling back to using `encodings.idna`. See `the Github - project `_ for more discussion. - diff --git a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/RECORD b/venv/lib/python3.10/site-packages/idna-3.10.dist-info/RECORD deleted file mode 100644 index 97aaf63..0000000 --- a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/RECORD +++ /dev/null @@ -1,22 +0,0 @@ -idna-3.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -idna-3.10.dist-info/LICENSE.md,sha256=pZ8LDvNjWHQQmkRhykT_enDVBpboFHZ7-vch1Mmw2w8,1541 -idna-3.10.dist-info/METADATA,sha256=URR5ZyDfQ1PCEGhkYoojqfi2Ra0tau2--lhwG4XSfjI,10158 -idna-3.10.dist-info/RECORD,, -idna-3.10.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868 -idna/__pycache__/__init__.cpython-310.pyc,, -idna/__pycache__/codec.cpython-310.pyc,, -idna/__pycache__/compat.cpython-310.pyc,, -idna/__pycache__/core.cpython-310.pyc,, -idna/__pycache__/idnadata.cpython-310.pyc,, -idna/__pycache__/intranges.cpython-310.pyc,, -idna/__pycache__/package_data.cpython-310.pyc,, -idna/__pycache__/uts46data.cpython-310.pyc,, -idna/codec.py,sha256=PEew3ItwzjW4hymbasnty2N2OXvNcgHB-JjrBuxHPYY,3422 -idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316 -idna/core.py,sha256=YJYyAMnwiQEPjVC4-Fqu_p4CJ6yKKuDGmppBNQNQpFs,13239 -idna/idnadata.py,sha256=W30GcIGvtOWYwAjZj4ZjuouUutC6ffgNuyjJy7fZ-lo,78306 -idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898 -idna/package_data.py,sha256=q59S3OXsc5VI8j6vSD0sGBMyk6zZ4vWFREE88yCJYKs,21 -idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -idna/uts46data.py,sha256=rt90K9J40gUSwppDPCrhjgi5AA6pWM65dEGRSf6rIhM,239289 diff --git a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/WHEEL b/venv/lib/python3.10/site-packages/idna-3.10.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/venv/lib/python3.10/site-packages/idna-3.10.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/idna/__init__.py b/venv/lib/python3.10/site-packages/idna/__init__.py deleted file mode 100644 index cfdc030..0000000 --- a/venv/lib/python3.10/site-packages/idna/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -from .core import ( - IDNABidiError, - IDNAError, - InvalidCodepoint, - InvalidCodepointContext, - alabel, - check_bidi, - check_hyphen_ok, - check_initial_combiner, - check_label, - check_nfc, - decode, - encode, - ulabel, - uts46_remap, - valid_contextj, - valid_contexto, - valid_label_length, - valid_string_length, -) -from .intranges import intranges_contain -from .package_data import __version__ - -__all__ = [ - "__version__", - "IDNABidiError", - "IDNAError", - "InvalidCodepoint", - "InvalidCodepointContext", - "alabel", - "check_bidi", - "check_hyphen_ok", - "check_initial_combiner", - "check_label", - "check_nfc", - "decode", - "encode", - "intranges_contain", - "ulabel", - "uts46_remap", - "valid_contextj", - "valid_contexto", - "valid_label_length", - "valid_string_length", -] diff --git 
a/venv/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 6c504a4..0000000 Binary files a/venv/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/codec.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/codec.cpython-310.pyc deleted file mode 100644 index e239212..0000000 Binary files a/venv/lib/python3.10/site-packages/idna/__pycache__/codec.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc deleted file mode 100644 index 8e5f197..0000000 Binary files a/venv/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc deleted file mode 100644 index 6987fbb..0000000 Binary files a/venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc deleted file mode 100644 index d9b9a1c..0000000 Binary files a/venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc deleted file mode 100644 index 5a1885d..0000000 Binary files a/venv/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/package_data.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/package_data.cpython-310.pyc deleted file mode 100644 index 7d709a6..0000000 Binary files a/venv/lib/python3.10/site-packages/idna/__pycache__/package_data.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc deleted file mode 100644 index 36c8e7a..0000000 Binary files a/venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/idna/codec.py b/venv/lib/python3.10/site-packages/idna/codec.py deleted file mode 100644 index 913abfd..0000000 --- a/venv/lib/python3.10/site-packages/idna/codec.py +++ /dev/null @@ -1,122 +0,0 @@ -import codecs -import re -from typing import Any, Optional, Tuple - -from .core import IDNAError, alabel, decode, encode, ulabel - -_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") - - -class Codec(codecs.Codec): - def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return b"", 0 - - return encode(data), len(data) - - def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return "", 0 - - return decode(data), len(data) - - -class 
IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return b"", 0 - - labels = _unicode_dots_re.split(data) - trailing_dot = b"" - if labels: - if not labels[-1]: - trailing_dot = b"." - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = b"." - - result = [] - size = 0 - for label in labels: - result.append(alabel(label)) - if size: - size += 1 - size += len(label) - - # Join with U+002E - result_bytes = b".".join(result) + trailing_dot - size += len(trailing_dot) - return result_bytes, size - - -class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return ("", 0) - - if not isinstance(data, str): - data = str(data, "ascii") - - labels = _unicode_dots_re.split(data) - trailing_dot = "" - if labels: - if not labels[-1]: - trailing_dot = "." - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = "." - - result = [] - size = 0 - for label in labels: - result.append(ulabel(label)) - if size: - size += 1 - size += len(label) - - result_str = ".".join(result) + trailing_dot - size += len(trailing_dot) - return (result_str, size) - - -class StreamWriter(Codec, codecs.StreamWriter): - pass - - -class StreamReader(Codec, codecs.StreamReader): - pass - - -def search_function(name: str) -> Optional[codecs.CodecInfo]: - if name != "idna2008": - return None - return codecs.CodecInfo( - name=name, - encode=Codec().encode, - decode=Codec().decode, - incrementalencoder=IncrementalEncoder, - incrementaldecoder=IncrementalDecoder, - streamwriter=StreamWriter, - streamreader=StreamReader, - ) - - -codecs.register(search_function) diff --git a/venv/lib/python3.10/site-packages/idna/compat.py b/venv/lib/python3.10/site-packages/idna/compat.py deleted file mode 100644 index 1df9f2a..0000000 --- a/venv/lib/python3.10/site-packages/idna/compat.py +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Any, Union - -from .core import decode, encode - - -def ToASCII(label: str) -> bytes: - return encode(label) - - -def ToUnicode(label: Union[bytes, bytearray]) -> str: - return decode(label) - - -def nameprep(s: Any) -> None: - raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") diff --git a/venv/lib/python3.10/site-packages/idna/core.py b/venv/lib/python3.10/site-packages/idna/core.py deleted file mode 100644 index 9115f12..0000000 --- a/venv/lib/python3.10/site-packages/idna/core.py +++ /dev/null @@ -1,437 +0,0 @@ -import bisect -import re -import unicodedata -from typing import Optional, Union - -from . 
import idnadata -from .intranges import intranges_contain - -_virama_combining_class = 9 -_alabel_prefix = b"xn--" -_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") - - -class IDNAError(UnicodeError): - """Base exception for all IDNA-encoding related problems""" - - pass - - -class IDNABidiError(IDNAError): - """Exception when bidirectional requirements are not satisfied""" - - pass - - -class InvalidCodepoint(IDNAError): - """Exception when a disallowed or unallocated codepoint is used""" - - pass - - -class InvalidCodepointContext(IDNAError): - """Exception when the codepoint is not valid in the context it is used""" - - pass - - -def _combining_class(cp: int) -> int: - v = unicodedata.combining(chr(cp)) - if v == 0: - if not unicodedata.name(chr(cp)): - raise ValueError("Unknown character in unicodedata") - return v - - -def _is_script(cp: str, script: str) -> bool: - return intranges_contain(ord(cp), idnadata.scripts[script]) - - -def _punycode(s: str) -> bytes: - return s.encode("punycode") - - -def _unot(s: int) -> str: - return "U+{:04X}".format(s) - - -def valid_label_length(label: Union[bytes, str]) -> bool: - if len(label) > 63: - return False - return True - - -def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: - if len(label) > (254 if trailing_dot else 253): - return False - return True - - -def check_bidi(label: str, check_ltr: bool = False) -> bool: - # Bidi rules should only be applied if string contains RTL characters - bidi_label = False - for idx, cp in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - if direction == "": - # String likely comes from a newer version of Unicode - raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) - if direction in ["R", "AL", "AN"]: - bidi_label = True - if not bidi_label and not check_ltr: - return True - - # Bidi rule 1 - direction = unicodedata.bidirectional(label[0]) - if direction in ["R", "AL"]: - rtl = True - elif direction == "L": - rtl = False - else: - raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) - - valid_ending = False - number_type: Optional[str] = None - for idx, cp in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - - if rtl: - # Bidi rule 2 - if direction not in [ - "R", - "AL", - "AN", - "EN", - "ES", - "CS", - "ET", - "ON", - "BN", - "NSM", - ]: - raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) - # Bidi rule 3 - if direction in ["R", "AL", "EN", "AN"]: - valid_ending = True - elif direction != "NSM": - valid_ending = False - # Bidi rule 4 - if direction in ["AN", "EN"]: - if not number_type: - number_type = direction - else: - if number_type != direction: - raise IDNABidiError("Can not mix numeral types in a right-to-left label") - else: - # Bidi rule 5 - if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: - raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) - # Bidi rule 6 - if direction in ["L", "EN"]: - valid_ending = True - elif direction != "NSM": - valid_ending = False - - if not valid_ending: - raise IDNABidiError("Label ends with illegal codepoint directionality") - - return True - - -def check_initial_combiner(label: str) -> bool: - if unicodedata.category(label[0])[0] == "M": - raise IDNAError("Label begins with an illegal combining character") - return True - - -def check_hyphen_ok(label: str) -> bool: 
- if label[2:4] == "--": - raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") - if label[0] == "-" or label[-1] == "-": - raise IDNAError("Label must not start or end with a hyphen") - return True - - -def check_nfc(label: str) -> None: - if unicodedata.normalize("NFC", label) != label: - raise IDNAError("Label must be in Normalization Form C") - - -def valid_contextj(label: str, pos: int) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x200C: - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - - ok = False - for i in range(pos - 1, -1, -1): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord("T"): - continue - elif joining_type in [ord("L"), ord("D")]: - ok = True - break - else: - break - - if not ok: - return False - - ok = False - for i in range(pos + 1, len(label)): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord("T"): - continue - elif joining_type in [ord("R"), ord("D")]: - ok = True - break - else: - break - return ok - - if cp_value == 0x200D: - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - return False - - else: - return False - - -def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x00B7: - if 0 < pos < len(label) - 1: - if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: - return True - return False - - elif cp_value == 0x0375: - if pos < len(label) - 1 and len(label) > 1: - return _is_script(label[pos + 1], "Greek") - return False - - elif cp_value == 0x05F3 or cp_value == 0x05F4: - if pos > 0: - return _is_script(label[pos - 1], "Hebrew") - return False - - elif cp_value == 0x30FB: - for cp in label: - if cp == "\u30fb": - continue - if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"): - return True - return False - - elif 0x660 <= cp_value <= 0x669: - for cp in label: - if 0x6F0 <= ord(cp) <= 0x06F9: - return False - return True - - elif 0x6F0 <= cp_value <= 0x6F9: - for cp in label: - if 0x660 <= ord(cp) <= 0x0669: - return False - return True - - return False - - -def check_label(label: Union[str, bytes, bytearray]) -> None: - if isinstance(label, (bytes, bytearray)): - label = label.decode("utf-8") - if len(label) == 0: - raise IDNAError("Empty Label") - - check_nfc(label) - check_hyphen_ok(label) - check_initial_combiner(label) - - for pos, cp in enumerate(label): - cp_value = ord(cp) - if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): - continue - elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): - try: - if not valid_contextj(label, pos): - raise InvalidCodepointContext( - "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) - ) - except ValueError: - raise IDNAError( - "Unknown codepoint adjacent to joiner {} at position {} in {}".format( - _unot(cp_value), pos + 1, repr(label) - ) - ) - elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): - if not valid_contexto(label, pos): - raise InvalidCodepointContext( - "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) - ) - else: - raise InvalidCodepoint( - "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) - ) - - check_bidi(label) - - -def alabel(label: str) -> bytes: - try: - label_bytes = label.encode("ascii") - 
ulabel(label_bytes) - if not valid_label_length(label_bytes): - raise IDNAError("Label too long") - return label_bytes - except UnicodeEncodeError: - pass - - check_label(label) - label_bytes = _alabel_prefix + _punycode(label) - - if not valid_label_length(label_bytes): - raise IDNAError("Label too long") - - return label_bytes - - -def ulabel(label: Union[str, bytes, bytearray]) -> str: - if not isinstance(label, (bytes, bytearray)): - try: - label_bytes = label.encode("ascii") - except UnicodeEncodeError: - check_label(label) - return label - else: - label_bytes = label - - label_bytes = label_bytes.lower() - if label_bytes.startswith(_alabel_prefix): - label_bytes = label_bytes[len(_alabel_prefix) :] - if not label_bytes: - raise IDNAError("Malformed A-label, no Punycode eligible content found") - if label_bytes.decode("ascii")[-1] == "-": - raise IDNAError("A-label must not end with a hyphen") - else: - check_label(label_bytes) - return label_bytes.decode("ascii") - - try: - label = label_bytes.decode("punycode") - except UnicodeError: - raise IDNAError("Invalid A-label") - check_label(label) - return label - - -def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: - """Re-map the characters in the string according to UTS46 processing.""" - from .uts46data import uts46data - - output = "" - - for pos, char in enumerate(domain): - code_point = ord(char) - try: - uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] - status = uts46row[1] - replacement: Optional[str] = None - if len(uts46row) == 3: - replacement = uts46row[2] - if ( - status == "V" - or (status == "D" and not transitional) - or (status == "3" and not std3_rules and replacement is None) - ): - output += char - elif replacement is not None and ( - status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional) - ): - output += replacement - elif status != "I": - raise IndexError() - except IndexError: - raise InvalidCodepoint( - "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) - ) - - return unicodedata.normalize("NFC", output) - - -def encode( - s: Union[str, bytes, bytearray], - strict: bool = False, - uts46: bool = False, - std3_rules: bool = False, - transitional: bool = False, -) -> bytes: - if not isinstance(s, str): - try: - s = str(s, "ascii") - except UnicodeDecodeError: - raise IDNAError("should pass a unicode string to the function rather than a byte string.") - if uts46: - s = uts46_remap(s, std3_rules, transitional) - trailing_dot = False - result = [] - if strict: - labels = s.split(".") - else: - labels = _unicode_dots_re.split(s) - if not labels or labels == [""]: - raise IDNAError("Empty domain") - if labels[-1] == "": - del labels[-1] - trailing_dot = True - for label in labels: - s = alabel(label) - if s: - result.append(s) - else: - raise IDNAError("Empty label") - if trailing_dot: - result.append(b"") - s = b".".join(result) - if not valid_string_length(s, trailing_dot): - raise IDNAError("Domain too long") - return s - - -def decode( - s: Union[str, bytes, bytearray], - strict: bool = False, - uts46: bool = False, - std3_rules: bool = False, -) -> str: - try: - if not isinstance(s, str): - s = str(s, "ascii") - except UnicodeDecodeError: - raise IDNAError("Invalid ASCII in A-label") - if uts46: - s = uts46_remap(s, std3_rules, False) - trailing_dot = False - result = [] - if not strict: - labels = _unicode_dots_re.split(s) - else: - 
labels = s.split(".") - if not labels or labels == [""]: - raise IDNAError("Empty domain") - if not labels[-1]: - del labels[-1] - trailing_dot = True - for label in labels: - s = ulabel(label) - if s: - result.append(s) - else: - raise IDNAError("Empty label") - if trailing_dot: - result.append("") - return ".".join(result) diff --git a/venv/lib/python3.10/site-packages/idna/idnadata.py b/venv/lib/python3.10/site-packages/idna/idnadata.py deleted file mode 100644 index 4be6004..0000000 --- a/venv/lib/python3.10/site-packages/idna/idnadata.py +++ /dev/null @@ -1,4243 +0,0 @@ -# This file is automatically generated by tools/idna-data - -__version__ = "15.1.0" -scripts = { - "Greek": ( - 0x37000000374, - 0x37500000378, - 0x37A0000037E, - 0x37F00000380, - 0x38400000385, - 0x38600000387, - 0x3880000038B, - 0x38C0000038D, - 0x38E000003A2, - 0x3A3000003E2, - 0x3F000000400, - 0x1D2600001D2B, - 0x1D5D00001D62, - 0x1D6600001D6B, - 0x1DBF00001DC0, - 0x1F0000001F16, - 0x1F1800001F1E, - 0x1F2000001F46, - 0x1F4800001F4E, - 0x1F5000001F58, - 0x1F5900001F5A, - 0x1F5B00001F5C, - 0x1F5D00001F5E, - 0x1F5F00001F7E, - 0x1F8000001FB5, - 0x1FB600001FC5, - 0x1FC600001FD4, - 0x1FD600001FDC, - 0x1FDD00001FF0, - 0x1FF200001FF5, - 0x1FF600001FFF, - 0x212600002127, - 0xAB650000AB66, - 0x101400001018F, - 0x101A0000101A1, - 0x1D2000001D246, - ), - "Han": ( - 0x2E8000002E9A, - 0x2E9B00002EF4, - 0x2F0000002FD6, - 0x300500003006, - 0x300700003008, - 0x30210000302A, - 0x30380000303C, - 0x340000004DC0, - 0x4E000000A000, - 0xF9000000FA6E, - 0xFA700000FADA, - 0x16FE200016FE4, - 0x16FF000016FF2, - 0x200000002A6E0, - 0x2A7000002B73A, - 0x2B7400002B81E, - 0x2B8200002CEA2, - 0x2CEB00002EBE1, - 0x2EBF00002EE5E, - 0x2F8000002FA1E, - 0x300000003134B, - 0x31350000323B0, - ), - "Hebrew": ( - 0x591000005C8, - 0x5D0000005EB, - 0x5EF000005F5, - 0xFB1D0000FB37, - 0xFB380000FB3D, - 0xFB3E0000FB3F, - 0xFB400000FB42, - 0xFB430000FB45, - 0xFB460000FB50, - ), - "Hiragana": ( - 0x304100003097, - 0x309D000030A0, - 0x1B0010001B120, - 0x1B1320001B133, - 0x1B1500001B153, - 0x1F2000001F201, - ), - "Katakana": ( - 0x30A1000030FB, - 0x30FD00003100, - 0x31F000003200, - 0x32D0000032FF, - 0x330000003358, - 0xFF660000FF70, - 0xFF710000FF9E, - 0x1AFF00001AFF4, - 0x1AFF50001AFFC, - 0x1AFFD0001AFFF, - 0x1B0000001B001, - 0x1B1200001B123, - 0x1B1550001B156, - 0x1B1640001B168, - ), -} -joining_types = { - 0xAD: 84, - 0x300: 84, - 0x301: 84, - 0x302: 84, - 0x303: 84, - 0x304: 84, - 0x305: 84, - 0x306: 84, - 0x307: 84, - 0x308: 84, - 0x309: 84, - 0x30A: 84, - 0x30B: 84, - 0x30C: 84, - 0x30D: 84, - 0x30E: 84, - 0x30F: 84, - 0x310: 84, - 0x311: 84, - 0x312: 84, - 0x313: 84, - 0x314: 84, - 0x315: 84, - 0x316: 84, - 0x317: 84, - 0x318: 84, - 0x319: 84, - 0x31A: 84, - 0x31B: 84, - 0x31C: 84, - 0x31D: 84, - 0x31E: 84, - 0x31F: 84, - 0x320: 84, - 0x321: 84, - 0x322: 84, - 0x323: 84, - 0x324: 84, - 0x325: 84, - 0x326: 84, - 0x327: 84, - 0x328: 84, - 0x329: 84, - 0x32A: 84, - 0x32B: 84, - 0x32C: 84, - 0x32D: 84, - 0x32E: 84, - 0x32F: 84, - 0x330: 84, - 0x331: 84, - 0x332: 84, - 0x333: 84, - 0x334: 84, - 0x335: 84, - 0x336: 84, - 0x337: 84, - 0x338: 84, - 0x339: 84, - 0x33A: 84, - 0x33B: 84, - 0x33C: 84, - 0x33D: 84, - 0x33E: 84, - 0x33F: 84, - 0x340: 84, - 0x341: 84, - 0x342: 84, - 0x343: 84, - 0x344: 84, - 0x345: 84, - 0x346: 84, - 0x347: 84, - 0x348: 84, - 0x349: 84, - 0x34A: 84, - 0x34B: 84, - 0x34C: 84, - 0x34D: 84, - 0x34E: 84, - 0x34F: 84, - 0x350: 84, - 0x351: 84, - 0x352: 84, - 0x353: 84, - 0x354: 84, - 0x355: 84, - 0x356: 84, - 0x357: 84, - 0x358: 84, - 0x359: 
84, - 0x35A: 84, - 0x35B: 84, - 0x35C: 84, - 0x35D: 84, - 0x35E: 84, - 0x35F: 84, - 0x360: 84, - 0x361: 84, - 0x362: 84, - 0x363: 84, - 0x364: 84, - 0x365: 84, - 0x366: 84, - 0x367: 84, - 0x368: 84, - 0x369: 84, - 0x36A: 84, - 0x36B: 84, - 0x36C: 84, - 0x36D: 84, - 0x36E: 84, - 0x36F: 84, - 0x483: 84, - 0x484: 84, - 0x485: 84, - 0x486: 84, - 0x487: 84, - 0x488: 84, - 0x489: 84, - 0x591: 84, - 0x592: 84, - 0x593: 84, - 0x594: 84, - 0x595: 84, - 0x596: 84, - 0x597: 84, - 0x598: 84, - 0x599: 84, - 0x59A: 84, - 0x59B: 84, - 0x59C: 84, - 0x59D: 84, - 0x59E: 84, - 0x59F: 84, - 0x5A0: 84, - 0x5A1: 84, - 0x5A2: 84, - 0x5A3: 84, - 0x5A4: 84, - 0x5A5: 84, - 0x5A6: 84, - 0x5A7: 84, - 0x5A8: 84, - 0x5A9: 84, - 0x5AA: 84, - 0x5AB: 84, - 0x5AC: 84, - 0x5AD: 84, - 0x5AE: 84, - 0x5AF: 84, - 0x5B0: 84, - 0x5B1: 84, - 0x5B2: 84, - 0x5B3: 84, - 0x5B4: 84, - 0x5B5: 84, - 0x5B6: 84, - 0x5B7: 84, - 0x5B8: 84, - 0x5B9: 84, - 0x5BA: 84, - 0x5BB: 84, - 0x5BC: 84, - 0x5BD: 84, - 0x5BF: 84, - 0x5C1: 84, - 0x5C2: 84, - 0x5C4: 84, - 0x5C5: 84, - 0x5C7: 84, - 0x610: 84, - 0x611: 84, - 0x612: 84, - 0x613: 84, - 0x614: 84, - 0x615: 84, - 0x616: 84, - 0x617: 84, - 0x618: 84, - 0x619: 84, - 0x61A: 84, - 0x61C: 84, - 0x620: 68, - 0x622: 82, - 0x623: 82, - 0x624: 82, - 0x625: 82, - 0x626: 68, - 0x627: 82, - 0x628: 68, - 0x629: 82, - 0x62A: 68, - 0x62B: 68, - 0x62C: 68, - 0x62D: 68, - 0x62E: 68, - 0x62F: 82, - 0x630: 82, - 0x631: 82, - 0x632: 82, - 0x633: 68, - 0x634: 68, - 0x635: 68, - 0x636: 68, - 0x637: 68, - 0x638: 68, - 0x639: 68, - 0x63A: 68, - 0x63B: 68, - 0x63C: 68, - 0x63D: 68, - 0x63E: 68, - 0x63F: 68, - 0x640: 67, - 0x641: 68, - 0x642: 68, - 0x643: 68, - 0x644: 68, - 0x645: 68, - 0x646: 68, - 0x647: 68, - 0x648: 82, - 0x649: 68, - 0x64A: 68, - 0x64B: 84, - 0x64C: 84, - 0x64D: 84, - 0x64E: 84, - 0x64F: 84, - 0x650: 84, - 0x651: 84, - 0x652: 84, - 0x653: 84, - 0x654: 84, - 0x655: 84, - 0x656: 84, - 0x657: 84, - 0x658: 84, - 0x659: 84, - 0x65A: 84, - 0x65B: 84, - 0x65C: 84, - 0x65D: 84, - 0x65E: 84, - 0x65F: 84, - 0x66E: 68, - 0x66F: 68, - 0x670: 84, - 0x671: 82, - 0x672: 82, - 0x673: 82, - 0x675: 82, - 0x676: 82, - 0x677: 82, - 0x678: 68, - 0x679: 68, - 0x67A: 68, - 0x67B: 68, - 0x67C: 68, - 0x67D: 68, - 0x67E: 68, - 0x67F: 68, - 0x680: 68, - 0x681: 68, - 0x682: 68, - 0x683: 68, - 0x684: 68, - 0x685: 68, - 0x686: 68, - 0x687: 68, - 0x688: 82, - 0x689: 82, - 0x68A: 82, - 0x68B: 82, - 0x68C: 82, - 0x68D: 82, - 0x68E: 82, - 0x68F: 82, - 0x690: 82, - 0x691: 82, - 0x692: 82, - 0x693: 82, - 0x694: 82, - 0x695: 82, - 0x696: 82, - 0x697: 82, - 0x698: 82, - 0x699: 82, - 0x69A: 68, - 0x69B: 68, - 0x69C: 68, - 0x69D: 68, - 0x69E: 68, - 0x69F: 68, - 0x6A0: 68, - 0x6A1: 68, - 0x6A2: 68, - 0x6A3: 68, - 0x6A4: 68, - 0x6A5: 68, - 0x6A6: 68, - 0x6A7: 68, - 0x6A8: 68, - 0x6A9: 68, - 0x6AA: 68, - 0x6AB: 68, - 0x6AC: 68, - 0x6AD: 68, - 0x6AE: 68, - 0x6AF: 68, - 0x6B0: 68, - 0x6B1: 68, - 0x6B2: 68, - 0x6B3: 68, - 0x6B4: 68, - 0x6B5: 68, - 0x6B6: 68, - 0x6B7: 68, - 0x6B8: 68, - 0x6B9: 68, - 0x6BA: 68, - 0x6BB: 68, - 0x6BC: 68, - 0x6BD: 68, - 0x6BE: 68, - 0x6BF: 68, - 0x6C0: 82, - 0x6C1: 68, - 0x6C2: 68, - 0x6C3: 82, - 0x6C4: 82, - 0x6C5: 82, - 0x6C6: 82, - 0x6C7: 82, - 0x6C8: 82, - 0x6C9: 82, - 0x6CA: 82, - 0x6CB: 82, - 0x6CC: 68, - 0x6CD: 82, - 0x6CE: 68, - 0x6CF: 82, - 0x6D0: 68, - 0x6D1: 68, - 0x6D2: 82, - 0x6D3: 82, - 0x6D5: 82, - 0x6D6: 84, - 0x6D7: 84, - 0x6D8: 84, - 0x6D9: 84, - 0x6DA: 84, - 0x6DB: 84, - 0x6DC: 84, - 0x6DF: 84, - 0x6E0: 84, - 0x6E1: 84, - 0x6E2: 84, - 0x6E3: 84, - 0x6E4: 84, - 0x6E7: 84, - 0x6E8: 84, - 0x6EA: 84, - 
0x6EB: 84, - 0x6EC: 84, - 0x6ED: 84, - 0x6EE: 82, - 0x6EF: 82, - 0x6FA: 68, - 0x6FB: 68, - 0x6FC: 68, - 0x6FF: 68, - 0x70F: 84, - 0x710: 82, - 0x711: 84, - 0x712: 68, - 0x713: 68, - 0x714: 68, - 0x715: 82, - 0x716: 82, - 0x717: 82, - 0x718: 82, - 0x719: 82, - 0x71A: 68, - 0x71B: 68, - 0x71C: 68, - 0x71D: 68, - 0x71E: 82, - 0x71F: 68, - 0x720: 68, - 0x721: 68, - 0x722: 68, - 0x723: 68, - 0x724: 68, - 0x725: 68, - 0x726: 68, - 0x727: 68, - 0x728: 82, - 0x729: 68, - 0x72A: 82, - 0x72B: 68, - 0x72C: 82, - 0x72D: 68, - 0x72E: 68, - 0x72F: 82, - 0x730: 84, - 0x731: 84, - 0x732: 84, - 0x733: 84, - 0x734: 84, - 0x735: 84, - 0x736: 84, - 0x737: 84, - 0x738: 84, - 0x739: 84, - 0x73A: 84, - 0x73B: 84, - 0x73C: 84, - 0x73D: 84, - 0x73E: 84, - 0x73F: 84, - 0x740: 84, - 0x741: 84, - 0x742: 84, - 0x743: 84, - 0x744: 84, - 0x745: 84, - 0x746: 84, - 0x747: 84, - 0x748: 84, - 0x749: 84, - 0x74A: 84, - 0x74D: 82, - 0x74E: 68, - 0x74F: 68, - 0x750: 68, - 0x751: 68, - 0x752: 68, - 0x753: 68, - 0x754: 68, - 0x755: 68, - 0x756: 68, - 0x757: 68, - 0x758: 68, - 0x759: 82, - 0x75A: 82, - 0x75B: 82, - 0x75C: 68, - 0x75D: 68, - 0x75E: 68, - 0x75F: 68, - 0x760: 68, - 0x761: 68, - 0x762: 68, - 0x763: 68, - 0x764: 68, - 0x765: 68, - 0x766: 68, - 0x767: 68, - 0x768: 68, - 0x769: 68, - 0x76A: 68, - 0x76B: 82, - 0x76C: 82, - 0x76D: 68, - 0x76E: 68, - 0x76F: 68, - 0x770: 68, - 0x771: 82, - 0x772: 68, - 0x773: 82, - 0x774: 82, - 0x775: 68, - 0x776: 68, - 0x777: 68, - 0x778: 82, - 0x779: 82, - 0x77A: 68, - 0x77B: 68, - 0x77C: 68, - 0x77D: 68, - 0x77E: 68, - 0x77F: 68, - 0x7A6: 84, - 0x7A7: 84, - 0x7A8: 84, - 0x7A9: 84, - 0x7AA: 84, - 0x7AB: 84, - 0x7AC: 84, - 0x7AD: 84, - 0x7AE: 84, - 0x7AF: 84, - 0x7B0: 84, - 0x7CA: 68, - 0x7CB: 68, - 0x7CC: 68, - 0x7CD: 68, - 0x7CE: 68, - 0x7CF: 68, - 0x7D0: 68, - 0x7D1: 68, - 0x7D2: 68, - 0x7D3: 68, - 0x7D4: 68, - 0x7D5: 68, - 0x7D6: 68, - 0x7D7: 68, - 0x7D8: 68, - 0x7D9: 68, - 0x7DA: 68, - 0x7DB: 68, - 0x7DC: 68, - 0x7DD: 68, - 0x7DE: 68, - 0x7DF: 68, - 0x7E0: 68, - 0x7E1: 68, - 0x7E2: 68, - 0x7E3: 68, - 0x7E4: 68, - 0x7E5: 68, - 0x7E6: 68, - 0x7E7: 68, - 0x7E8: 68, - 0x7E9: 68, - 0x7EA: 68, - 0x7EB: 84, - 0x7EC: 84, - 0x7ED: 84, - 0x7EE: 84, - 0x7EF: 84, - 0x7F0: 84, - 0x7F1: 84, - 0x7F2: 84, - 0x7F3: 84, - 0x7FA: 67, - 0x7FD: 84, - 0x816: 84, - 0x817: 84, - 0x818: 84, - 0x819: 84, - 0x81B: 84, - 0x81C: 84, - 0x81D: 84, - 0x81E: 84, - 0x81F: 84, - 0x820: 84, - 0x821: 84, - 0x822: 84, - 0x823: 84, - 0x825: 84, - 0x826: 84, - 0x827: 84, - 0x829: 84, - 0x82A: 84, - 0x82B: 84, - 0x82C: 84, - 0x82D: 84, - 0x840: 82, - 0x841: 68, - 0x842: 68, - 0x843: 68, - 0x844: 68, - 0x845: 68, - 0x846: 82, - 0x847: 82, - 0x848: 68, - 0x849: 82, - 0x84A: 68, - 0x84B: 68, - 0x84C: 68, - 0x84D: 68, - 0x84E: 68, - 0x84F: 68, - 0x850: 68, - 0x851: 68, - 0x852: 68, - 0x853: 68, - 0x854: 82, - 0x855: 68, - 0x856: 82, - 0x857: 82, - 0x858: 82, - 0x859: 84, - 0x85A: 84, - 0x85B: 84, - 0x860: 68, - 0x862: 68, - 0x863: 68, - 0x864: 68, - 0x865: 68, - 0x867: 82, - 0x868: 68, - 0x869: 82, - 0x86A: 82, - 0x870: 82, - 0x871: 82, - 0x872: 82, - 0x873: 82, - 0x874: 82, - 0x875: 82, - 0x876: 82, - 0x877: 82, - 0x878: 82, - 0x879: 82, - 0x87A: 82, - 0x87B: 82, - 0x87C: 82, - 0x87D: 82, - 0x87E: 82, - 0x87F: 82, - 0x880: 82, - 0x881: 82, - 0x882: 82, - 0x883: 67, - 0x884: 67, - 0x885: 67, - 0x886: 68, - 0x889: 68, - 0x88A: 68, - 0x88B: 68, - 0x88C: 68, - 0x88D: 68, - 0x88E: 82, - 0x898: 84, - 0x899: 84, - 0x89A: 84, - 0x89B: 84, - 0x89C: 84, - 0x89D: 84, - 0x89E: 84, - 0x89F: 84, - 0x8A0: 68, - 0x8A1: 68, - 0x8A2: 68, - 
0x8A3: 68, - 0x8A4: 68, - 0x8A5: 68, - 0x8A6: 68, - 0x8A7: 68, - 0x8A8: 68, - 0x8A9: 68, - 0x8AA: 82, - 0x8AB: 82, - 0x8AC: 82, - 0x8AE: 82, - 0x8AF: 68, - 0x8B0: 68, - 0x8B1: 82, - 0x8B2: 82, - 0x8B3: 68, - 0x8B4: 68, - 0x8B5: 68, - 0x8B6: 68, - 0x8B7: 68, - 0x8B8: 68, - 0x8B9: 82, - 0x8BA: 68, - 0x8BB: 68, - 0x8BC: 68, - 0x8BD: 68, - 0x8BE: 68, - 0x8BF: 68, - 0x8C0: 68, - 0x8C1: 68, - 0x8C2: 68, - 0x8C3: 68, - 0x8C4: 68, - 0x8C5: 68, - 0x8C6: 68, - 0x8C7: 68, - 0x8C8: 68, - 0x8CA: 84, - 0x8CB: 84, - 0x8CC: 84, - 0x8CD: 84, - 0x8CE: 84, - 0x8CF: 84, - 0x8D0: 84, - 0x8D1: 84, - 0x8D2: 84, - 0x8D3: 84, - 0x8D4: 84, - 0x8D5: 84, - 0x8D6: 84, - 0x8D7: 84, - 0x8D8: 84, - 0x8D9: 84, - 0x8DA: 84, - 0x8DB: 84, - 0x8DC: 84, - 0x8DD: 84, - 0x8DE: 84, - 0x8DF: 84, - 0x8E0: 84, - 0x8E1: 84, - 0x8E3: 84, - 0x8E4: 84, - 0x8E5: 84, - 0x8E6: 84, - 0x8E7: 84, - 0x8E8: 84, - 0x8E9: 84, - 0x8EA: 84, - 0x8EB: 84, - 0x8EC: 84, - 0x8ED: 84, - 0x8EE: 84, - 0x8EF: 84, - 0x8F0: 84, - 0x8F1: 84, - 0x8F2: 84, - 0x8F3: 84, - 0x8F4: 84, - 0x8F5: 84, - 0x8F6: 84, - 0x8F7: 84, - 0x8F8: 84, - 0x8F9: 84, - 0x8FA: 84, - 0x8FB: 84, - 0x8FC: 84, - 0x8FD: 84, - 0x8FE: 84, - 0x8FF: 84, - 0x900: 84, - 0x901: 84, - 0x902: 84, - 0x93A: 84, - 0x93C: 84, - 0x941: 84, - 0x942: 84, - 0x943: 84, - 0x944: 84, - 0x945: 84, - 0x946: 84, - 0x947: 84, - 0x948: 84, - 0x94D: 84, - 0x951: 84, - 0x952: 84, - 0x953: 84, - 0x954: 84, - 0x955: 84, - 0x956: 84, - 0x957: 84, - 0x962: 84, - 0x963: 84, - 0x981: 84, - 0x9BC: 84, - 0x9C1: 84, - 0x9C2: 84, - 0x9C3: 84, - 0x9C4: 84, - 0x9CD: 84, - 0x9E2: 84, - 0x9E3: 84, - 0x9FE: 84, - 0xA01: 84, - 0xA02: 84, - 0xA3C: 84, - 0xA41: 84, - 0xA42: 84, - 0xA47: 84, - 0xA48: 84, - 0xA4B: 84, - 0xA4C: 84, - 0xA4D: 84, - 0xA51: 84, - 0xA70: 84, - 0xA71: 84, - 0xA75: 84, - 0xA81: 84, - 0xA82: 84, - 0xABC: 84, - 0xAC1: 84, - 0xAC2: 84, - 0xAC3: 84, - 0xAC4: 84, - 0xAC5: 84, - 0xAC7: 84, - 0xAC8: 84, - 0xACD: 84, - 0xAE2: 84, - 0xAE3: 84, - 0xAFA: 84, - 0xAFB: 84, - 0xAFC: 84, - 0xAFD: 84, - 0xAFE: 84, - 0xAFF: 84, - 0xB01: 84, - 0xB3C: 84, - 0xB3F: 84, - 0xB41: 84, - 0xB42: 84, - 0xB43: 84, - 0xB44: 84, - 0xB4D: 84, - 0xB55: 84, - 0xB56: 84, - 0xB62: 84, - 0xB63: 84, - 0xB82: 84, - 0xBC0: 84, - 0xBCD: 84, - 0xC00: 84, - 0xC04: 84, - 0xC3C: 84, - 0xC3E: 84, - 0xC3F: 84, - 0xC40: 84, - 0xC46: 84, - 0xC47: 84, - 0xC48: 84, - 0xC4A: 84, - 0xC4B: 84, - 0xC4C: 84, - 0xC4D: 84, - 0xC55: 84, - 0xC56: 84, - 0xC62: 84, - 0xC63: 84, - 0xC81: 84, - 0xCBC: 84, - 0xCBF: 84, - 0xCC6: 84, - 0xCCC: 84, - 0xCCD: 84, - 0xCE2: 84, - 0xCE3: 84, - 0xD00: 84, - 0xD01: 84, - 0xD3B: 84, - 0xD3C: 84, - 0xD41: 84, - 0xD42: 84, - 0xD43: 84, - 0xD44: 84, - 0xD4D: 84, - 0xD62: 84, - 0xD63: 84, - 0xD81: 84, - 0xDCA: 84, - 0xDD2: 84, - 0xDD3: 84, - 0xDD4: 84, - 0xDD6: 84, - 0xE31: 84, - 0xE34: 84, - 0xE35: 84, - 0xE36: 84, - 0xE37: 84, - 0xE38: 84, - 0xE39: 84, - 0xE3A: 84, - 0xE47: 84, - 0xE48: 84, - 0xE49: 84, - 0xE4A: 84, - 0xE4B: 84, - 0xE4C: 84, - 0xE4D: 84, - 0xE4E: 84, - 0xEB1: 84, - 0xEB4: 84, - 0xEB5: 84, - 0xEB6: 84, - 0xEB7: 84, - 0xEB8: 84, - 0xEB9: 84, - 0xEBA: 84, - 0xEBB: 84, - 0xEBC: 84, - 0xEC8: 84, - 0xEC9: 84, - 0xECA: 84, - 0xECB: 84, - 0xECC: 84, - 0xECD: 84, - 0xECE: 84, - 0xF18: 84, - 0xF19: 84, - 0xF35: 84, - 0xF37: 84, - 0xF39: 84, - 0xF71: 84, - 0xF72: 84, - 0xF73: 84, - 0xF74: 84, - 0xF75: 84, - 0xF76: 84, - 0xF77: 84, - 0xF78: 84, - 0xF79: 84, - 0xF7A: 84, - 0xF7B: 84, - 0xF7C: 84, - 0xF7D: 84, - 0xF7E: 84, - 0xF80: 84, - 0xF81: 84, - 0xF82: 84, - 0xF83: 84, - 0xF84: 84, - 0xF86: 84, - 0xF87: 84, - 0xF8D: 84, - 
0xF8E: 84, - 0xF8F: 84, - 0xF90: 84, - 0xF91: 84, - 0xF92: 84, - 0xF93: 84, - 0xF94: 84, - 0xF95: 84, - 0xF96: 84, - 0xF97: 84, - 0xF99: 84, - 0xF9A: 84, - 0xF9B: 84, - 0xF9C: 84, - 0xF9D: 84, - 0xF9E: 84, - 0xF9F: 84, - 0xFA0: 84, - 0xFA1: 84, - 0xFA2: 84, - 0xFA3: 84, - 0xFA4: 84, - 0xFA5: 84, - 0xFA6: 84, - 0xFA7: 84, - 0xFA8: 84, - 0xFA9: 84, - 0xFAA: 84, - 0xFAB: 84, - 0xFAC: 84, - 0xFAD: 84, - 0xFAE: 84, - 0xFAF: 84, - 0xFB0: 84, - 0xFB1: 84, - 0xFB2: 84, - 0xFB3: 84, - 0xFB4: 84, - 0xFB5: 84, - 0xFB6: 84, - 0xFB7: 84, - 0xFB8: 84, - 0xFB9: 84, - 0xFBA: 84, - 0xFBB: 84, - 0xFBC: 84, - 0xFC6: 84, - 0x102D: 84, - 0x102E: 84, - 0x102F: 84, - 0x1030: 84, - 0x1032: 84, - 0x1033: 84, - 0x1034: 84, - 0x1035: 84, - 0x1036: 84, - 0x1037: 84, - 0x1039: 84, - 0x103A: 84, - 0x103D: 84, - 0x103E: 84, - 0x1058: 84, - 0x1059: 84, - 0x105E: 84, - 0x105F: 84, - 0x1060: 84, - 0x1071: 84, - 0x1072: 84, - 0x1073: 84, - 0x1074: 84, - 0x1082: 84, - 0x1085: 84, - 0x1086: 84, - 0x108D: 84, - 0x109D: 84, - 0x135D: 84, - 0x135E: 84, - 0x135F: 84, - 0x1712: 84, - 0x1713: 84, - 0x1714: 84, - 0x1732: 84, - 0x1733: 84, - 0x1752: 84, - 0x1753: 84, - 0x1772: 84, - 0x1773: 84, - 0x17B4: 84, - 0x17B5: 84, - 0x17B7: 84, - 0x17B8: 84, - 0x17B9: 84, - 0x17BA: 84, - 0x17BB: 84, - 0x17BC: 84, - 0x17BD: 84, - 0x17C6: 84, - 0x17C9: 84, - 0x17CA: 84, - 0x17CB: 84, - 0x17CC: 84, - 0x17CD: 84, - 0x17CE: 84, - 0x17CF: 84, - 0x17D0: 84, - 0x17D1: 84, - 0x17D2: 84, - 0x17D3: 84, - 0x17DD: 84, - 0x1807: 68, - 0x180A: 67, - 0x180B: 84, - 0x180C: 84, - 0x180D: 84, - 0x180F: 84, - 0x1820: 68, - 0x1821: 68, - 0x1822: 68, - 0x1823: 68, - 0x1824: 68, - 0x1825: 68, - 0x1826: 68, - 0x1827: 68, - 0x1828: 68, - 0x1829: 68, - 0x182A: 68, - 0x182B: 68, - 0x182C: 68, - 0x182D: 68, - 0x182E: 68, - 0x182F: 68, - 0x1830: 68, - 0x1831: 68, - 0x1832: 68, - 0x1833: 68, - 0x1834: 68, - 0x1835: 68, - 0x1836: 68, - 0x1837: 68, - 0x1838: 68, - 0x1839: 68, - 0x183A: 68, - 0x183B: 68, - 0x183C: 68, - 0x183D: 68, - 0x183E: 68, - 0x183F: 68, - 0x1840: 68, - 0x1841: 68, - 0x1842: 68, - 0x1843: 68, - 0x1844: 68, - 0x1845: 68, - 0x1846: 68, - 0x1847: 68, - 0x1848: 68, - 0x1849: 68, - 0x184A: 68, - 0x184B: 68, - 0x184C: 68, - 0x184D: 68, - 0x184E: 68, - 0x184F: 68, - 0x1850: 68, - 0x1851: 68, - 0x1852: 68, - 0x1853: 68, - 0x1854: 68, - 0x1855: 68, - 0x1856: 68, - 0x1857: 68, - 0x1858: 68, - 0x1859: 68, - 0x185A: 68, - 0x185B: 68, - 0x185C: 68, - 0x185D: 68, - 0x185E: 68, - 0x185F: 68, - 0x1860: 68, - 0x1861: 68, - 0x1862: 68, - 0x1863: 68, - 0x1864: 68, - 0x1865: 68, - 0x1866: 68, - 0x1867: 68, - 0x1868: 68, - 0x1869: 68, - 0x186A: 68, - 0x186B: 68, - 0x186C: 68, - 0x186D: 68, - 0x186E: 68, - 0x186F: 68, - 0x1870: 68, - 0x1871: 68, - 0x1872: 68, - 0x1873: 68, - 0x1874: 68, - 0x1875: 68, - 0x1876: 68, - 0x1877: 68, - 0x1878: 68, - 0x1885: 84, - 0x1886: 84, - 0x1887: 68, - 0x1888: 68, - 0x1889: 68, - 0x188A: 68, - 0x188B: 68, - 0x188C: 68, - 0x188D: 68, - 0x188E: 68, - 0x188F: 68, - 0x1890: 68, - 0x1891: 68, - 0x1892: 68, - 0x1893: 68, - 0x1894: 68, - 0x1895: 68, - 0x1896: 68, - 0x1897: 68, - 0x1898: 68, - 0x1899: 68, - 0x189A: 68, - 0x189B: 68, - 0x189C: 68, - 0x189D: 68, - 0x189E: 68, - 0x189F: 68, - 0x18A0: 68, - 0x18A1: 68, - 0x18A2: 68, - 0x18A3: 68, - 0x18A4: 68, - 0x18A5: 68, - 0x18A6: 68, - 0x18A7: 68, - 0x18A8: 68, - 0x18A9: 84, - 0x18AA: 68, - 0x1920: 84, - 0x1921: 84, - 0x1922: 84, - 0x1927: 84, - 0x1928: 84, - 0x1932: 84, - 0x1939: 84, - 0x193A: 84, - 0x193B: 84, - 0x1A17: 84, - 0x1A18: 84, - 0x1A1B: 84, - 0x1A56: 84, - 0x1A58: 84, - 0x1A59: 84, - 
0x1A5A: 84, - 0x1A5B: 84, - 0x1A5C: 84, - 0x1A5D: 84, - 0x1A5E: 84, - 0x1A60: 84, - 0x1A62: 84, - 0x1A65: 84, - 0x1A66: 84, - 0x1A67: 84, - 0x1A68: 84, - 0x1A69: 84, - 0x1A6A: 84, - 0x1A6B: 84, - 0x1A6C: 84, - 0x1A73: 84, - 0x1A74: 84, - 0x1A75: 84, - 0x1A76: 84, - 0x1A77: 84, - 0x1A78: 84, - 0x1A79: 84, - 0x1A7A: 84, - 0x1A7B: 84, - 0x1A7C: 84, - 0x1A7F: 84, - 0x1AB0: 84, - 0x1AB1: 84, - 0x1AB2: 84, - 0x1AB3: 84, - 0x1AB4: 84, - 0x1AB5: 84, - 0x1AB6: 84, - 0x1AB7: 84, - 0x1AB8: 84, - 0x1AB9: 84, - 0x1ABA: 84, - 0x1ABB: 84, - 0x1ABC: 84, - 0x1ABD: 84, - 0x1ABE: 84, - 0x1ABF: 84, - 0x1AC0: 84, - 0x1AC1: 84, - 0x1AC2: 84, - 0x1AC3: 84, - 0x1AC4: 84, - 0x1AC5: 84, - 0x1AC6: 84, - 0x1AC7: 84, - 0x1AC8: 84, - 0x1AC9: 84, - 0x1ACA: 84, - 0x1ACB: 84, - 0x1ACC: 84, - 0x1ACD: 84, - 0x1ACE: 84, - 0x1B00: 84, - 0x1B01: 84, - 0x1B02: 84, - 0x1B03: 84, - 0x1B34: 84, - 0x1B36: 84, - 0x1B37: 84, - 0x1B38: 84, - 0x1B39: 84, - 0x1B3A: 84, - 0x1B3C: 84, - 0x1B42: 84, - 0x1B6B: 84, - 0x1B6C: 84, - 0x1B6D: 84, - 0x1B6E: 84, - 0x1B6F: 84, - 0x1B70: 84, - 0x1B71: 84, - 0x1B72: 84, - 0x1B73: 84, - 0x1B80: 84, - 0x1B81: 84, - 0x1BA2: 84, - 0x1BA3: 84, - 0x1BA4: 84, - 0x1BA5: 84, - 0x1BA8: 84, - 0x1BA9: 84, - 0x1BAB: 84, - 0x1BAC: 84, - 0x1BAD: 84, - 0x1BE6: 84, - 0x1BE8: 84, - 0x1BE9: 84, - 0x1BED: 84, - 0x1BEF: 84, - 0x1BF0: 84, - 0x1BF1: 84, - 0x1C2C: 84, - 0x1C2D: 84, - 0x1C2E: 84, - 0x1C2F: 84, - 0x1C30: 84, - 0x1C31: 84, - 0x1C32: 84, - 0x1C33: 84, - 0x1C36: 84, - 0x1C37: 84, - 0x1CD0: 84, - 0x1CD1: 84, - 0x1CD2: 84, - 0x1CD4: 84, - 0x1CD5: 84, - 0x1CD6: 84, - 0x1CD7: 84, - 0x1CD8: 84, - 0x1CD9: 84, - 0x1CDA: 84, - 0x1CDB: 84, - 0x1CDC: 84, - 0x1CDD: 84, - 0x1CDE: 84, - 0x1CDF: 84, - 0x1CE0: 84, - 0x1CE2: 84, - 0x1CE3: 84, - 0x1CE4: 84, - 0x1CE5: 84, - 0x1CE6: 84, - 0x1CE7: 84, - 0x1CE8: 84, - 0x1CED: 84, - 0x1CF4: 84, - 0x1CF8: 84, - 0x1CF9: 84, - 0x1DC0: 84, - 0x1DC1: 84, - 0x1DC2: 84, - 0x1DC3: 84, - 0x1DC4: 84, - 0x1DC5: 84, - 0x1DC6: 84, - 0x1DC7: 84, - 0x1DC8: 84, - 0x1DC9: 84, - 0x1DCA: 84, - 0x1DCB: 84, - 0x1DCC: 84, - 0x1DCD: 84, - 0x1DCE: 84, - 0x1DCF: 84, - 0x1DD0: 84, - 0x1DD1: 84, - 0x1DD2: 84, - 0x1DD3: 84, - 0x1DD4: 84, - 0x1DD5: 84, - 0x1DD6: 84, - 0x1DD7: 84, - 0x1DD8: 84, - 0x1DD9: 84, - 0x1DDA: 84, - 0x1DDB: 84, - 0x1DDC: 84, - 0x1DDD: 84, - 0x1DDE: 84, - 0x1DDF: 84, - 0x1DE0: 84, - 0x1DE1: 84, - 0x1DE2: 84, - 0x1DE3: 84, - 0x1DE4: 84, - 0x1DE5: 84, - 0x1DE6: 84, - 0x1DE7: 84, - 0x1DE8: 84, - 0x1DE9: 84, - 0x1DEA: 84, - 0x1DEB: 84, - 0x1DEC: 84, - 0x1DED: 84, - 0x1DEE: 84, - 0x1DEF: 84, - 0x1DF0: 84, - 0x1DF1: 84, - 0x1DF2: 84, - 0x1DF3: 84, - 0x1DF4: 84, - 0x1DF5: 84, - 0x1DF6: 84, - 0x1DF7: 84, - 0x1DF8: 84, - 0x1DF9: 84, - 0x1DFA: 84, - 0x1DFB: 84, - 0x1DFC: 84, - 0x1DFD: 84, - 0x1DFE: 84, - 0x1DFF: 84, - 0x200B: 84, - 0x200D: 67, - 0x200E: 84, - 0x200F: 84, - 0x202A: 84, - 0x202B: 84, - 0x202C: 84, - 0x202D: 84, - 0x202E: 84, - 0x2060: 84, - 0x2061: 84, - 0x2062: 84, - 0x2063: 84, - 0x2064: 84, - 0x206A: 84, - 0x206B: 84, - 0x206C: 84, - 0x206D: 84, - 0x206E: 84, - 0x206F: 84, - 0x20D0: 84, - 0x20D1: 84, - 0x20D2: 84, - 0x20D3: 84, - 0x20D4: 84, - 0x20D5: 84, - 0x20D6: 84, - 0x20D7: 84, - 0x20D8: 84, - 0x20D9: 84, - 0x20DA: 84, - 0x20DB: 84, - 0x20DC: 84, - 0x20DD: 84, - 0x20DE: 84, - 0x20DF: 84, - 0x20E0: 84, - 0x20E1: 84, - 0x20E2: 84, - 0x20E3: 84, - 0x20E4: 84, - 0x20E5: 84, - 0x20E6: 84, - 0x20E7: 84, - 0x20E8: 84, - 0x20E9: 84, - 0x20EA: 84, - 0x20EB: 84, - 0x20EC: 84, - 0x20ED: 84, - 0x20EE: 84, - 0x20EF: 84, - 0x20F0: 84, - 0x2CEF: 84, - 0x2CF0: 84, - 0x2CF1: 84, - 0x2D7F: 84, 
- 0x2DE0: 84, - 0x2DE1: 84, - 0x2DE2: 84, - 0x2DE3: 84, - 0x2DE4: 84, - 0x2DE5: 84, - 0x2DE6: 84, - 0x2DE7: 84, - 0x2DE8: 84, - 0x2DE9: 84, - 0x2DEA: 84, - 0x2DEB: 84, - 0x2DEC: 84, - 0x2DED: 84, - 0x2DEE: 84, - 0x2DEF: 84, - 0x2DF0: 84, - 0x2DF1: 84, - 0x2DF2: 84, - 0x2DF3: 84, - 0x2DF4: 84, - 0x2DF5: 84, - 0x2DF6: 84, - 0x2DF7: 84, - 0x2DF8: 84, - 0x2DF9: 84, - 0x2DFA: 84, - 0x2DFB: 84, - 0x2DFC: 84, - 0x2DFD: 84, - 0x2DFE: 84, - 0x2DFF: 84, - 0x302A: 84, - 0x302B: 84, - 0x302C: 84, - 0x302D: 84, - 0x3099: 84, - 0x309A: 84, - 0xA66F: 84, - 0xA670: 84, - 0xA671: 84, - 0xA672: 84, - 0xA674: 84, - 0xA675: 84, - 0xA676: 84, - 0xA677: 84, - 0xA678: 84, - 0xA679: 84, - 0xA67A: 84, - 0xA67B: 84, - 0xA67C: 84, - 0xA67D: 84, - 0xA69E: 84, - 0xA69F: 84, - 0xA6F0: 84, - 0xA6F1: 84, - 0xA802: 84, - 0xA806: 84, - 0xA80B: 84, - 0xA825: 84, - 0xA826: 84, - 0xA82C: 84, - 0xA840: 68, - 0xA841: 68, - 0xA842: 68, - 0xA843: 68, - 0xA844: 68, - 0xA845: 68, - 0xA846: 68, - 0xA847: 68, - 0xA848: 68, - 0xA849: 68, - 0xA84A: 68, - 0xA84B: 68, - 0xA84C: 68, - 0xA84D: 68, - 0xA84E: 68, - 0xA84F: 68, - 0xA850: 68, - 0xA851: 68, - 0xA852: 68, - 0xA853: 68, - 0xA854: 68, - 0xA855: 68, - 0xA856: 68, - 0xA857: 68, - 0xA858: 68, - 0xA859: 68, - 0xA85A: 68, - 0xA85B: 68, - 0xA85C: 68, - 0xA85D: 68, - 0xA85E: 68, - 0xA85F: 68, - 0xA860: 68, - 0xA861: 68, - 0xA862: 68, - 0xA863: 68, - 0xA864: 68, - 0xA865: 68, - 0xA866: 68, - 0xA867: 68, - 0xA868: 68, - 0xA869: 68, - 0xA86A: 68, - 0xA86B: 68, - 0xA86C: 68, - 0xA86D: 68, - 0xA86E: 68, - 0xA86F: 68, - 0xA870: 68, - 0xA871: 68, - 0xA872: 76, - 0xA8C4: 84, - 0xA8C5: 84, - 0xA8E0: 84, - 0xA8E1: 84, - 0xA8E2: 84, - 0xA8E3: 84, - 0xA8E4: 84, - 0xA8E5: 84, - 0xA8E6: 84, - 0xA8E7: 84, - 0xA8E8: 84, - 0xA8E9: 84, - 0xA8EA: 84, - 0xA8EB: 84, - 0xA8EC: 84, - 0xA8ED: 84, - 0xA8EE: 84, - 0xA8EF: 84, - 0xA8F0: 84, - 0xA8F1: 84, - 0xA8FF: 84, - 0xA926: 84, - 0xA927: 84, - 0xA928: 84, - 0xA929: 84, - 0xA92A: 84, - 0xA92B: 84, - 0xA92C: 84, - 0xA92D: 84, - 0xA947: 84, - 0xA948: 84, - 0xA949: 84, - 0xA94A: 84, - 0xA94B: 84, - 0xA94C: 84, - 0xA94D: 84, - 0xA94E: 84, - 0xA94F: 84, - 0xA950: 84, - 0xA951: 84, - 0xA980: 84, - 0xA981: 84, - 0xA982: 84, - 0xA9B3: 84, - 0xA9B6: 84, - 0xA9B7: 84, - 0xA9B8: 84, - 0xA9B9: 84, - 0xA9BC: 84, - 0xA9BD: 84, - 0xA9E5: 84, - 0xAA29: 84, - 0xAA2A: 84, - 0xAA2B: 84, - 0xAA2C: 84, - 0xAA2D: 84, - 0xAA2E: 84, - 0xAA31: 84, - 0xAA32: 84, - 0xAA35: 84, - 0xAA36: 84, - 0xAA43: 84, - 0xAA4C: 84, - 0xAA7C: 84, - 0xAAB0: 84, - 0xAAB2: 84, - 0xAAB3: 84, - 0xAAB4: 84, - 0xAAB7: 84, - 0xAAB8: 84, - 0xAABE: 84, - 0xAABF: 84, - 0xAAC1: 84, - 0xAAEC: 84, - 0xAAED: 84, - 0xAAF6: 84, - 0xABE5: 84, - 0xABE8: 84, - 0xABED: 84, - 0xFB1E: 84, - 0xFE00: 84, - 0xFE01: 84, - 0xFE02: 84, - 0xFE03: 84, - 0xFE04: 84, - 0xFE05: 84, - 0xFE06: 84, - 0xFE07: 84, - 0xFE08: 84, - 0xFE09: 84, - 0xFE0A: 84, - 0xFE0B: 84, - 0xFE0C: 84, - 0xFE0D: 84, - 0xFE0E: 84, - 0xFE0F: 84, - 0xFE20: 84, - 0xFE21: 84, - 0xFE22: 84, - 0xFE23: 84, - 0xFE24: 84, - 0xFE25: 84, - 0xFE26: 84, - 0xFE27: 84, - 0xFE28: 84, - 0xFE29: 84, - 0xFE2A: 84, - 0xFE2B: 84, - 0xFE2C: 84, - 0xFE2D: 84, - 0xFE2E: 84, - 0xFE2F: 84, - 0xFEFF: 84, - 0xFFF9: 84, - 0xFFFA: 84, - 0xFFFB: 84, - 0x101FD: 84, - 0x102E0: 84, - 0x10376: 84, - 0x10377: 84, - 0x10378: 84, - 0x10379: 84, - 0x1037A: 84, - 0x10A01: 84, - 0x10A02: 84, - 0x10A03: 84, - 0x10A05: 84, - 0x10A06: 84, - 0x10A0C: 84, - 0x10A0D: 84, - 0x10A0E: 84, - 0x10A0F: 84, - 0x10A38: 84, - 0x10A39: 84, - 0x10A3A: 84, - 0x10A3F: 84, - 0x10AC0: 68, - 0x10AC1: 68, - 0x10AC2: 68, - 
0x10AC3: 68, - 0x10AC4: 68, - 0x10AC5: 82, - 0x10AC7: 82, - 0x10AC9: 82, - 0x10ACA: 82, - 0x10ACD: 76, - 0x10ACE: 82, - 0x10ACF: 82, - 0x10AD0: 82, - 0x10AD1: 82, - 0x10AD2: 82, - 0x10AD3: 68, - 0x10AD4: 68, - 0x10AD5: 68, - 0x10AD6: 68, - 0x10AD7: 76, - 0x10AD8: 68, - 0x10AD9: 68, - 0x10ADA: 68, - 0x10ADB: 68, - 0x10ADC: 68, - 0x10ADD: 82, - 0x10ADE: 68, - 0x10ADF: 68, - 0x10AE0: 68, - 0x10AE1: 82, - 0x10AE4: 82, - 0x10AE5: 84, - 0x10AE6: 84, - 0x10AEB: 68, - 0x10AEC: 68, - 0x10AED: 68, - 0x10AEE: 68, - 0x10AEF: 82, - 0x10B80: 68, - 0x10B81: 82, - 0x10B82: 68, - 0x10B83: 82, - 0x10B84: 82, - 0x10B85: 82, - 0x10B86: 68, - 0x10B87: 68, - 0x10B88: 68, - 0x10B89: 82, - 0x10B8A: 68, - 0x10B8B: 68, - 0x10B8C: 82, - 0x10B8D: 68, - 0x10B8E: 82, - 0x10B8F: 82, - 0x10B90: 68, - 0x10B91: 82, - 0x10BA9: 82, - 0x10BAA: 82, - 0x10BAB: 82, - 0x10BAC: 82, - 0x10BAD: 68, - 0x10BAE: 68, - 0x10D00: 76, - 0x10D01: 68, - 0x10D02: 68, - 0x10D03: 68, - 0x10D04: 68, - 0x10D05: 68, - 0x10D06: 68, - 0x10D07: 68, - 0x10D08: 68, - 0x10D09: 68, - 0x10D0A: 68, - 0x10D0B: 68, - 0x10D0C: 68, - 0x10D0D: 68, - 0x10D0E: 68, - 0x10D0F: 68, - 0x10D10: 68, - 0x10D11: 68, - 0x10D12: 68, - 0x10D13: 68, - 0x10D14: 68, - 0x10D15: 68, - 0x10D16: 68, - 0x10D17: 68, - 0x10D18: 68, - 0x10D19: 68, - 0x10D1A: 68, - 0x10D1B: 68, - 0x10D1C: 68, - 0x10D1D: 68, - 0x10D1E: 68, - 0x10D1F: 68, - 0x10D20: 68, - 0x10D21: 68, - 0x10D22: 82, - 0x10D23: 68, - 0x10D24: 84, - 0x10D25: 84, - 0x10D26: 84, - 0x10D27: 84, - 0x10EAB: 84, - 0x10EAC: 84, - 0x10EFD: 84, - 0x10EFE: 84, - 0x10EFF: 84, - 0x10F30: 68, - 0x10F31: 68, - 0x10F32: 68, - 0x10F33: 82, - 0x10F34: 68, - 0x10F35: 68, - 0x10F36: 68, - 0x10F37: 68, - 0x10F38: 68, - 0x10F39: 68, - 0x10F3A: 68, - 0x10F3B: 68, - 0x10F3C: 68, - 0x10F3D: 68, - 0x10F3E: 68, - 0x10F3F: 68, - 0x10F40: 68, - 0x10F41: 68, - 0x10F42: 68, - 0x10F43: 68, - 0x10F44: 68, - 0x10F46: 84, - 0x10F47: 84, - 0x10F48: 84, - 0x10F49: 84, - 0x10F4A: 84, - 0x10F4B: 84, - 0x10F4C: 84, - 0x10F4D: 84, - 0x10F4E: 84, - 0x10F4F: 84, - 0x10F50: 84, - 0x10F51: 68, - 0x10F52: 68, - 0x10F53: 68, - 0x10F54: 82, - 0x10F70: 68, - 0x10F71: 68, - 0x10F72: 68, - 0x10F73: 68, - 0x10F74: 82, - 0x10F75: 82, - 0x10F76: 68, - 0x10F77: 68, - 0x10F78: 68, - 0x10F79: 68, - 0x10F7A: 68, - 0x10F7B: 68, - 0x10F7C: 68, - 0x10F7D: 68, - 0x10F7E: 68, - 0x10F7F: 68, - 0x10F80: 68, - 0x10F81: 68, - 0x10F82: 84, - 0x10F83: 84, - 0x10F84: 84, - 0x10F85: 84, - 0x10FB0: 68, - 0x10FB2: 68, - 0x10FB3: 68, - 0x10FB4: 82, - 0x10FB5: 82, - 0x10FB6: 82, - 0x10FB8: 68, - 0x10FB9: 82, - 0x10FBA: 82, - 0x10FBB: 68, - 0x10FBC: 68, - 0x10FBD: 82, - 0x10FBE: 68, - 0x10FBF: 68, - 0x10FC1: 68, - 0x10FC2: 82, - 0x10FC3: 82, - 0x10FC4: 68, - 0x10FC9: 82, - 0x10FCA: 68, - 0x10FCB: 76, - 0x11001: 84, - 0x11038: 84, - 0x11039: 84, - 0x1103A: 84, - 0x1103B: 84, - 0x1103C: 84, - 0x1103D: 84, - 0x1103E: 84, - 0x1103F: 84, - 0x11040: 84, - 0x11041: 84, - 0x11042: 84, - 0x11043: 84, - 0x11044: 84, - 0x11045: 84, - 0x11046: 84, - 0x11070: 84, - 0x11073: 84, - 0x11074: 84, - 0x1107F: 84, - 0x11080: 84, - 0x11081: 84, - 0x110B3: 84, - 0x110B4: 84, - 0x110B5: 84, - 0x110B6: 84, - 0x110B9: 84, - 0x110BA: 84, - 0x110C2: 84, - 0x11100: 84, - 0x11101: 84, - 0x11102: 84, - 0x11127: 84, - 0x11128: 84, - 0x11129: 84, - 0x1112A: 84, - 0x1112B: 84, - 0x1112D: 84, - 0x1112E: 84, - 0x1112F: 84, - 0x11130: 84, - 0x11131: 84, - 0x11132: 84, - 0x11133: 84, - 0x11134: 84, - 0x11173: 84, - 0x11180: 84, - 0x11181: 84, - 0x111B6: 84, - 0x111B7: 84, - 0x111B8: 84, - 0x111B9: 84, - 0x111BA: 84, - 0x111BB: 84, - 
0x111BC: 84, - 0x111BD: 84, - 0x111BE: 84, - 0x111C9: 84, - 0x111CA: 84, - 0x111CB: 84, - 0x111CC: 84, - 0x111CF: 84, - 0x1122F: 84, - 0x11230: 84, - 0x11231: 84, - 0x11234: 84, - 0x11236: 84, - 0x11237: 84, - 0x1123E: 84, - 0x11241: 84, - 0x112DF: 84, - 0x112E3: 84, - 0x112E4: 84, - 0x112E5: 84, - 0x112E6: 84, - 0x112E7: 84, - 0x112E8: 84, - 0x112E9: 84, - 0x112EA: 84, - 0x11300: 84, - 0x11301: 84, - 0x1133B: 84, - 0x1133C: 84, - 0x11340: 84, - 0x11366: 84, - 0x11367: 84, - 0x11368: 84, - 0x11369: 84, - 0x1136A: 84, - 0x1136B: 84, - 0x1136C: 84, - 0x11370: 84, - 0x11371: 84, - 0x11372: 84, - 0x11373: 84, - 0x11374: 84, - 0x11438: 84, - 0x11439: 84, - 0x1143A: 84, - 0x1143B: 84, - 0x1143C: 84, - 0x1143D: 84, - 0x1143E: 84, - 0x1143F: 84, - 0x11442: 84, - 0x11443: 84, - 0x11444: 84, - 0x11446: 84, - 0x1145E: 84, - 0x114B3: 84, - 0x114B4: 84, - 0x114B5: 84, - 0x114B6: 84, - 0x114B7: 84, - 0x114B8: 84, - 0x114BA: 84, - 0x114BF: 84, - 0x114C0: 84, - 0x114C2: 84, - 0x114C3: 84, - 0x115B2: 84, - 0x115B3: 84, - 0x115B4: 84, - 0x115B5: 84, - 0x115BC: 84, - 0x115BD: 84, - 0x115BF: 84, - 0x115C0: 84, - 0x115DC: 84, - 0x115DD: 84, - 0x11633: 84, - 0x11634: 84, - 0x11635: 84, - 0x11636: 84, - 0x11637: 84, - 0x11638: 84, - 0x11639: 84, - 0x1163A: 84, - 0x1163D: 84, - 0x1163F: 84, - 0x11640: 84, - 0x116AB: 84, - 0x116AD: 84, - 0x116B0: 84, - 0x116B1: 84, - 0x116B2: 84, - 0x116B3: 84, - 0x116B4: 84, - 0x116B5: 84, - 0x116B7: 84, - 0x1171D: 84, - 0x1171E: 84, - 0x1171F: 84, - 0x11722: 84, - 0x11723: 84, - 0x11724: 84, - 0x11725: 84, - 0x11727: 84, - 0x11728: 84, - 0x11729: 84, - 0x1172A: 84, - 0x1172B: 84, - 0x1182F: 84, - 0x11830: 84, - 0x11831: 84, - 0x11832: 84, - 0x11833: 84, - 0x11834: 84, - 0x11835: 84, - 0x11836: 84, - 0x11837: 84, - 0x11839: 84, - 0x1183A: 84, - 0x1193B: 84, - 0x1193C: 84, - 0x1193E: 84, - 0x11943: 84, - 0x119D4: 84, - 0x119D5: 84, - 0x119D6: 84, - 0x119D7: 84, - 0x119DA: 84, - 0x119DB: 84, - 0x119E0: 84, - 0x11A01: 84, - 0x11A02: 84, - 0x11A03: 84, - 0x11A04: 84, - 0x11A05: 84, - 0x11A06: 84, - 0x11A07: 84, - 0x11A08: 84, - 0x11A09: 84, - 0x11A0A: 84, - 0x11A33: 84, - 0x11A34: 84, - 0x11A35: 84, - 0x11A36: 84, - 0x11A37: 84, - 0x11A38: 84, - 0x11A3B: 84, - 0x11A3C: 84, - 0x11A3D: 84, - 0x11A3E: 84, - 0x11A47: 84, - 0x11A51: 84, - 0x11A52: 84, - 0x11A53: 84, - 0x11A54: 84, - 0x11A55: 84, - 0x11A56: 84, - 0x11A59: 84, - 0x11A5A: 84, - 0x11A5B: 84, - 0x11A8A: 84, - 0x11A8B: 84, - 0x11A8C: 84, - 0x11A8D: 84, - 0x11A8E: 84, - 0x11A8F: 84, - 0x11A90: 84, - 0x11A91: 84, - 0x11A92: 84, - 0x11A93: 84, - 0x11A94: 84, - 0x11A95: 84, - 0x11A96: 84, - 0x11A98: 84, - 0x11A99: 84, - 0x11C30: 84, - 0x11C31: 84, - 0x11C32: 84, - 0x11C33: 84, - 0x11C34: 84, - 0x11C35: 84, - 0x11C36: 84, - 0x11C38: 84, - 0x11C39: 84, - 0x11C3A: 84, - 0x11C3B: 84, - 0x11C3C: 84, - 0x11C3D: 84, - 0x11C3F: 84, - 0x11C92: 84, - 0x11C93: 84, - 0x11C94: 84, - 0x11C95: 84, - 0x11C96: 84, - 0x11C97: 84, - 0x11C98: 84, - 0x11C99: 84, - 0x11C9A: 84, - 0x11C9B: 84, - 0x11C9C: 84, - 0x11C9D: 84, - 0x11C9E: 84, - 0x11C9F: 84, - 0x11CA0: 84, - 0x11CA1: 84, - 0x11CA2: 84, - 0x11CA3: 84, - 0x11CA4: 84, - 0x11CA5: 84, - 0x11CA6: 84, - 0x11CA7: 84, - 0x11CAA: 84, - 0x11CAB: 84, - 0x11CAC: 84, - 0x11CAD: 84, - 0x11CAE: 84, - 0x11CAF: 84, - 0x11CB0: 84, - 0x11CB2: 84, - 0x11CB3: 84, - 0x11CB5: 84, - 0x11CB6: 84, - 0x11D31: 84, - 0x11D32: 84, - 0x11D33: 84, - 0x11D34: 84, - 0x11D35: 84, - 0x11D36: 84, - 0x11D3A: 84, - 0x11D3C: 84, - 0x11D3D: 84, - 0x11D3F: 84, - 0x11D40: 84, - 0x11D41: 84, - 0x11D42: 84, - 0x11D43: 84, - 0x11D44: 84, - 
0x11D45: 84, - 0x11D47: 84, - 0x11D90: 84, - 0x11D91: 84, - 0x11D95: 84, - 0x11D97: 84, - 0x11EF3: 84, - 0x11EF4: 84, - 0x11F00: 84, - 0x11F01: 84, - 0x11F36: 84, - 0x11F37: 84, - 0x11F38: 84, - 0x11F39: 84, - 0x11F3A: 84, - 0x11F40: 84, - 0x11F42: 84, - 0x13430: 84, - 0x13431: 84, - 0x13432: 84, - 0x13433: 84, - 0x13434: 84, - 0x13435: 84, - 0x13436: 84, - 0x13437: 84, - 0x13438: 84, - 0x13439: 84, - 0x1343A: 84, - 0x1343B: 84, - 0x1343C: 84, - 0x1343D: 84, - 0x1343E: 84, - 0x1343F: 84, - 0x13440: 84, - 0x13447: 84, - 0x13448: 84, - 0x13449: 84, - 0x1344A: 84, - 0x1344B: 84, - 0x1344C: 84, - 0x1344D: 84, - 0x1344E: 84, - 0x1344F: 84, - 0x13450: 84, - 0x13451: 84, - 0x13452: 84, - 0x13453: 84, - 0x13454: 84, - 0x13455: 84, - 0x16AF0: 84, - 0x16AF1: 84, - 0x16AF2: 84, - 0x16AF3: 84, - 0x16AF4: 84, - 0x16B30: 84, - 0x16B31: 84, - 0x16B32: 84, - 0x16B33: 84, - 0x16B34: 84, - 0x16B35: 84, - 0x16B36: 84, - 0x16F4F: 84, - 0x16F8F: 84, - 0x16F90: 84, - 0x16F91: 84, - 0x16F92: 84, - 0x16FE4: 84, - 0x1BC9D: 84, - 0x1BC9E: 84, - 0x1BCA0: 84, - 0x1BCA1: 84, - 0x1BCA2: 84, - 0x1BCA3: 84, - 0x1CF00: 84, - 0x1CF01: 84, - 0x1CF02: 84, - 0x1CF03: 84, - 0x1CF04: 84, - 0x1CF05: 84, - 0x1CF06: 84, - 0x1CF07: 84, - 0x1CF08: 84, - 0x1CF09: 84, - 0x1CF0A: 84, - 0x1CF0B: 84, - 0x1CF0C: 84, - 0x1CF0D: 84, - 0x1CF0E: 84, - 0x1CF0F: 84, - 0x1CF10: 84, - 0x1CF11: 84, - 0x1CF12: 84, - 0x1CF13: 84, - 0x1CF14: 84, - 0x1CF15: 84, - 0x1CF16: 84, - 0x1CF17: 84, - 0x1CF18: 84, - 0x1CF19: 84, - 0x1CF1A: 84, - 0x1CF1B: 84, - 0x1CF1C: 84, - 0x1CF1D: 84, - 0x1CF1E: 84, - 0x1CF1F: 84, - 0x1CF20: 84, - 0x1CF21: 84, - 0x1CF22: 84, - 0x1CF23: 84, - 0x1CF24: 84, - 0x1CF25: 84, - 0x1CF26: 84, - 0x1CF27: 84, - 0x1CF28: 84, - 0x1CF29: 84, - 0x1CF2A: 84, - 0x1CF2B: 84, - 0x1CF2C: 84, - 0x1CF2D: 84, - 0x1CF30: 84, - 0x1CF31: 84, - 0x1CF32: 84, - 0x1CF33: 84, - 0x1CF34: 84, - 0x1CF35: 84, - 0x1CF36: 84, - 0x1CF37: 84, - 0x1CF38: 84, - 0x1CF39: 84, - 0x1CF3A: 84, - 0x1CF3B: 84, - 0x1CF3C: 84, - 0x1CF3D: 84, - 0x1CF3E: 84, - 0x1CF3F: 84, - 0x1CF40: 84, - 0x1CF41: 84, - 0x1CF42: 84, - 0x1CF43: 84, - 0x1CF44: 84, - 0x1CF45: 84, - 0x1CF46: 84, - 0x1D167: 84, - 0x1D168: 84, - 0x1D169: 84, - 0x1D173: 84, - 0x1D174: 84, - 0x1D175: 84, - 0x1D176: 84, - 0x1D177: 84, - 0x1D178: 84, - 0x1D179: 84, - 0x1D17A: 84, - 0x1D17B: 84, - 0x1D17C: 84, - 0x1D17D: 84, - 0x1D17E: 84, - 0x1D17F: 84, - 0x1D180: 84, - 0x1D181: 84, - 0x1D182: 84, - 0x1D185: 84, - 0x1D186: 84, - 0x1D187: 84, - 0x1D188: 84, - 0x1D189: 84, - 0x1D18A: 84, - 0x1D18B: 84, - 0x1D1AA: 84, - 0x1D1AB: 84, - 0x1D1AC: 84, - 0x1D1AD: 84, - 0x1D242: 84, - 0x1D243: 84, - 0x1D244: 84, - 0x1DA00: 84, - 0x1DA01: 84, - 0x1DA02: 84, - 0x1DA03: 84, - 0x1DA04: 84, - 0x1DA05: 84, - 0x1DA06: 84, - 0x1DA07: 84, - 0x1DA08: 84, - 0x1DA09: 84, - 0x1DA0A: 84, - 0x1DA0B: 84, - 0x1DA0C: 84, - 0x1DA0D: 84, - 0x1DA0E: 84, - 0x1DA0F: 84, - 0x1DA10: 84, - 0x1DA11: 84, - 0x1DA12: 84, - 0x1DA13: 84, - 0x1DA14: 84, - 0x1DA15: 84, - 0x1DA16: 84, - 0x1DA17: 84, - 0x1DA18: 84, - 0x1DA19: 84, - 0x1DA1A: 84, - 0x1DA1B: 84, - 0x1DA1C: 84, - 0x1DA1D: 84, - 0x1DA1E: 84, - 0x1DA1F: 84, - 0x1DA20: 84, - 0x1DA21: 84, - 0x1DA22: 84, - 0x1DA23: 84, - 0x1DA24: 84, - 0x1DA25: 84, - 0x1DA26: 84, - 0x1DA27: 84, - 0x1DA28: 84, - 0x1DA29: 84, - 0x1DA2A: 84, - 0x1DA2B: 84, - 0x1DA2C: 84, - 0x1DA2D: 84, - 0x1DA2E: 84, - 0x1DA2F: 84, - 0x1DA30: 84, - 0x1DA31: 84, - 0x1DA32: 84, - 0x1DA33: 84, - 0x1DA34: 84, - 0x1DA35: 84, - 0x1DA36: 84, - 0x1DA3B: 84, - 0x1DA3C: 84, - 0x1DA3D: 84, - 0x1DA3E: 84, - 0x1DA3F: 84, - 0x1DA40: 84, - 0x1DA41: 84, - 
0x1DA42: 84, - 0x1DA43: 84, - 0x1DA44: 84, - 0x1DA45: 84, - 0x1DA46: 84, - 0x1DA47: 84, - 0x1DA48: 84, - 0x1DA49: 84, - 0x1DA4A: 84, - 0x1DA4B: 84, - 0x1DA4C: 84, - 0x1DA4D: 84, - 0x1DA4E: 84, - 0x1DA4F: 84, - 0x1DA50: 84, - 0x1DA51: 84, - 0x1DA52: 84, - 0x1DA53: 84, - 0x1DA54: 84, - 0x1DA55: 84, - 0x1DA56: 84, - 0x1DA57: 84, - 0x1DA58: 84, - 0x1DA59: 84, - 0x1DA5A: 84, - 0x1DA5B: 84, - 0x1DA5C: 84, - 0x1DA5D: 84, - 0x1DA5E: 84, - 0x1DA5F: 84, - 0x1DA60: 84, - 0x1DA61: 84, - 0x1DA62: 84, - 0x1DA63: 84, - 0x1DA64: 84, - 0x1DA65: 84, - 0x1DA66: 84, - 0x1DA67: 84, - 0x1DA68: 84, - 0x1DA69: 84, - 0x1DA6A: 84, - 0x1DA6B: 84, - 0x1DA6C: 84, - 0x1DA75: 84, - 0x1DA84: 84, - 0x1DA9B: 84, - 0x1DA9C: 84, - 0x1DA9D: 84, - 0x1DA9E: 84, - 0x1DA9F: 84, - 0x1DAA1: 84, - 0x1DAA2: 84, - 0x1DAA3: 84, - 0x1DAA4: 84, - 0x1DAA5: 84, - 0x1DAA6: 84, - 0x1DAA7: 84, - 0x1DAA8: 84, - 0x1DAA9: 84, - 0x1DAAA: 84, - 0x1DAAB: 84, - 0x1DAAC: 84, - 0x1DAAD: 84, - 0x1DAAE: 84, - 0x1DAAF: 84, - 0x1E000: 84, - 0x1E001: 84, - 0x1E002: 84, - 0x1E003: 84, - 0x1E004: 84, - 0x1E005: 84, - 0x1E006: 84, - 0x1E008: 84, - 0x1E009: 84, - 0x1E00A: 84, - 0x1E00B: 84, - 0x1E00C: 84, - 0x1E00D: 84, - 0x1E00E: 84, - 0x1E00F: 84, - 0x1E010: 84, - 0x1E011: 84, - 0x1E012: 84, - 0x1E013: 84, - 0x1E014: 84, - 0x1E015: 84, - 0x1E016: 84, - 0x1E017: 84, - 0x1E018: 84, - 0x1E01B: 84, - 0x1E01C: 84, - 0x1E01D: 84, - 0x1E01E: 84, - 0x1E01F: 84, - 0x1E020: 84, - 0x1E021: 84, - 0x1E023: 84, - 0x1E024: 84, - 0x1E026: 84, - 0x1E027: 84, - 0x1E028: 84, - 0x1E029: 84, - 0x1E02A: 84, - 0x1E08F: 84, - 0x1E130: 84, - 0x1E131: 84, - 0x1E132: 84, - 0x1E133: 84, - 0x1E134: 84, - 0x1E135: 84, - 0x1E136: 84, - 0x1E2AE: 84, - 0x1E2EC: 84, - 0x1E2ED: 84, - 0x1E2EE: 84, - 0x1E2EF: 84, - 0x1E4EC: 84, - 0x1E4ED: 84, - 0x1E4EE: 84, - 0x1E4EF: 84, - 0x1E8D0: 84, - 0x1E8D1: 84, - 0x1E8D2: 84, - 0x1E8D3: 84, - 0x1E8D4: 84, - 0x1E8D5: 84, - 0x1E8D6: 84, - 0x1E900: 68, - 0x1E901: 68, - 0x1E902: 68, - 0x1E903: 68, - 0x1E904: 68, - 0x1E905: 68, - 0x1E906: 68, - 0x1E907: 68, - 0x1E908: 68, - 0x1E909: 68, - 0x1E90A: 68, - 0x1E90B: 68, - 0x1E90C: 68, - 0x1E90D: 68, - 0x1E90E: 68, - 0x1E90F: 68, - 0x1E910: 68, - 0x1E911: 68, - 0x1E912: 68, - 0x1E913: 68, - 0x1E914: 68, - 0x1E915: 68, - 0x1E916: 68, - 0x1E917: 68, - 0x1E918: 68, - 0x1E919: 68, - 0x1E91A: 68, - 0x1E91B: 68, - 0x1E91C: 68, - 0x1E91D: 68, - 0x1E91E: 68, - 0x1E91F: 68, - 0x1E920: 68, - 0x1E921: 68, - 0x1E922: 68, - 0x1E923: 68, - 0x1E924: 68, - 0x1E925: 68, - 0x1E926: 68, - 0x1E927: 68, - 0x1E928: 68, - 0x1E929: 68, - 0x1E92A: 68, - 0x1E92B: 68, - 0x1E92C: 68, - 0x1E92D: 68, - 0x1E92E: 68, - 0x1E92F: 68, - 0x1E930: 68, - 0x1E931: 68, - 0x1E932: 68, - 0x1E933: 68, - 0x1E934: 68, - 0x1E935: 68, - 0x1E936: 68, - 0x1E937: 68, - 0x1E938: 68, - 0x1E939: 68, - 0x1E93A: 68, - 0x1E93B: 68, - 0x1E93C: 68, - 0x1E93D: 68, - 0x1E93E: 68, - 0x1E93F: 68, - 0x1E940: 68, - 0x1E941: 68, - 0x1E942: 68, - 0x1E943: 68, - 0x1E944: 84, - 0x1E945: 84, - 0x1E946: 84, - 0x1E947: 84, - 0x1E948: 84, - 0x1E949: 84, - 0x1E94A: 84, - 0x1E94B: 84, - 0xE0001: 84, - 0xE0020: 84, - 0xE0021: 84, - 0xE0022: 84, - 0xE0023: 84, - 0xE0024: 84, - 0xE0025: 84, - 0xE0026: 84, - 0xE0027: 84, - 0xE0028: 84, - 0xE0029: 84, - 0xE002A: 84, - 0xE002B: 84, - 0xE002C: 84, - 0xE002D: 84, - 0xE002E: 84, - 0xE002F: 84, - 0xE0030: 84, - 0xE0031: 84, - 0xE0032: 84, - 0xE0033: 84, - 0xE0034: 84, - 0xE0035: 84, - 0xE0036: 84, - 0xE0037: 84, - 0xE0038: 84, - 0xE0039: 84, - 0xE003A: 84, - 0xE003B: 84, - 0xE003C: 84, - 0xE003D: 84, - 0xE003E: 84, - 0xE003F: 84, - 0xE0040: 84, - 
0xE0041: 84, - 0xE0042: 84, - 0xE0043: 84, - 0xE0044: 84, - 0xE0045: 84, - 0xE0046: 84, - 0xE0047: 84, - 0xE0048: 84, - 0xE0049: 84, - 0xE004A: 84, - 0xE004B: 84, - 0xE004C: 84, - 0xE004D: 84, - 0xE004E: 84, - 0xE004F: 84, - 0xE0050: 84, - 0xE0051: 84, - 0xE0052: 84, - 0xE0053: 84, - 0xE0054: 84, - 0xE0055: 84, - 0xE0056: 84, - 0xE0057: 84, - 0xE0058: 84, - 0xE0059: 84, - 0xE005A: 84, - 0xE005B: 84, - 0xE005C: 84, - 0xE005D: 84, - 0xE005E: 84, - 0xE005F: 84, - 0xE0060: 84, - 0xE0061: 84, - 0xE0062: 84, - 0xE0063: 84, - 0xE0064: 84, - 0xE0065: 84, - 0xE0066: 84, - 0xE0067: 84, - 0xE0068: 84, - 0xE0069: 84, - 0xE006A: 84, - 0xE006B: 84, - 0xE006C: 84, - 0xE006D: 84, - 0xE006E: 84, - 0xE006F: 84, - 0xE0070: 84, - 0xE0071: 84, - 0xE0072: 84, - 0xE0073: 84, - 0xE0074: 84, - 0xE0075: 84, - 0xE0076: 84, - 0xE0077: 84, - 0xE0078: 84, - 0xE0079: 84, - 0xE007A: 84, - 0xE007B: 84, - 0xE007C: 84, - 0xE007D: 84, - 0xE007E: 84, - 0xE007F: 84, - 0xE0100: 84, - 0xE0101: 84, - 0xE0102: 84, - 0xE0103: 84, - 0xE0104: 84, - 0xE0105: 84, - 0xE0106: 84, - 0xE0107: 84, - 0xE0108: 84, - 0xE0109: 84, - 0xE010A: 84, - 0xE010B: 84, - 0xE010C: 84, - 0xE010D: 84, - 0xE010E: 84, - 0xE010F: 84, - 0xE0110: 84, - 0xE0111: 84, - 0xE0112: 84, - 0xE0113: 84, - 0xE0114: 84, - 0xE0115: 84, - 0xE0116: 84, - 0xE0117: 84, - 0xE0118: 84, - 0xE0119: 84, - 0xE011A: 84, - 0xE011B: 84, - 0xE011C: 84, - 0xE011D: 84, - 0xE011E: 84, - 0xE011F: 84, - 0xE0120: 84, - 0xE0121: 84, - 0xE0122: 84, - 0xE0123: 84, - 0xE0124: 84, - 0xE0125: 84, - 0xE0126: 84, - 0xE0127: 84, - 0xE0128: 84, - 0xE0129: 84, - 0xE012A: 84, - 0xE012B: 84, - 0xE012C: 84, - 0xE012D: 84, - 0xE012E: 84, - 0xE012F: 84, - 0xE0130: 84, - 0xE0131: 84, - 0xE0132: 84, - 0xE0133: 84, - 0xE0134: 84, - 0xE0135: 84, - 0xE0136: 84, - 0xE0137: 84, - 0xE0138: 84, - 0xE0139: 84, - 0xE013A: 84, - 0xE013B: 84, - 0xE013C: 84, - 0xE013D: 84, - 0xE013E: 84, - 0xE013F: 84, - 0xE0140: 84, - 0xE0141: 84, - 0xE0142: 84, - 0xE0143: 84, - 0xE0144: 84, - 0xE0145: 84, - 0xE0146: 84, - 0xE0147: 84, - 0xE0148: 84, - 0xE0149: 84, - 0xE014A: 84, - 0xE014B: 84, - 0xE014C: 84, - 0xE014D: 84, - 0xE014E: 84, - 0xE014F: 84, - 0xE0150: 84, - 0xE0151: 84, - 0xE0152: 84, - 0xE0153: 84, - 0xE0154: 84, - 0xE0155: 84, - 0xE0156: 84, - 0xE0157: 84, - 0xE0158: 84, - 0xE0159: 84, - 0xE015A: 84, - 0xE015B: 84, - 0xE015C: 84, - 0xE015D: 84, - 0xE015E: 84, - 0xE015F: 84, - 0xE0160: 84, - 0xE0161: 84, - 0xE0162: 84, - 0xE0163: 84, - 0xE0164: 84, - 0xE0165: 84, - 0xE0166: 84, - 0xE0167: 84, - 0xE0168: 84, - 0xE0169: 84, - 0xE016A: 84, - 0xE016B: 84, - 0xE016C: 84, - 0xE016D: 84, - 0xE016E: 84, - 0xE016F: 84, - 0xE0170: 84, - 0xE0171: 84, - 0xE0172: 84, - 0xE0173: 84, - 0xE0174: 84, - 0xE0175: 84, - 0xE0176: 84, - 0xE0177: 84, - 0xE0178: 84, - 0xE0179: 84, - 0xE017A: 84, - 0xE017B: 84, - 0xE017C: 84, - 0xE017D: 84, - 0xE017E: 84, - 0xE017F: 84, - 0xE0180: 84, - 0xE0181: 84, - 0xE0182: 84, - 0xE0183: 84, - 0xE0184: 84, - 0xE0185: 84, - 0xE0186: 84, - 0xE0187: 84, - 0xE0188: 84, - 0xE0189: 84, - 0xE018A: 84, - 0xE018B: 84, - 0xE018C: 84, - 0xE018D: 84, - 0xE018E: 84, - 0xE018F: 84, - 0xE0190: 84, - 0xE0191: 84, - 0xE0192: 84, - 0xE0193: 84, - 0xE0194: 84, - 0xE0195: 84, - 0xE0196: 84, - 0xE0197: 84, - 0xE0198: 84, - 0xE0199: 84, - 0xE019A: 84, - 0xE019B: 84, - 0xE019C: 84, - 0xE019D: 84, - 0xE019E: 84, - 0xE019F: 84, - 0xE01A0: 84, - 0xE01A1: 84, - 0xE01A2: 84, - 0xE01A3: 84, - 0xE01A4: 84, - 0xE01A5: 84, - 0xE01A6: 84, - 0xE01A7: 84, - 0xE01A8: 84, - 0xE01A9: 84, - 0xE01AA: 84, - 0xE01AB: 84, - 0xE01AC: 84, - 0xE01AD: 84, - 
0xE01AE: 84, - 0xE01AF: 84, - 0xE01B0: 84, - 0xE01B1: 84, - 0xE01B2: 84, - 0xE01B3: 84, - 0xE01B4: 84, - 0xE01B5: 84, - 0xE01B6: 84, - 0xE01B7: 84, - 0xE01B8: 84, - 0xE01B9: 84, - 0xE01BA: 84, - 0xE01BB: 84, - 0xE01BC: 84, - 0xE01BD: 84, - 0xE01BE: 84, - 0xE01BF: 84, - 0xE01C0: 84, - 0xE01C1: 84, - 0xE01C2: 84, - 0xE01C3: 84, - 0xE01C4: 84, - 0xE01C5: 84, - 0xE01C6: 84, - 0xE01C7: 84, - 0xE01C8: 84, - 0xE01C9: 84, - 0xE01CA: 84, - 0xE01CB: 84, - 0xE01CC: 84, - 0xE01CD: 84, - 0xE01CE: 84, - 0xE01CF: 84, - 0xE01D0: 84, - 0xE01D1: 84, - 0xE01D2: 84, - 0xE01D3: 84, - 0xE01D4: 84, - 0xE01D5: 84, - 0xE01D6: 84, - 0xE01D7: 84, - 0xE01D8: 84, - 0xE01D9: 84, - 0xE01DA: 84, - 0xE01DB: 84, - 0xE01DC: 84, - 0xE01DD: 84, - 0xE01DE: 84, - 0xE01DF: 84, - 0xE01E0: 84, - 0xE01E1: 84, - 0xE01E2: 84, - 0xE01E3: 84, - 0xE01E4: 84, - 0xE01E5: 84, - 0xE01E6: 84, - 0xE01E7: 84, - 0xE01E8: 84, - 0xE01E9: 84, - 0xE01EA: 84, - 0xE01EB: 84, - 0xE01EC: 84, - 0xE01ED: 84, - 0xE01EE: 84, - 0xE01EF: 84, -} -codepoint_classes = { - "PVALID": ( - 0x2D0000002E, - 0x300000003A, - 0x610000007B, - 0xDF000000F7, - 0xF800000100, - 0x10100000102, - 0x10300000104, - 0x10500000106, - 0x10700000108, - 0x1090000010A, - 0x10B0000010C, - 0x10D0000010E, - 0x10F00000110, - 0x11100000112, - 0x11300000114, - 0x11500000116, - 0x11700000118, - 0x1190000011A, - 0x11B0000011C, - 0x11D0000011E, - 0x11F00000120, - 0x12100000122, - 0x12300000124, - 0x12500000126, - 0x12700000128, - 0x1290000012A, - 0x12B0000012C, - 0x12D0000012E, - 0x12F00000130, - 0x13100000132, - 0x13500000136, - 0x13700000139, - 0x13A0000013B, - 0x13C0000013D, - 0x13E0000013F, - 0x14200000143, - 0x14400000145, - 0x14600000147, - 0x14800000149, - 0x14B0000014C, - 0x14D0000014E, - 0x14F00000150, - 0x15100000152, - 0x15300000154, - 0x15500000156, - 0x15700000158, - 0x1590000015A, - 0x15B0000015C, - 0x15D0000015E, - 0x15F00000160, - 0x16100000162, - 0x16300000164, - 0x16500000166, - 0x16700000168, - 0x1690000016A, - 0x16B0000016C, - 0x16D0000016E, - 0x16F00000170, - 0x17100000172, - 0x17300000174, - 0x17500000176, - 0x17700000178, - 0x17A0000017B, - 0x17C0000017D, - 0x17E0000017F, - 0x18000000181, - 0x18300000184, - 0x18500000186, - 0x18800000189, - 0x18C0000018E, - 0x19200000193, - 0x19500000196, - 0x1990000019C, - 0x19E0000019F, - 0x1A1000001A2, - 0x1A3000001A4, - 0x1A5000001A6, - 0x1A8000001A9, - 0x1AA000001AC, - 0x1AD000001AE, - 0x1B0000001B1, - 0x1B4000001B5, - 0x1B6000001B7, - 0x1B9000001BC, - 0x1BD000001C4, - 0x1CE000001CF, - 0x1D0000001D1, - 0x1D2000001D3, - 0x1D4000001D5, - 0x1D6000001D7, - 0x1D8000001D9, - 0x1DA000001DB, - 0x1DC000001DE, - 0x1DF000001E0, - 0x1E1000001E2, - 0x1E3000001E4, - 0x1E5000001E6, - 0x1E7000001E8, - 0x1E9000001EA, - 0x1EB000001EC, - 0x1ED000001EE, - 0x1EF000001F1, - 0x1F5000001F6, - 0x1F9000001FA, - 0x1FB000001FC, - 0x1FD000001FE, - 0x1FF00000200, - 0x20100000202, - 0x20300000204, - 0x20500000206, - 0x20700000208, - 0x2090000020A, - 0x20B0000020C, - 0x20D0000020E, - 0x20F00000210, - 0x21100000212, - 0x21300000214, - 0x21500000216, - 0x21700000218, - 0x2190000021A, - 0x21B0000021C, - 0x21D0000021E, - 0x21F00000220, - 0x22100000222, - 0x22300000224, - 0x22500000226, - 0x22700000228, - 0x2290000022A, - 0x22B0000022C, - 0x22D0000022E, - 0x22F00000230, - 0x23100000232, - 0x2330000023A, - 0x23C0000023D, - 0x23F00000241, - 0x24200000243, - 0x24700000248, - 0x2490000024A, - 0x24B0000024C, - 0x24D0000024E, - 0x24F000002B0, - 0x2B9000002C2, - 0x2C6000002D2, - 0x2EC000002ED, - 0x2EE000002EF, - 0x30000000340, - 0x34200000343, - 0x3460000034F, - 
0x35000000370, - 0x37100000372, - 0x37300000374, - 0x37700000378, - 0x37B0000037E, - 0x39000000391, - 0x3AC000003CF, - 0x3D7000003D8, - 0x3D9000003DA, - 0x3DB000003DC, - 0x3DD000003DE, - 0x3DF000003E0, - 0x3E1000003E2, - 0x3E3000003E4, - 0x3E5000003E6, - 0x3E7000003E8, - 0x3E9000003EA, - 0x3EB000003EC, - 0x3ED000003EE, - 0x3EF000003F0, - 0x3F3000003F4, - 0x3F8000003F9, - 0x3FB000003FD, - 0x43000000460, - 0x46100000462, - 0x46300000464, - 0x46500000466, - 0x46700000468, - 0x4690000046A, - 0x46B0000046C, - 0x46D0000046E, - 0x46F00000470, - 0x47100000472, - 0x47300000474, - 0x47500000476, - 0x47700000478, - 0x4790000047A, - 0x47B0000047C, - 0x47D0000047E, - 0x47F00000480, - 0x48100000482, - 0x48300000488, - 0x48B0000048C, - 0x48D0000048E, - 0x48F00000490, - 0x49100000492, - 0x49300000494, - 0x49500000496, - 0x49700000498, - 0x4990000049A, - 0x49B0000049C, - 0x49D0000049E, - 0x49F000004A0, - 0x4A1000004A2, - 0x4A3000004A4, - 0x4A5000004A6, - 0x4A7000004A8, - 0x4A9000004AA, - 0x4AB000004AC, - 0x4AD000004AE, - 0x4AF000004B0, - 0x4B1000004B2, - 0x4B3000004B4, - 0x4B5000004B6, - 0x4B7000004B8, - 0x4B9000004BA, - 0x4BB000004BC, - 0x4BD000004BE, - 0x4BF000004C0, - 0x4C2000004C3, - 0x4C4000004C5, - 0x4C6000004C7, - 0x4C8000004C9, - 0x4CA000004CB, - 0x4CC000004CD, - 0x4CE000004D0, - 0x4D1000004D2, - 0x4D3000004D4, - 0x4D5000004D6, - 0x4D7000004D8, - 0x4D9000004DA, - 0x4DB000004DC, - 0x4DD000004DE, - 0x4DF000004E0, - 0x4E1000004E2, - 0x4E3000004E4, - 0x4E5000004E6, - 0x4E7000004E8, - 0x4E9000004EA, - 0x4EB000004EC, - 0x4ED000004EE, - 0x4EF000004F0, - 0x4F1000004F2, - 0x4F3000004F4, - 0x4F5000004F6, - 0x4F7000004F8, - 0x4F9000004FA, - 0x4FB000004FC, - 0x4FD000004FE, - 0x4FF00000500, - 0x50100000502, - 0x50300000504, - 0x50500000506, - 0x50700000508, - 0x5090000050A, - 0x50B0000050C, - 0x50D0000050E, - 0x50F00000510, - 0x51100000512, - 0x51300000514, - 0x51500000516, - 0x51700000518, - 0x5190000051A, - 0x51B0000051C, - 0x51D0000051E, - 0x51F00000520, - 0x52100000522, - 0x52300000524, - 0x52500000526, - 0x52700000528, - 0x5290000052A, - 0x52B0000052C, - 0x52D0000052E, - 0x52F00000530, - 0x5590000055A, - 0x56000000587, - 0x58800000589, - 0x591000005BE, - 0x5BF000005C0, - 0x5C1000005C3, - 0x5C4000005C6, - 0x5C7000005C8, - 0x5D0000005EB, - 0x5EF000005F3, - 0x6100000061B, - 0x62000000640, - 0x64100000660, - 0x66E00000675, - 0x679000006D4, - 0x6D5000006DD, - 0x6DF000006E9, - 0x6EA000006F0, - 0x6FA00000700, - 0x7100000074B, - 0x74D000007B2, - 0x7C0000007F6, - 0x7FD000007FE, - 0x8000000082E, - 0x8400000085C, - 0x8600000086B, - 0x87000000888, - 0x8890000088F, - 0x898000008E2, - 0x8E300000958, - 0x96000000964, - 0x96600000970, - 0x97100000984, - 0x9850000098D, - 0x98F00000991, - 0x993000009A9, - 0x9AA000009B1, - 0x9B2000009B3, - 0x9B6000009BA, - 0x9BC000009C5, - 0x9C7000009C9, - 0x9CB000009CF, - 0x9D7000009D8, - 0x9E0000009E4, - 0x9E6000009F2, - 0x9FC000009FD, - 0x9FE000009FF, - 0xA0100000A04, - 0xA0500000A0B, - 0xA0F00000A11, - 0xA1300000A29, - 0xA2A00000A31, - 0xA3200000A33, - 0xA3500000A36, - 0xA3800000A3A, - 0xA3C00000A3D, - 0xA3E00000A43, - 0xA4700000A49, - 0xA4B00000A4E, - 0xA5100000A52, - 0xA5C00000A5D, - 0xA6600000A76, - 0xA8100000A84, - 0xA8500000A8E, - 0xA8F00000A92, - 0xA9300000AA9, - 0xAAA00000AB1, - 0xAB200000AB4, - 0xAB500000ABA, - 0xABC00000AC6, - 0xAC700000ACA, - 0xACB00000ACE, - 0xAD000000AD1, - 0xAE000000AE4, - 0xAE600000AF0, - 0xAF900000B00, - 0xB0100000B04, - 0xB0500000B0D, - 0xB0F00000B11, - 0xB1300000B29, - 0xB2A00000B31, - 0xB3200000B34, - 0xB3500000B3A, - 0xB3C00000B45, - 0xB4700000B49, - 
0xB4B00000B4E, - 0xB5500000B58, - 0xB5F00000B64, - 0xB6600000B70, - 0xB7100000B72, - 0xB8200000B84, - 0xB8500000B8B, - 0xB8E00000B91, - 0xB9200000B96, - 0xB9900000B9B, - 0xB9C00000B9D, - 0xB9E00000BA0, - 0xBA300000BA5, - 0xBA800000BAB, - 0xBAE00000BBA, - 0xBBE00000BC3, - 0xBC600000BC9, - 0xBCA00000BCE, - 0xBD000000BD1, - 0xBD700000BD8, - 0xBE600000BF0, - 0xC0000000C0D, - 0xC0E00000C11, - 0xC1200000C29, - 0xC2A00000C3A, - 0xC3C00000C45, - 0xC4600000C49, - 0xC4A00000C4E, - 0xC5500000C57, - 0xC5800000C5B, - 0xC5D00000C5E, - 0xC6000000C64, - 0xC6600000C70, - 0xC8000000C84, - 0xC8500000C8D, - 0xC8E00000C91, - 0xC9200000CA9, - 0xCAA00000CB4, - 0xCB500000CBA, - 0xCBC00000CC5, - 0xCC600000CC9, - 0xCCA00000CCE, - 0xCD500000CD7, - 0xCDD00000CDF, - 0xCE000000CE4, - 0xCE600000CF0, - 0xCF100000CF4, - 0xD0000000D0D, - 0xD0E00000D11, - 0xD1200000D45, - 0xD4600000D49, - 0xD4A00000D4F, - 0xD5400000D58, - 0xD5F00000D64, - 0xD6600000D70, - 0xD7A00000D80, - 0xD8100000D84, - 0xD8500000D97, - 0xD9A00000DB2, - 0xDB300000DBC, - 0xDBD00000DBE, - 0xDC000000DC7, - 0xDCA00000DCB, - 0xDCF00000DD5, - 0xDD600000DD7, - 0xDD800000DE0, - 0xDE600000DF0, - 0xDF200000DF4, - 0xE0100000E33, - 0xE3400000E3B, - 0xE4000000E4F, - 0xE5000000E5A, - 0xE8100000E83, - 0xE8400000E85, - 0xE8600000E8B, - 0xE8C00000EA4, - 0xEA500000EA6, - 0xEA700000EB3, - 0xEB400000EBE, - 0xEC000000EC5, - 0xEC600000EC7, - 0xEC800000ECF, - 0xED000000EDA, - 0xEDE00000EE0, - 0xF0000000F01, - 0xF0B00000F0C, - 0xF1800000F1A, - 0xF2000000F2A, - 0xF3500000F36, - 0xF3700000F38, - 0xF3900000F3A, - 0xF3E00000F43, - 0xF4400000F48, - 0xF4900000F4D, - 0xF4E00000F52, - 0xF5300000F57, - 0xF5800000F5C, - 0xF5D00000F69, - 0xF6A00000F6D, - 0xF7100000F73, - 0xF7400000F75, - 0xF7A00000F81, - 0xF8200000F85, - 0xF8600000F93, - 0xF9400000F98, - 0xF9900000F9D, - 0xF9E00000FA2, - 0xFA300000FA7, - 0xFA800000FAC, - 0xFAD00000FB9, - 0xFBA00000FBD, - 0xFC600000FC7, - 0x10000000104A, - 0x10500000109E, - 0x10D0000010FB, - 0x10FD00001100, - 0x120000001249, - 0x124A0000124E, - 0x125000001257, - 0x125800001259, - 0x125A0000125E, - 0x126000001289, - 0x128A0000128E, - 0x1290000012B1, - 0x12B2000012B6, - 0x12B8000012BF, - 0x12C0000012C1, - 0x12C2000012C6, - 0x12C8000012D7, - 0x12D800001311, - 0x131200001316, - 0x13180000135B, - 0x135D00001360, - 0x138000001390, - 0x13A0000013F6, - 0x14010000166D, - 0x166F00001680, - 0x16810000169B, - 0x16A0000016EB, - 0x16F1000016F9, - 0x170000001716, - 0x171F00001735, - 0x174000001754, - 0x17600000176D, - 0x176E00001771, - 0x177200001774, - 0x1780000017B4, - 0x17B6000017D4, - 0x17D7000017D8, - 0x17DC000017DE, - 0x17E0000017EA, - 0x18100000181A, - 0x182000001879, - 0x1880000018AB, - 0x18B0000018F6, - 0x19000000191F, - 0x19200000192C, - 0x19300000193C, - 0x19460000196E, - 0x197000001975, - 0x1980000019AC, - 0x19B0000019CA, - 0x19D0000019DA, - 0x1A0000001A1C, - 0x1A2000001A5F, - 0x1A6000001A7D, - 0x1A7F00001A8A, - 0x1A9000001A9A, - 0x1AA700001AA8, - 0x1AB000001ABE, - 0x1ABF00001ACF, - 0x1B0000001B4D, - 0x1B5000001B5A, - 0x1B6B00001B74, - 0x1B8000001BF4, - 0x1C0000001C38, - 0x1C4000001C4A, - 0x1C4D00001C7E, - 0x1CD000001CD3, - 0x1CD400001CFB, - 0x1D0000001D2C, - 0x1D2F00001D30, - 0x1D3B00001D3C, - 0x1D4E00001D4F, - 0x1D6B00001D78, - 0x1D7900001D9B, - 0x1DC000001E00, - 0x1E0100001E02, - 0x1E0300001E04, - 0x1E0500001E06, - 0x1E0700001E08, - 0x1E0900001E0A, - 0x1E0B00001E0C, - 0x1E0D00001E0E, - 0x1E0F00001E10, - 0x1E1100001E12, - 0x1E1300001E14, - 0x1E1500001E16, - 0x1E1700001E18, - 0x1E1900001E1A, - 0x1E1B00001E1C, - 0x1E1D00001E1E, - 0x1E1F00001E20, - 
0x1E2100001E22, - 0x1E2300001E24, - 0x1E2500001E26, - 0x1E2700001E28, - 0x1E2900001E2A, - 0x1E2B00001E2C, - 0x1E2D00001E2E, - 0x1E2F00001E30, - 0x1E3100001E32, - 0x1E3300001E34, - 0x1E3500001E36, - 0x1E3700001E38, - 0x1E3900001E3A, - 0x1E3B00001E3C, - 0x1E3D00001E3E, - 0x1E3F00001E40, - 0x1E4100001E42, - 0x1E4300001E44, - 0x1E4500001E46, - 0x1E4700001E48, - 0x1E4900001E4A, - 0x1E4B00001E4C, - 0x1E4D00001E4E, - 0x1E4F00001E50, - 0x1E5100001E52, - 0x1E5300001E54, - 0x1E5500001E56, - 0x1E5700001E58, - 0x1E5900001E5A, - 0x1E5B00001E5C, - 0x1E5D00001E5E, - 0x1E5F00001E60, - 0x1E6100001E62, - 0x1E6300001E64, - 0x1E6500001E66, - 0x1E6700001E68, - 0x1E6900001E6A, - 0x1E6B00001E6C, - 0x1E6D00001E6E, - 0x1E6F00001E70, - 0x1E7100001E72, - 0x1E7300001E74, - 0x1E7500001E76, - 0x1E7700001E78, - 0x1E7900001E7A, - 0x1E7B00001E7C, - 0x1E7D00001E7E, - 0x1E7F00001E80, - 0x1E8100001E82, - 0x1E8300001E84, - 0x1E8500001E86, - 0x1E8700001E88, - 0x1E8900001E8A, - 0x1E8B00001E8C, - 0x1E8D00001E8E, - 0x1E8F00001E90, - 0x1E9100001E92, - 0x1E9300001E94, - 0x1E9500001E9A, - 0x1E9C00001E9E, - 0x1E9F00001EA0, - 0x1EA100001EA2, - 0x1EA300001EA4, - 0x1EA500001EA6, - 0x1EA700001EA8, - 0x1EA900001EAA, - 0x1EAB00001EAC, - 0x1EAD00001EAE, - 0x1EAF00001EB0, - 0x1EB100001EB2, - 0x1EB300001EB4, - 0x1EB500001EB6, - 0x1EB700001EB8, - 0x1EB900001EBA, - 0x1EBB00001EBC, - 0x1EBD00001EBE, - 0x1EBF00001EC0, - 0x1EC100001EC2, - 0x1EC300001EC4, - 0x1EC500001EC6, - 0x1EC700001EC8, - 0x1EC900001ECA, - 0x1ECB00001ECC, - 0x1ECD00001ECE, - 0x1ECF00001ED0, - 0x1ED100001ED2, - 0x1ED300001ED4, - 0x1ED500001ED6, - 0x1ED700001ED8, - 0x1ED900001EDA, - 0x1EDB00001EDC, - 0x1EDD00001EDE, - 0x1EDF00001EE0, - 0x1EE100001EE2, - 0x1EE300001EE4, - 0x1EE500001EE6, - 0x1EE700001EE8, - 0x1EE900001EEA, - 0x1EEB00001EEC, - 0x1EED00001EEE, - 0x1EEF00001EF0, - 0x1EF100001EF2, - 0x1EF300001EF4, - 0x1EF500001EF6, - 0x1EF700001EF8, - 0x1EF900001EFA, - 0x1EFB00001EFC, - 0x1EFD00001EFE, - 0x1EFF00001F08, - 0x1F1000001F16, - 0x1F2000001F28, - 0x1F3000001F38, - 0x1F4000001F46, - 0x1F5000001F58, - 0x1F6000001F68, - 0x1F7000001F71, - 0x1F7200001F73, - 0x1F7400001F75, - 0x1F7600001F77, - 0x1F7800001F79, - 0x1F7A00001F7B, - 0x1F7C00001F7D, - 0x1FB000001FB2, - 0x1FB600001FB7, - 0x1FC600001FC7, - 0x1FD000001FD3, - 0x1FD600001FD8, - 0x1FE000001FE3, - 0x1FE400001FE8, - 0x1FF600001FF7, - 0x214E0000214F, - 0x218400002185, - 0x2C3000002C60, - 0x2C6100002C62, - 0x2C6500002C67, - 0x2C6800002C69, - 0x2C6A00002C6B, - 0x2C6C00002C6D, - 0x2C7100002C72, - 0x2C7300002C75, - 0x2C7600002C7C, - 0x2C8100002C82, - 0x2C8300002C84, - 0x2C8500002C86, - 0x2C8700002C88, - 0x2C8900002C8A, - 0x2C8B00002C8C, - 0x2C8D00002C8E, - 0x2C8F00002C90, - 0x2C9100002C92, - 0x2C9300002C94, - 0x2C9500002C96, - 0x2C9700002C98, - 0x2C9900002C9A, - 0x2C9B00002C9C, - 0x2C9D00002C9E, - 0x2C9F00002CA0, - 0x2CA100002CA2, - 0x2CA300002CA4, - 0x2CA500002CA6, - 0x2CA700002CA8, - 0x2CA900002CAA, - 0x2CAB00002CAC, - 0x2CAD00002CAE, - 0x2CAF00002CB0, - 0x2CB100002CB2, - 0x2CB300002CB4, - 0x2CB500002CB6, - 0x2CB700002CB8, - 0x2CB900002CBA, - 0x2CBB00002CBC, - 0x2CBD00002CBE, - 0x2CBF00002CC0, - 0x2CC100002CC2, - 0x2CC300002CC4, - 0x2CC500002CC6, - 0x2CC700002CC8, - 0x2CC900002CCA, - 0x2CCB00002CCC, - 0x2CCD00002CCE, - 0x2CCF00002CD0, - 0x2CD100002CD2, - 0x2CD300002CD4, - 0x2CD500002CD6, - 0x2CD700002CD8, - 0x2CD900002CDA, - 0x2CDB00002CDC, - 0x2CDD00002CDE, - 0x2CDF00002CE0, - 0x2CE100002CE2, - 0x2CE300002CE5, - 0x2CEC00002CED, - 0x2CEE00002CF2, - 0x2CF300002CF4, - 0x2D0000002D26, - 0x2D2700002D28, - 0x2D2D00002D2E, - 
0x2D3000002D68, - 0x2D7F00002D97, - 0x2DA000002DA7, - 0x2DA800002DAF, - 0x2DB000002DB7, - 0x2DB800002DBF, - 0x2DC000002DC7, - 0x2DC800002DCF, - 0x2DD000002DD7, - 0x2DD800002DDF, - 0x2DE000002E00, - 0x2E2F00002E30, - 0x300500003008, - 0x302A0000302E, - 0x303C0000303D, - 0x304100003097, - 0x30990000309B, - 0x309D0000309F, - 0x30A1000030FB, - 0x30FC000030FF, - 0x310500003130, - 0x31A0000031C0, - 0x31F000003200, - 0x340000004DC0, - 0x4E000000A48D, - 0xA4D00000A4FE, - 0xA5000000A60D, - 0xA6100000A62C, - 0xA6410000A642, - 0xA6430000A644, - 0xA6450000A646, - 0xA6470000A648, - 0xA6490000A64A, - 0xA64B0000A64C, - 0xA64D0000A64E, - 0xA64F0000A650, - 0xA6510000A652, - 0xA6530000A654, - 0xA6550000A656, - 0xA6570000A658, - 0xA6590000A65A, - 0xA65B0000A65C, - 0xA65D0000A65E, - 0xA65F0000A660, - 0xA6610000A662, - 0xA6630000A664, - 0xA6650000A666, - 0xA6670000A668, - 0xA6690000A66A, - 0xA66B0000A66C, - 0xA66D0000A670, - 0xA6740000A67E, - 0xA67F0000A680, - 0xA6810000A682, - 0xA6830000A684, - 0xA6850000A686, - 0xA6870000A688, - 0xA6890000A68A, - 0xA68B0000A68C, - 0xA68D0000A68E, - 0xA68F0000A690, - 0xA6910000A692, - 0xA6930000A694, - 0xA6950000A696, - 0xA6970000A698, - 0xA6990000A69A, - 0xA69B0000A69C, - 0xA69E0000A6E6, - 0xA6F00000A6F2, - 0xA7170000A720, - 0xA7230000A724, - 0xA7250000A726, - 0xA7270000A728, - 0xA7290000A72A, - 0xA72B0000A72C, - 0xA72D0000A72E, - 0xA72F0000A732, - 0xA7330000A734, - 0xA7350000A736, - 0xA7370000A738, - 0xA7390000A73A, - 0xA73B0000A73C, - 0xA73D0000A73E, - 0xA73F0000A740, - 0xA7410000A742, - 0xA7430000A744, - 0xA7450000A746, - 0xA7470000A748, - 0xA7490000A74A, - 0xA74B0000A74C, - 0xA74D0000A74E, - 0xA74F0000A750, - 0xA7510000A752, - 0xA7530000A754, - 0xA7550000A756, - 0xA7570000A758, - 0xA7590000A75A, - 0xA75B0000A75C, - 0xA75D0000A75E, - 0xA75F0000A760, - 0xA7610000A762, - 0xA7630000A764, - 0xA7650000A766, - 0xA7670000A768, - 0xA7690000A76A, - 0xA76B0000A76C, - 0xA76D0000A76E, - 0xA76F0000A770, - 0xA7710000A779, - 0xA77A0000A77B, - 0xA77C0000A77D, - 0xA77F0000A780, - 0xA7810000A782, - 0xA7830000A784, - 0xA7850000A786, - 0xA7870000A789, - 0xA78C0000A78D, - 0xA78E0000A790, - 0xA7910000A792, - 0xA7930000A796, - 0xA7970000A798, - 0xA7990000A79A, - 0xA79B0000A79C, - 0xA79D0000A79E, - 0xA79F0000A7A0, - 0xA7A10000A7A2, - 0xA7A30000A7A4, - 0xA7A50000A7A6, - 0xA7A70000A7A8, - 0xA7A90000A7AA, - 0xA7AF0000A7B0, - 0xA7B50000A7B6, - 0xA7B70000A7B8, - 0xA7B90000A7BA, - 0xA7BB0000A7BC, - 0xA7BD0000A7BE, - 0xA7BF0000A7C0, - 0xA7C10000A7C2, - 0xA7C30000A7C4, - 0xA7C80000A7C9, - 0xA7CA0000A7CB, - 0xA7D10000A7D2, - 0xA7D30000A7D4, - 0xA7D50000A7D6, - 0xA7D70000A7D8, - 0xA7D90000A7DA, - 0xA7F60000A7F8, - 0xA7FA0000A828, - 0xA82C0000A82D, - 0xA8400000A874, - 0xA8800000A8C6, - 0xA8D00000A8DA, - 0xA8E00000A8F8, - 0xA8FB0000A8FC, - 0xA8FD0000A92E, - 0xA9300000A954, - 0xA9800000A9C1, - 0xA9CF0000A9DA, - 0xA9E00000A9FF, - 0xAA000000AA37, - 0xAA400000AA4E, - 0xAA500000AA5A, - 0xAA600000AA77, - 0xAA7A0000AAC3, - 0xAADB0000AADE, - 0xAAE00000AAF0, - 0xAAF20000AAF7, - 0xAB010000AB07, - 0xAB090000AB0F, - 0xAB110000AB17, - 0xAB200000AB27, - 0xAB280000AB2F, - 0xAB300000AB5B, - 0xAB600000AB69, - 0xABC00000ABEB, - 0xABEC0000ABEE, - 0xABF00000ABFA, - 0xAC000000D7A4, - 0xFA0E0000FA10, - 0xFA110000FA12, - 0xFA130000FA15, - 0xFA1F0000FA20, - 0xFA210000FA22, - 0xFA230000FA25, - 0xFA270000FA2A, - 0xFB1E0000FB1F, - 0xFE200000FE30, - 0xFE730000FE74, - 0x100000001000C, - 0x1000D00010027, - 0x100280001003B, - 0x1003C0001003E, - 0x1003F0001004E, - 0x100500001005E, - 0x10080000100FB, - 0x101FD000101FE, - 0x102800001029D, - 
0x102A0000102D1, - 0x102E0000102E1, - 0x1030000010320, - 0x1032D00010341, - 0x103420001034A, - 0x103500001037B, - 0x103800001039E, - 0x103A0000103C4, - 0x103C8000103D0, - 0x104280001049E, - 0x104A0000104AA, - 0x104D8000104FC, - 0x1050000010528, - 0x1053000010564, - 0x10597000105A2, - 0x105A3000105B2, - 0x105B3000105BA, - 0x105BB000105BD, - 0x1060000010737, - 0x1074000010756, - 0x1076000010768, - 0x1078000010781, - 0x1080000010806, - 0x1080800010809, - 0x1080A00010836, - 0x1083700010839, - 0x1083C0001083D, - 0x1083F00010856, - 0x1086000010877, - 0x108800001089F, - 0x108E0000108F3, - 0x108F4000108F6, - 0x1090000010916, - 0x109200001093A, - 0x10980000109B8, - 0x109BE000109C0, - 0x10A0000010A04, - 0x10A0500010A07, - 0x10A0C00010A14, - 0x10A1500010A18, - 0x10A1900010A36, - 0x10A3800010A3B, - 0x10A3F00010A40, - 0x10A6000010A7D, - 0x10A8000010A9D, - 0x10AC000010AC8, - 0x10AC900010AE7, - 0x10B0000010B36, - 0x10B4000010B56, - 0x10B6000010B73, - 0x10B8000010B92, - 0x10C0000010C49, - 0x10CC000010CF3, - 0x10D0000010D28, - 0x10D3000010D3A, - 0x10E8000010EAA, - 0x10EAB00010EAD, - 0x10EB000010EB2, - 0x10EFD00010F1D, - 0x10F2700010F28, - 0x10F3000010F51, - 0x10F7000010F86, - 0x10FB000010FC5, - 0x10FE000010FF7, - 0x1100000011047, - 0x1106600011076, - 0x1107F000110BB, - 0x110C2000110C3, - 0x110D0000110E9, - 0x110F0000110FA, - 0x1110000011135, - 0x1113600011140, - 0x1114400011148, - 0x1115000011174, - 0x1117600011177, - 0x11180000111C5, - 0x111C9000111CD, - 0x111CE000111DB, - 0x111DC000111DD, - 0x1120000011212, - 0x1121300011238, - 0x1123E00011242, - 0x1128000011287, - 0x1128800011289, - 0x1128A0001128E, - 0x1128F0001129E, - 0x1129F000112A9, - 0x112B0000112EB, - 0x112F0000112FA, - 0x1130000011304, - 0x113050001130D, - 0x1130F00011311, - 0x1131300011329, - 0x1132A00011331, - 0x1133200011334, - 0x113350001133A, - 0x1133B00011345, - 0x1134700011349, - 0x1134B0001134E, - 0x1135000011351, - 0x1135700011358, - 0x1135D00011364, - 0x113660001136D, - 0x1137000011375, - 0x114000001144B, - 0x114500001145A, - 0x1145E00011462, - 0x11480000114C6, - 0x114C7000114C8, - 0x114D0000114DA, - 0x11580000115B6, - 0x115B8000115C1, - 0x115D8000115DE, - 0x1160000011641, - 0x1164400011645, - 0x116500001165A, - 0x11680000116B9, - 0x116C0000116CA, - 0x117000001171B, - 0x1171D0001172C, - 0x117300001173A, - 0x1174000011747, - 0x118000001183B, - 0x118C0000118EA, - 0x118FF00011907, - 0x119090001190A, - 0x1190C00011914, - 0x1191500011917, - 0x1191800011936, - 0x1193700011939, - 0x1193B00011944, - 0x119500001195A, - 0x119A0000119A8, - 0x119AA000119D8, - 0x119DA000119E2, - 0x119E3000119E5, - 0x11A0000011A3F, - 0x11A4700011A48, - 0x11A5000011A9A, - 0x11A9D00011A9E, - 0x11AB000011AF9, - 0x11C0000011C09, - 0x11C0A00011C37, - 0x11C3800011C41, - 0x11C5000011C5A, - 0x11C7200011C90, - 0x11C9200011CA8, - 0x11CA900011CB7, - 0x11D0000011D07, - 0x11D0800011D0A, - 0x11D0B00011D37, - 0x11D3A00011D3B, - 0x11D3C00011D3E, - 0x11D3F00011D48, - 0x11D5000011D5A, - 0x11D6000011D66, - 0x11D6700011D69, - 0x11D6A00011D8F, - 0x11D9000011D92, - 0x11D9300011D99, - 0x11DA000011DAA, - 0x11EE000011EF7, - 0x11F0000011F11, - 0x11F1200011F3B, - 0x11F3E00011F43, - 0x11F5000011F5A, - 0x11FB000011FB1, - 0x120000001239A, - 0x1248000012544, - 0x12F9000012FF1, - 0x1300000013430, - 0x1344000013456, - 0x1440000014647, - 0x1680000016A39, - 0x16A4000016A5F, - 0x16A6000016A6A, - 0x16A7000016ABF, - 0x16AC000016ACA, - 0x16AD000016AEE, - 0x16AF000016AF5, - 0x16B0000016B37, - 0x16B4000016B44, - 0x16B5000016B5A, - 0x16B6300016B78, - 0x16B7D00016B90, - 0x16E6000016E80, - 0x16F0000016F4B, - 
0x16F4F00016F88, - 0x16F8F00016FA0, - 0x16FE000016FE2, - 0x16FE300016FE5, - 0x16FF000016FF2, - 0x17000000187F8, - 0x1880000018CD6, - 0x18D0000018D09, - 0x1AFF00001AFF4, - 0x1AFF50001AFFC, - 0x1AFFD0001AFFF, - 0x1B0000001B123, - 0x1B1320001B133, - 0x1B1500001B153, - 0x1B1550001B156, - 0x1B1640001B168, - 0x1B1700001B2FC, - 0x1BC000001BC6B, - 0x1BC700001BC7D, - 0x1BC800001BC89, - 0x1BC900001BC9A, - 0x1BC9D0001BC9F, - 0x1CF000001CF2E, - 0x1CF300001CF47, - 0x1DA000001DA37, - 0x1DA3B0001DA6D, - 0x1DA750001DA76, - 0x1DA840001DA85, - 0x1DA9B0001DAA0, - 0x1DAA10001DAB0, - 0x1DF000001DF1F, - 0x1DF250001DF2B, - 0x1E0000001E007, - 0x1E0080001E019, - 0x1E01B0001E022, - 0x1E0230001E025, - 0x1E0260001E02B, - 0x1E08F0001E090, - 0x1E1000001E12D, - 0x1E1300001E13E, - 0x1E1400001E14A, - 0x1E14E0001E14F, - 0x1E2900001E2AF, - 0x1E2C00001E2FA, - 0x1E4D00001E4FA, - 0x1E7E00001E7E7, - 0x1E7E80001E7EC, - 0x1E7ED0001E7EF, - 0x1E7F00001E7FF, - 0x1E8000001E8C5, - 0x1E8D00001E8D7, - 0x1E9220001E94C, - 0x1E9500001E95A, - 0x200000002A6E0, - 0x2A7000002B73A, - 0x2B7400002B81E, - 0x2B8200002CEA2, - 0x2CEB00002EBE1, - 0x2EBF00002EE5E, - 0x300000003134B, - 0x31350000323B0, - ), - "CONTEXTJ": (0x200C0000200E,), - "CONTEXTO": ( - 0xB7000000B8, - 0x37500000376, - 0x5F3000005F5, - 0x6600000066A, - 0x6F0000006FA, - 0x30FB000030FC, - ), -} diff --git a/venv/lib/python3.10/site-packages/idna/intranges.py b/venv/lib/python3.10/site-packages/idna/intranges.py deleted file mode 100644 index 7bfaa8d..0000000 --- a/venv/lib/python3.10/site-packages/idna/intranges.py +++ /dev/null @@ -1,57 +0,0 @@ -""" -Given a list of integers, made up of (hopefully) a small number of long runs -of consecutive integers, compute a representation of the form -((start1, end1), (start2, end2) ...). Then answer the question "was x present -in the original list?" in time O(log(# runs)). -""" - -import bisect -from typing import List, Tuple - - -def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: - """Represent a list of integers as a sequence of ranges: - ((start_0, end_0), (start_1, end_1), ...), such that the original - integers are exactly those x such that start_i <= x < end_i for some i. - - Ranges are encoded as single integers (start << 32 | end), not as tuples. 
- """ - - sorted_list = sorted(list_) - ranges = [] - last_write = -1 - for i in range(len(sorted_list)): - if i + 1 < len(sorted_list): - if sorted_list[i] == sorted_list[i + 1] - 1: - continue - current_range = sorted_list[last_write + 1 : i + 1] - ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) - last_write = i - - return tuple(ranges) - - -def _encode_range(start: int, end: int) -> int: - return (start << 32) | end - - -def _decode_range(r: int) -> Tuple[int, int]: - return (r >> 32), (r & ((1 << 32) - 1)) - - -def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: - """Determine if `int_` falls into one of the ranges in `ranges`.""" - tuple_ = _encode_range(int_, 0) - pos = bisect.bisect_left(ranges, tuple_) - # we could be immediately ahead of a tuple (start, end) - # with start < int_ <= end - if pos > 0: - left, right = _decode_range(ranges[pos - 1]) - if left <= int_ < right: - return True - # or we could be immediately behind a tuple (int_, end) - if pos < len(ranges): - left, _ = _decode_range(ranges[pos]) - if left == int_: - return True - return False diff --git a/venv/lib/python3.10/site-packages/idna/package_data.py b/venv/lib/python3.10/site-packages/idna/package_data.py deleted file mode 100644 index 514ff7e..0000000 --- a/venv/lib/python3.10/site-packages/idna/package_data.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "3.10" diff --git a/venv/lib/python3.10/site-packages/idna/py.typed b/venv/lib/python3.10/site-packages/idna/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/idna/uts46data.py b/venv/lib/python3.10/site-packages/idna/uts46data.py deleted file mode 100644 index eb89432..0000000 --- a/venv/lib/python3.10/site-packages/idna/uts46data.py +++ /dev/null @@ -1,8681 +0,0 @@ -# This file is automatically generated by tools/idna-data -# vim: set fileencoding=utf-8 : - -from typing import List, Tuple, Union - -"""IDNA Mapping Table from UTS46.""" - - -__version__ = "15.1.0" - - -def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x0, "3"), - (0x1, "3"), - (0x2, "3"), - (0x3, "3"), - (0x4, "3"), - (0x5, "3"), - (0x6, "3"), - (0x7, "3"), - (0x8, "3"), - (0x9, "3"), - (0xA, "3"), - (0xB, "3"), - (0xC, "3"), - (0xD, "3"), - (0xE, "3"), - (0xF, "3"), - (0x10, "3"), - (0x11, "3"), - (0x12, "3"), - (0x13, "3"), - (0x14, "3"), - (0x15, "3"), - (0x16, "3"), - (0x17, "3"), - (0x18, "3"), - (0x19, "3"), - (0x1A, "3"), - (0x1B, "3"), - (0x1C, "3"), - (0x1D, "3"), - (0x1E, "3"), - (0x1F, "3"), - (0x20, "3"), - (0x21, "3"), - (0x22, "3"), - (0x23, "3"), - (0x24, "3"), - (0x25, "3"), - (0x26, "3"), - (0x27, "3"), - (0x28, "3"), - (0x29, "3"), - (0x2A, "3"), - (0x2B, "3"), - (0x2C, "3"), - (0x2D, "V"), - (0x2E, "V"), - (0x2F, "3"), - (0x30, "V"), - (0x31, "V"), - (0x32, "V"), - (0x33, "V"), - (0x34, "V"), - (0x35, "V"), - (0x36, "V"), - (0x37, "V"), - (0x38, "V"), - (0x39, "V"), - (0x3A, "3"), - (0x3B, "3"), - (0x3C, "3"), - (0x3D, "3"), - (0x3E, "3"), - (0x3F, "3"), - (0x40, "3"), - (0x41, "M", "a"), - (0x42, "M", "b"), - (0x43, "M", "c"), - (0x44, "M", "d"), - (0x45, "M", "e"), - (0x46, "M", "f"), - (0x47, "M", "g"), - (0x48, "M", "h"), - (0x49, "M", "i"), - (0x4A, "M", "j"), - (0x4B, "M", "k"), - (0x4C, "M", "l"), - (0x4D, "M", "m"), - (0x4E, "M", "n"), - (0x4F, "M", "o"), - (0x50, "M", "p"), - (0x51, "M", "q"), - (0x52, "M", "r"), - (0x53, "M", "s"), - (0x54, "M", "t"), - (0x55, "M", "u"), - (0x56, "M", "v"), - (0x57, "M", "w"), - (0x58, "M", "x"), - (0x59, "M", 
"y"), - (0x5A, "M", "z"), - (0x5B, "3"), - (0x5C, "3"), - (0x5D, "3"), - (0x5E, "3"), - (0x5F, "3"), - (0x60, "3"), - (0x61, "V"), - (0x62, "V"), - (0x63, "V"), - ] - - -def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x64, "V"), - (0x65, "V"), - (0x66, "V"), - (0x67, "V"), - (0x68, "V"), - (0x69, "V"), - (0x6A, "V"), - (0x6B, "V"), - (0x6C, "V"), - (0x6D, "V"), - (0x6E, "V"), - (0x6F, "V"), - (0x70, "V"), - (0x71, "V"), - (0x72, "V"), - (0x73, "V"), - (0x74, "V"), - (0x75, "V"), - (0x76, "V"), - (0x77, "V"), - (0x78, "V"), - (0x79, "V"), - (0x7A, "V"), - (0x7B, "3"), - (0x7C, "3"), - (0x7D, "3"), - (0x7E, "3"), - (0x7F, "3"), - (0x80, "X"), - (0x81, "X"), - (0x82, "X"), - (0x83, "X"), - (0x84, "X"), - (0x85, "X"), - (0x86, "X"), - (0x87, "X"), - (0x88, "X"), - (0x89, "X"), - (0x8A, "X"), - (0x8B, "X"), - (0x8C, "X"), - (0x8D, "X"), - (0x8E, "X"), - (0x8F, "X"), - (0x90, "X"), - (0x91, "X"), - (0x92, "X"), - (0x93, "X"), - (0x94, "X"), - (0x95, "X"), - (0x96, "X"), - (0x97, "X"), - (0x98, "X"), - (0x99, "X"), - (0x9A, "X"), - (0x9B, "X"), - (0x9C, "X"), - (0x9D, "X"), - (0x9E, "X"), - (0x9F, "X"), - (0xA0, "3", " "), - (0xA1, "V"), - (0xA2, "V"), - (0xA3, "V"), - (0xA4, "V"), - (0xA5, "V"), - (0xA6, "V"), - (0xA7, "V"), - (0xA8, "3", " ̈"), - (0xA9, "V"), - (0xAA, "M", "a"), - (0xAB, "V"), - (0xAC, "V"), - (0xAD, "I"), - (0xAE, "V"), - (0xAF, "3", " ̄"), - (0xB0, "V"), - (0xB1, "V"), - (0xB2, "M", "2"), - (0xB3, "M", "3"), - (0xB4, "3", " ́"), - (0xB5, "M", "μ"), - (0xB6, "V"), - (0xB7, "V"), - (0xB8, "3", " ̧"), - (0xB9, "M", "1"), - (0xBA, "M", "o"), - (0xBB, "V"), - (0xBC, "M", "1⁄4"), - (0xBD, "M", "1⁄2"), - (0xBE, "M", "3⁄4"), - (0xBF, "V"), - (0xC0, "M", "à"), - (0xC1, "M", "á"), - (0xC2, "M", "â"), - (0xC3, "M", "ã"), - (0xC4, "M", "ä"), - (0xC5, "M", "å"), - (0xC6, "M", "æ"), - (0xC7, "M", "ç"), - ] - - -def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xC8, "M", "è"), - (0xC9, "M", "é"), - (0xCA, "M", "ê"), - (0xCB, "M", "ë"), - (0xCC, "M", "ì"), - (0xCD, "M", "í"), - (0xCE, "M", "î"), - (0xCF, "M", "ï"), - (0xD0, "M", "ð"), - (0xD1, "M", "ñ"), - (0xD2, "M", "ò"), - (0xD3, "M", "ó"), - (0xD4, "M", "ô"), - (0xD5, "M", "õ"), - (0xD6, "M", "ö"), - (0xD7, "V"), - (0xD8, "M", "ø"), - (0xD9, "M", "ù"), - (0xDA, "M", "ú"), - (0xDB, "M", "û"), - (0xDC, "M", "ü"), - (0xDD, "M", "ý"), - (0xDE, "M", "þ"), - (0xDF, "D", "ss"), - (0xE0, "V"), - (0xE1, "V"), - (0xE2, "V"), - (0xE3, "V"), - (0xE4, "V"), - (0xE5, "V"), - (0xE6, "V"), - (0xE7, "V"), - (0xE8, "V"), - (0xE9, "V"), - (0xEA, "V"), - (0xEB, "V"), - (0xEC, "V"), - (0xED, "V"), - (0xEE, "V"), - (0xEF, "V"), - (0xF0, "V"), - (0xF1, "V"), - (0xF2, "V"), - (0xF3, "V"), - (0xF4, "V"), - (0xF5, "V"), - (0xF6, "V"), - (0xF7, "V"), - (0xF8, "V"), - (0xF9, "V"), - (0xFA, "V"), - (0xFB, "V"), - (0xFC, "V"), - (0xFD, "V"), - (0xFE, "V"), - (0xFF, "V"), - (0x100, "M", "ā"), - (0x101, "V"), - (0x102, "M", "ă"), - (0x103, "V"), - (0x104, "M", "ą"), - (0x105, "V"), - (0x106, "M", "ć"), - (0x107, "V"), - (0x108, "M", "ĉ"), - (0x109, "V"), - (0x10A, "M", "ċ"), - (0x10B, "V"), - (0x10C, "M", "č"), - (0x10D, "V"), - (0x10E, "M", "ď"), - (0x10F, "V"), - (0x110, "M", "đ"), - (0x111, "V"), - (0x112, "M", "ē"), - (0x113, "V"), - (0x114, "M", "ĕ"), - (0x115, "V"), - (0x116, "M", "ė"), - (0x117, "V"), - (0x118, "M", "ę"), - (0x119, "V"), - (0x11A, "M", "ě"), - (0x11B, "V"), - (0x11C, "M", "ĝ"), - (0x11D, "V"), - (0x11E, "M", "ğ"), - (0x11F, "V"), - (0x120, "M", "ġ"), - (0x121, "V"), - (0x122, 
"M", "ģ"), - (0x123, "V"), - (0x124, "M", "ĥ"), - (0x125, "V"), - (0x126, "M", "ħ"), - (0x127, "V"), - (0x128, "M", "ĩ"), - (0x129, "V"), - (0x12A, "M", "ī"), - (0x12B, "V"), - ] - - -def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x12C, "M", "ĭ"), - (0x12D, "V"), - (0x12E, "M", "į"), - (0x12F, "V"), - (0x130, "M", "i̇"), - (0x131, "V"), - (0x132, "M", "ij"), - (0x134, "M", "ĵ"), - (0x135, "V"), - (0x136, "M", "ķ"), - (0x137, "V"), - (0x139, "M", "ĺ"), - (0x13A, "V"), - (0x13B, "M", "ļ"), - (0x13C, "V"), - (0x13D, "M", "ľ"), - (0x13E, "V"), - (0x13F, "M", "l·"), - (0x141, "M", "ł"), - (0x142, "V"), - (0x143, "M", "ń"), - (0x144, "V"), - (0x145, "M", "ņ"), - (0x146, "V"), - (0x147, "M", "ň"), - (0x148, "V"), - (0x149, "M", "ʼn"), - (0x14A, "M", "ŋ"), - (0x14B, "V"), - (0x14C, "M", "ō"), - (0x14D, "V"), - (0x14E, "M", "ŏ"), - (0x14F, "V"), - (0x150, "M", "ő"), - (0x151, "V"), - (0x152, "M", "œ"), - (0x153, "V"), - (0x154, "M", "ŕ"), - (0x155, "V"), - (0x156, "M", "ŗ"), - (0x157, "V"), - (0x158, "M", "ř"), - (0x159, "V"), - (0x15A, "M", "ś"), - (0x15B, "V"), - (0x15C, "M", "ŝ"), - (0x15D, "V"), - (0x15E, "M", "ş"), - (0x15F, "V"), - (0x160, "M", "š"), - (0x161, "V"), - (0x162, "M", "ţ"), - (0x163, "V"), - (0x164, "M", "ť"), - (0x165, "V"), - (0x166, "M", "ŧ"), - (0x167, "V"), - (0x168, "M", "ũ"), - (0x169, "V"), - (0x16A, "M", "ū"), - (0x16B, "V"), - (0x16C, "M", "ŭ"), - (0x16D, "V"), - (0x16E, "M", "ů"), - (0x16F, "V"), - (0x170, "M", "ű"), - (0x171, "V"), - (0x172, "M", "ų"), - (0x173, "V"), - (0x174, "M", "ŵ"), - (0x175, "V"), - (0x176, "M", "ŷ"), - (0x177, "V"), - (0x178, "M", "ÿ"), - (0x179, "M", "ź"), - (0x17A, "V"), - (0x17B, "M", "ż"), - (0x17C, "V"), - (0x17D, "M", "ž"), - (0x17E, "V"), - (0x17F, "M", "s"), - (0x180, "V"), - (0x181, "M", "ɓ"), - (0x182, "M", "ƃ"), - (0x183, "V"), - (0x184, "M", "ƅ"), - (0x185, "V"), - (0x186, "M", "ɔ"), - (0x187, "M", "ƈ"), - (0x188, "V"), - (0x189, "M", "ɖ"), - (0x18A, "M", "ɗ"), - (0x18B, "M", "ƌ"), - (0x18C, "V"), - (0x18E, "M", "ǝ"), - (0x18F, "M", "ə"), - (0x190, "M", "ɛ"), - (0x191, "M", "ƒ"), - (0x192, "V"), - (0x193, "M", "ɠ"), - ] - - -def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x194, "M", "ɣ"), - (0x195, "V"), - (0x196, "M", "ɩ"), - (0x197, "M", "ɨ"), - (0x198, "M", "ƙ"), - (0x199, "V"), - (0x19C, "M", "ɯ"), - (0x19D, "M", "ɲ"), - (0x19E, "V"), - (0x19F, "M", "ɵ"), - (0x1A0, "M", "ơ"), - (0x1A1, "V"), - (0x1A2, "M", "ƣ"), - (0x1A3, "V"), - (0x1A4, "M", "ƥ"), - (0x1A5, "V"), - (0x1A6, "M", "ʀ"), - (0x1A7, "M", "ƨ"), - (0x1A8, "V"), - (0x1A9, "M", "ʃ"), - (0x1AA, "V"), - (0x1AC, "M", "ƭ"), - (0x1AD, "V"), - (0x1AE, "M", "ʈ"), - (0x1AF, "M", "ư"), - (0x1B0, "V"), - (0x1B1, "M", "ʊ"), - (0x1B2, "M", "ʋ"), - (0x1B3, "M", "ƴ"), - (0x1B4, "V"), - (0x1B5, "M", "ƶ"), - (0x1B6, "V"), - (0x1B7, "M", "ʒ"), - (0x1B8, "M", "ƹ"), - (0x1B9, "V"), - (0x1BC, "M", "ƽ"), - (0x1BD, "V"), - (0x1C4, "M", "dž"), - (0x1C7, "M", "lj"), - (0x1CA, "M", "nj"), - (0x1CD, "M", "ǎ"), - (0x1CE, "V"), - (0x1CF, "M", "ǐ"), - (0x1D0, "V"), - (0x1D1, "M", "ǒ"), - (0x1D2, "V"), - (0x1D3, "M", "ǔ"), - (0x1D4, "V"), - (0x1D5, "M", "ǖ"), - (0x1D6, "V"), - (0x1D7, "M", "ǘ"), - (0x1D8, "V"), - (0x1D9, "M", "ǚ"), - (0x1DA, "V"), - (0x1DB, "M", "ǜ"), - (0x1DC, "V"), - (0x1DE, "M", "ǟ"), - (0x1DF, "V"), - (0x1E0, "M", "ǡ"), - (0x1E1, "V"), - (0x1E2, "M", "ǣ"), - (0x1E3, "V"), - (0x1E4, "M", "ǥ"), - (0x1E5, "V"), - (0x1E6, "M", "ǧ"), - (0x1E7, "V"), - (0x1E8, "M", "ǩ"), - (0x1E9, "V"), - (0x1EA, "M", "ǫ"), - 
(0x1EB, "V"), - (0x1EC, "M", "ǭ"), - (0x1ED, "V"), - (0x1EE, "M", "ǯ"), - (0x1EF, "V"), - (0x1F1, "M", "dz"), - (0x1F4, "M", "ǵ"), - (0x1F5, "V"), - (0x1F6, "M", "ƕ"), - (0x1F7, "M", "ƿ"), - (0x1F8, "M", "ǹ"), - (0x1F9, "V"), - (0x1FA, "M", "ǻ"), - (0x1FB, "V"), - (0x1FC, "M", "ǽ"), - (0x1FD, "V"), - (0x1FE, "M", "ǿ"), - (0x1FF, "V"), - (0x200, "M", "ȁ"), - (0x201, "V"), - (0x202, "M", "ȃ"), - (0x203, "V"), - (0x204, "M", "ȅ"), - (0x205, "V"), - (0x206, "M", "ȇ"), - (0x207, "V"), - (0x208, "M", "ȉ"), - (0x209, "V"), - (0x20A, "M", "ȋ"), - (0x20B, "V"), - (0x20C, "M", "ȍ"), - ] - - -def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x20D, "V"), - (0x20E, "M", "ȏ"), - (0x20F, "V"), - (0x210, "M", "ȑ"), - (0x211, "V"), - (0x212, "M", "ȓ"), - (0x213, "V"), - (0x214, "M", "ȕ"), - (0x215, "V"), - (0x216, "M", "ȗ"), - (0x217, "V"), - (0x218, "M", "ș"), - (0x219, "V"), - (0x21A, "M", "ț"), - (0x21B, "V"), - (0x21C, "M", "ȝ"), - (0x21D, "V"), - (0x21E, "M", "ȟ"), - (0x21F, "V"), - (0x220, "M", "ƞ"), - (0x221, "V"), - (0x222, "M", "ȣ"), - (0x223, "V"), - (0x224, "M", "ȥ"), - (0x225, "V"), - (0x226, "M", "ȧ"), - (0x227, "V"), - (0x228, "M", "ȩ"), - (0x229, "V"), - (0x22A, "M", "ȫ"), - (0x22B, "V"), - (0x22C, "M", "ȭ"), - (0x22D, "V"), - (0x22E, "M", "ȯ"), - (0x22F, "V"), - (0x230, "M", "ȱ"), - (0x231, "V"), - (0x232, "M", "ȳ"), - (0x233, "V"), - (0x23A, "M", "ⱥ"), - (0x23B, "M", "ȼ"), - (0x23C, "V"), - (0x23D, "M", "ƚ"), - (0x23E, "M", "ⱦ"), - (0x23F, "V"), - (0x241, "M", "ɂ"), - (0x242, "V"), - (0x243, "M", "ƀ"), - (0x244, "M", "ʉ"), - (0x245, "M", "ʌ"), - (0x246, "M", "ɇ"), - (0x247, "V"), - (0x248, "M", "ɉ"), - (0x249, "V"), - (0x24A, "M", "ɋ"), - (0x24B, "V"), - (0x24C, "M", "ɍ"), - (0x24D, "V"), - (0x24E, "M", "ɏ"), - (0x24F, "V"), - (0x2B0, "M", "h"), - (0x2B1, "M", "ɦ"), - (0x2B2, "M", "j"), - (0x2B3, "M", "r"), - (0x2B4, "M", "ɹ"), - (0x2B5, "M", "ɻ"), - (0x2B6, "M", "ʁ"), - (0x2B7, "M", "w"), - (0x2B8, "M", "y"), - (0x2B9, "V"), - (0x2D8, "3", " ̆"), - (0x2D9, "3", " ̇"), - (0x2DA, "3", " ̊"), - (0x2DB, "3", " ̨"), - (0x2DC, "3", " ̃"), - (0x2DD, "3", " ̋"), - (0x2DE, "V"), - (0x2E0, "M", "ɣ"), - (0x2E1, "M", "l"), - (0x2E2, "M", "s"), - (0x2E3, "M", "x"), - (0x2E4, "M", "ʕ"), - (0x2E5, "V"), - (0x340, "M", "̀"), - (0x341, "M", "́"), - (0x342, "V"), - (0x343, "M", "̓"), - (0x344, "M", "̈́"), - (0x345, "M", "ι"), - (0x346, "V"), - (0x34F, "I"), - (0x350, "V"), - (0x370, "M", "ͱ"), - (0x371, "V"), - (0x372, "M", "ͳ"), - (0x373, "V"), - (0x374, "M", "ʹ"), - (0x375, "V"), - (0x376, "M", "ͷ"), - (0x377, "V"), - ] - - -def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x378, "X"), - (0x37A, "3", " ι"), - (0x37B, "V"), - (0x37E, "3", ";"), - (0x37F, "M", "ϳ"), - (0x380, "X"), - (0x384, "3", " ́"), - (0x385, "3", " ̈́"), - (0x386, "M", "ά"), - (0x387, "M", "·"), - (0x388, "M", "έ"), - (0x389, "M", "ή"), - (0x38A, "M", "ί"), - (0x38B, "X"), - (0x38C, "M", "ό"), - (0x38D, "X"), - (0x38E, "M", "ύ"), - (0x38F, "M", "ώ"), - (0x390, "V"), - (0x391, "M", "α"), - (0x392, "M", "β"), - (0x393, "M", "γ"), - (0x394, "M", "δ"), - (0x395, "M", "ε"), - (0x396, "M", "ζ"), - (0x397, "M", "η"), - (0x398, "M", "θ"), - (0x399, "M", "ι"), - (0x39A, "M", "κ"), - (0x39B, "M", "λ"), - (0x39C, "M", "μ"), - (0x39D, "M", "ν"), - (0x39E, "M", "ξ"), - (0x39F, "M", "ο"), - (0x3A0, "M", "π"), - (0x3A1, "M", "ρ"), - (0x3A2, "X"), - (0x3A3, "M", "σ"), - (0x3A4, "M", "τ"), - (0x3A5, "M", "υ"), - (0x3A6, "M", "φ"), - (0x3A7, "M", "χ"), - (0x3A8, "M", "ψ"), - (0x3A9, "M", 
"ω"), - (0x3AA, "M", "ϊ"), - (0x3AB, "M", "ϋ"), - (0x3AC, "V"), - (0x3C2, "D", "σ"), - (0x3C3, "V"), - (0x3CF, "M", "ϗ"), - (0x3D0, "M", "β"), - (0x3D1, "M", "θ"), - (0x3D2, "M", "υ"), - (0x3D3, "M", "ύ"), - (0x3D4, "M", "ϋ"), - (0x3D5, "M", "φ"), - (0x3D6, "M", "π"), - (0x3D7, "V"), - (0x3D8, "M", "ϙ"), - (0x3D9, "V"), - (0x3DA, "M", "ϛ"), - (0x3DB, "V"), - (0x3DC, "M", "ϝ"), - (0x3DD, "V"), - (0x3DE, "M", "ϟ"), - (0x3DF, "V"), - (0x3E0, "M", "ϡ"), - (0x3E1, "V"), - (0x3E2, "M", "ϣ"), - (0x3E3, "V"), - (0x3E4, "M", "ϥ"), - (0x3E5, "V"), - (0x3E6, "M", "ϧ"), - (0x3E7, "V"), - (0x3E8, "M", "ϩ"), - (0x3E9, "V"), - (0x3EA, "M", "ϫ"), - (0x3EB, "V"), - (0x3EC, "M", "ϭ"), - (0x3ED, "V"), - (0x3EE, "M", "ϯ"), - (0x3EF, "V"), - (0x3F0, "M", "κ"), - (0x3F1, "M", "ρ"), - (0x3F2, "M", "σ"), - (0x3F3, "V"), - (0x3F4, "M", "θ"), - (0x3F5, "M", "ε"), - (0x3F6, "V"), - (0x3F7, "M", "ϸ"), - (0x3F8, "V"), - (0x3F9, "M", "σ"), - (0x3FA, "M", "ϻ"), - (0x3FB, "V"), - (0x3FD, "M", "ͻ"), - (0x3FE, "M", "ͼ"), - (0x3FF, "M", "ͽ"), - (0x400, "M", "ѐ"), - (0x401, "M", "ё"), - (0x402, "M", "ђ"), - ] - - -def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x403, "M", "ѓ"), - (0x404, "M", "є"), - (0x405, "M", "ѕ"), - (0x406, "M", "і"), - (0x407, "M", "ї"), - (0x408, "M", "ј"), - (0x409, "M", "љ"), - (0x40A, "M", "њ"), - (0x40B, "M", "ћ"), - (0x40C, "M", "ќ"), - (0x40D, "M", "ѝ"), - (0x40E, "M", "ў"), - (0x40F, "M", "џ"), - (0x410, "M", "а"), - (0x411, "M", "б"), - (0x412, "M", "в"), - (0x413, "M", "г"), - (0x414, "M", "д"), - (0x415, "M", "е"), - (0x416, "M", "ж"), - (0x417, "M", "з"), - (0x418, "M", "и"), - (0x419, "M", "й"), - (0x41A, "M", "к"), - (0x41B, "M", "л"), - (0x41C, "M", "м"), - (0x41D, "M", "н"), - (0x41E, "M", "о"), - (0x41F, "M", "п"), - (0x420, "M", "р"), - (0x421, "M", "с"), - (0x422, "M", "т"), - (0x423, "M", "у"), - (0x424, "M", "ф"), - (0x425, "M", "х"), - (0x426, "M", "ц"), - (0x427, "M", "ч"), - (0x428, "M", "ш"), - (0x429, "M", "щ"), - (0x42A, "M", "ъ"), - (0x42B, "M", "ы"), - (0x42C, "M", "ь"), - (0x42D, "M", "э"), - (0x42E, "M", "ю"), - (0x42F, "M", "я"), - (0x430, "V"), - (0x460, "M", "ѡ"), - (0x461, "V"), - (0x462, "M", "ѣ"), - (0x463, "V"), - (0x464, "M", "ѥ"), - (0x465, "V"), - (0x466, "M", "ѧ"), - (0x467, "V"), - (0x468, "M", "ѩ"), - (0x469, "V"), - (0x46A, "M", "ѫ"), - (0x46B, "V"), - (0x46C, "M", "ѭ"), - (0x46D, "V"), - (0x46E, "M", "ѯ"), - (0x46F, "V"), - (0x470, "M", "ѱ"), - (0x471, "V"), - (0x472, "M", "ѳ"), - (0x473, "V"), - (0x474, "M", "ѵ"), - (0x475, "V"), - (0x476, "M", "ѷ"), - (0x477, "V"), - (0x478, "M", "ѹ"), - (0x479, "V"), - (0x47A, "M", "ѻ"), - (0x47B, "V"), - (0x47C, "M", "ѽ"), - (0x47D, "V"), - (0x47E, "M", "ѿ"), - (0x47F, "V"), - (0x480, "M", "ҁ"), - (0x481, "V"), - (0x48A, "M", "ҋ"), - (0x48B, "V"), - (0x48C, "M", "ҍ"), - (0x48D, "V"), - (0x48E, "M", "ҏ"), - (0x48F, "V"), - (0x490, "M", "ґ"), - (0x491, "V"), - (0x492, "M", "ғ"), - (0x493, "V"), - (0x494, "M", "ҕ"), - (0x495, "V"), - (0x496, "M", "җ"), - (0x497, "V"), - (0x498, "M", "ҙ"), - (0x499, "V"), - (0x49A, "M", "қ"), - (0x49B, "V"), - (0x49C, "M", "ҝ"), - (0x49D, "V"), - ] - - -def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x49E, "M", "ҟ"), - (0x49F, "V"), - (0x4A0, "M", "ҡ"), - (0x4A1, "V"), - (0x4A2, "M", "ң"), - (0x4A3, "V"), - (0x4A4, "M", "ҥ"), - (0x4A5, "V"), - (0x4A6, "M", "ҧ"), - (0x4A7, "V"), - (0x4A8, "M", "ҩ"), - (0x4A9, "V"), - (0x4AA, "M", "ҫ"), - (0x4AB, "V"), - (0x4AC, "M", "ҭ"), - (0x4AD, "V"), - (0x4AE, "M", "ү"), - (0x4AF, 
"V"), - (0x4B0, "M", "ұ"), - (0x4B1, "V"), - (0x4B2, "M", "ҳ"), - (0x4B3, "V"), - (0x4B4, "M", "ҵ"), - (0x4B5, "V"), - (0x4B6, "M", "ҷ"), - (0x4B7, "V"), - (0x4B8, "M", "ҹ"), - (0x4B9, "V"), - (0x4BA, "M", "һ"), - (0x4BB, "V"), - (0x4BC, "M", "ҽ"), - (0x4BD, "V"), - (0x4BE, "M", "ҿ"), - (0x4BF, "V"), - (0x4C0, "X"), - (0x4C1, "M", "ӂ"), - (0x4C2, "V"), - (0x4C3, "M", "ӄ"), - (0x4C4, "V"), - (0x4C5, "M", "ӆ"), - (0x4C6, "V"), - (0x4C7, "M", "ӈ"), - (0x4C8, "V"), - (0x4C9, "M", "ӊ"), - (0x4CA, "V"), - (0x4CB, "M", "ӌ"), - (0x4CC, "V"), - (0x4CD, "M", "ӎ"), - (0x4CE, "V"), - (0x4D0, "M", "ӑ"), - (0x4D1, "V"), - (0x4D2, "M", "ӓ"), - (0x4D3, "V"), - (0x4D4, "M", "ӕ"), - (0x4D5, "V"), - (0x4D6, "M", "ӗ"), - (0x4D7, "V"), - (0x4D8, "M", "ә"), - (0x4D9, "V"), - (0x4DA, "M", "ӛ"), - (0x4DB, "V"), - (0x4DC, "M", "ӝ"), - (0x4DD, "V"), - (0x4DE, "M", "ӟ"), - (0x4DF, "V"), - (0x4E0, "M", "ӡ"), - (0x4E1, "V"), - (0x4E2, "M", "ӣ"), - (0x4E3, "V"), - (0x4E4, "M", "ӥ"), - (0x4E5, "V"), - (0x4E6, "M", "ӧ"), - (0x4E7, "V"), - (0x4E8, "M", "ө"), - (0x4E9, "V"), - (0x4EA, "M", "ӫ"), - (0x4EB, "V"), - (0x4EC, "M", "ӭ"), - (0x4ED, "V"), - (0x4EE, "M", "ӯ"), - (0x4EF, "V"), - (0x4F0, "M", "ӱ"), - (0x4F1, "V"), - (0x4F2, "M", "ӳ"), - (0x4F3, "V"), - (0x4F4, "M", "ӵ"), - (0x4F5, "V"), - (0x4F6, "M", "ӷ"), - (0x4F7, "V"), - (0x4F8, "M", "ӹ"), - (0x4F9, "V"), - (0x4FA, "M", "ӻ"), - (0x4FB, "V"), - (0x4FC, "M", "ӽ"), - (0x4FD, "V"), - (0x4FE, "M", "ӿ"), - (0x4FF, "V"), - (0x500, "M", "ԁ"), - (0x501, "V"), - (0x502, "M", "ԃ"), - ] - - -def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x503, "V"), - (0x504, "M", "ԅ"), - (0x505, "V"), - (0x506, "M", "ԇ"), - (0x507, "V"), - (0x508, "M", "ԉ"), - (0x509, "V"), - (0x50A, "M", "ԋ"), - (0x50B, "V"), - (0x50C, "M", "ԍ"), - (0x50D, "V"), - (0x50E, "M", "ԏ"), - (0x50F, "V"), - (0x510, "M", "ԑ"), - (0x511, "V"), - (0x512, "M", "ԓ"), - (0x513, "V"), - (0x514, "M", "ԕ"), - (0x515, "V"), - (0x516, "M", "ԗ"), - (0x517, "V"), - (0x518, "M", "ԙ"), - (0x519, "V"), - (0x51A, "M", "ԛ"), - (0x51B, "V"), - (0x51C, "M", "ԝ"), - (0x51D, "V"), - (0x51E, "M", "ԟ"), - (0x51F, "V"), - (0x520, "M", "ԡ"), - (0x521, "V"), - (0x522, "M", "ԣ"), - (0x523, "V"), - (0x524, "M", "ԥ"), - (0x525, "V"), - (0x526, "M", "ԧ"), - (0x527, "V"), - (0x528, "M", "ԩ"), - (0x529, "V"), - (0x52A, "M", "ԫ"), - (0x52B, "V"), - (0x52C, "M", "ԭ"), - (0x52D, "V"), - (0x52E, "M", "ԯ"), - (0x52F, "V"), - (0x530, "X"), - (0x531, "M", "ա"), - (0x532, "M", "բ"), - (0x533, "M", "գ"), - (0x534, "M", "դ"), - (0x535, "M", "ե"), - (0x536, "M", "զ"), - (0x537, "M", "է"), - (0x538, "M", "ը"), - (0x539, "M", "թ"), - (0x53A, "M", "ժ"), - (0x53B, "M", "ի"), - (0x53C, "M", "լ"), - (0x53D, "M", "խ"), - (0x53E, "M", "ծ"), - (0x53F, "M", "կ"), - (0x540, "M", "հ"), - (0x541, "M", "ձ"), - (0x542, "M", "ղ"), - (0x543, "M", "ճ"), - (0x544, "M", "մ"), - (0x545, "M", "յ"), - (0x546, "M", "ն"), - (0x547, "M", "շ"), - (0x548, "M", "ո"), - (0x549, "M", "չ"), - (0x54A, "M", "պ"), - (0x54B, "M", "ջ"), - (0x54C, "M", "ռ"), - (0x54D, "M", "ս"), - (0x54E, "M", "վ"), - (0x54F, "M", "տ"), - (0x550, "M", "ր"), - (0x551, "M", "ց"), - (0x552, "M", "ւ"), - (0x553, "M", "փ"), - (0x554, "M", "ք"), - (0x555, "M", "օ"), - (0x556, "M", "ֆ"), - (0x557, "X"), - (0x559, "V"), - (0x587, "M", "եւ"), - (0x588, "V"), - (0x58B, "X"), - (0x58D, "V"), - (0x590, "X"), - (0x591, "V"), - (0x5C8, "X"), - (0x5D0, "V"), - (0x5EB, "X"), - (0x5EF, "V"), - (0x5F5, "X"), - (0x606, "V"), - (0x61C, "X"), - (0x61D, "V"), - ] - - -def _seg_10() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x675, "M", "اٴ"), - (0x676, "M", "وٴ"), - (0x677, "M", "ۇٴ"), - (0x678, "M", "يٴ"), - (0x679, "V"), - (0x6DD, "X"), - (0x6DE, "V"), - (0x70E, "X"), - (0x710, "V"), - (0x74B, "X"), - (0x74D, "V"), - (0x7B2, "X"), - (0x7C0, "V"), - (0x7FB, "X"), - (0x7FD, "V"), - (0x82E, "X"), - (0x830, "V"), - (0x83F, "X"), - (0x840, "V"), - (0x85C, "X"), - (0x85E, "V"), - (0x85F, "X"), - (0x860, "V"), - (0x86B, "X"), - (0x870, "V"), - (0x88F, "X"), - (0x898, "V"), - (0x8E2, "X"), - (0x8E3, "V"), - (0x958, "M", "क़"), - (0x959, "M", "ख़"), - (0x95A, "M", "ग़"), - (0x95B, "M", "ज़"), - (0x95C, "M", "ड़"), - (0x95D, "M", "ढ़"), - (0x95E, "M", "फ़"), - (0x95F, "M", "य़"), - (0x960, "V"), - (0x984, "X"), - (0x985, "V"), - (0x98D, "X"), - (0x98F, "V"), - (0x991, "X"), - (0x993, "V"), - (0x9A9, "X"), - (0x9AA, "V"), - (0x9B1, "X"), - (0x9B2, "V"), - (0x9B3, "X"), - (0x9B6, "V"), - (0x9BA, "X"), - (0x9BC, "V"), - (0x9C5, "X"), - (0x9C7, "V"), - (0x9C9, "X"), - (0x9CB, "V"), - (0x9CF, "X"), - (0x9D7, "V"), - (0x9D8, "X"), - (0x9DC, "M", "ড়"), - (0x9DD, "M", "ঢ়"), - (0x9DE, "X"), - (0x9DF, "M", "য়"), - (0x9E0, "V"), - (0x9E4, "X"), - (0x9E6, "V"), - (0x9FF, "X"), - (0xA01, "V"), - (0xA04, "X"), - (0xA05, "V"), - (0xA0B, "X"), - (0xA0F, "V"), - (0xA11, "X"), - (0xA13, "V"), - (0xA29, "X"), - (0xA2A, "V"), - (0xA31, "X"), - (0xA32, "V"), - (0xA33, "M", "ਲ਼"), - (0xA34, "X"), - (0xA35, "V"), - (0xA36, "M", "ਸ਼"), - (0xA37, "X"), - (0xA38, "V"), - (0xA3A, "X"), - (0xA3C, "V"), - (0xA3D, "X"), - (0xA3E, "V"), - (0xA43, "X"), - (0xA47, "V"), - (0xA49, "X"), - (0xA4B, "V"), - (0xA4E, "X"), - (0xA51, "V"), - (0xA52, "X"), - (0xA59, "M", "ਖ਼"), - (0xA5A, "M", "ਗ਼"), - (0xA5B, "M", "ਜ਼"), - (0xA5C, "V"), - (0xA5D, "X"), - ] - - -def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA5E, "M", "ਫ਼"), - (0xA5F, "X"), - (0xA66, "V"), - (0xA77, "X"), - (0xA81, "V"), - (0xA84, "X"), - (0xA85, "V"), - (0xA8E, "X"), - (0xA8F, "V"), - (0xA92, "X"), - (0xA93, "V"), - (0xAA9, "X"), - (0xAAA, "V"), - (0xAB1, "X"), - (0xAB2, "V"), - (0xAB4, "X"), - (0xAB5, "V"), - (0xABA, "X"), - (0xABC, "V"), - (0xAC6, "X"), - (0xAC7, "V"), - (0xACA, "X"), - (0xACB, "V"), - (0xACE, "X"), - (0xAD0, "V"), - (0xAD1, "X"), - (0xAE0, "V"), - (0xAE4, "X"), - (0xAE6, "V"), - (0xAF2, "X"), - (0xAF9, "V"), - (0xB00, "X"), - (0xB01, "V"), - (0xB04, "X"), - (0xB05, "V"), - (0xB0D, "X"), - (0xB0F, "V"), - (0xB11, "X"), - (0xB13, "V"), - (0xB29, "X"), - (0xB2A, "V"), - (0xB31, "X"), - (0xB32, "V"), - (0xB34, "X"), - (0xB35, "V"), - (0xB3A, "X"), - (0xB3C, "V"), - (0xB45, "X"), - (0xB47, "V"), - (0xB49, "X"), - (0xB4B, "V"), - (0xB4E, "X"), - (0xB55, "V"), - (0xB58, "X"), - (0xB5C, "M", "ଡ଼"), - (0xB5D, "M", "ଢ଼"), - (0xB5E, "X"), - (0xB5F, "V"), - (0xB64, "X"), - (0xB66, "V"), - (0xB78, "X"), - (0xB82, "V"), - (0xB84, "X"), - (0xB85, "V"), - (0xB8B, "X"), - (0xB8E, "V"), - (0xB91, "X"), - (0xB92, "V"), - (0xB96, "X"), - (0xB99, "V"), - (0xB9B, "X"), - (0xB9C, "V"), - (0xB9D, "X"), - (0xB9E, "V"), - (0xBA0, "X"), - (0xBA3, "V"), - (0xBA5, "X"), - (0xBA8, "V"), - (0xBAB, "X"), - (0xBAE, "V"), - (0xBBA, "X"), - (0xBBE, "V"), - (0xBC3, "X"), - (0xBC6, "V"), - (0xBC9, "X"), - (0xBCA, "V"), - (0xBCE, "X"), - (0xBD0, "V"), - (0xBD1, "X"), - (0xBD7, "V"), - (0xBD8, "X"), - (0xBE6, "V"), - (0xBFB, "X"), - (0xC00, "V"), - (0xC0D, "X"), - (0xC0E, "V"), - (0xC11, "X"), - (0xC12, "V"), - (0xC29, "X"), - (0xC2A, "V"), - ] - - -def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, 
str, str]]]: - return [ - (0xC3A, "X"), - (0xC3C, "V"), - (0xC45, "X"), - (0xC46, "V"), - (0xC49, "X"), - (0xC4A, "V"), - (0xC4E, "X"), - (0xC55, "V"), - (0xC57, "X"), - (0xC58, "V"), - (0xC5B, "X"), - (0xC5D, "V"), - (0xC5E, "X"), - (0xC60, "V"), - (0xC64, "X"), - (0xC66, "V"), - (0xC70, "X"), - (0xC77, "V"), - (0xC8D, "X"), - (0xC8E, "V"), - (0xC91, "X"), - (0xC92, "V"), - (0xCA9, "X"), - (0xCAA, "V"), - (0xCB4, "X"), - (0xCB5, "V"), - (0xCBA, "X"), - (0xCBC, "V"), - (0xCC5, "X"), - (0xCC6, "V"), - (0xCC9, "X"), - (0xCCA, "V"), - (0xCCE, "X"), - (0xCD5, "V"), - (0xCD7, "X"), - (0xCDD, "V"), - (0xCDF, "X"), - (0xCE0, "V"), - (0xCE4, "X"), - (0xCE6, "V"), - (0xCF0, "X"), - (0xCF1, "V"), - (0xCF4, "X"), - (0xD00, "V"), - (0xD0D, "X"), - (0xD0E, "V"), - (0xD11, "X"), - (0xD12, "V"), - (0xD45, "X"), - (0xD46, "V"), - (0xD49, "X"), - (0xD4A, "V"), - (0xD50, "X"), - (0xD54, "V"), - (0xD64, "X"), - (0xD66, "V"), - (0xD80, "X"), - (0xD81, "V"), - (0xD84, "X"), - (0xD85, "V"), - (0xD97, "X"), - (0xD9A, "V"), - (0xDB2, "X"), - (0xDB3, "V"), - (0xDBC, "X"), - (0xDBD, "V"), - (0xDBE, "X"), - (0xDC0, "V"), - (0xDC7, "X"), - (0xDCA, "V"), - (0xDCB, "X"), - (0xDCF, "V"), - (0xDD5, "X"), - (0xDD6, "V"), - (0xDD7, "X"), - (0xDD8, "V"), - (0xDE0, "X"), - (0xDE6, "V"), - (0xDF0, "X"), - (0xDF2, "V"), - (0xDF5, "X"), - (0xE01, "V"), - (0xE33, "M", "ํา"), - (0xE34, "V"), - (0xE3B, "X"), - (0xE3F, "V"), - (0xE5C, "X"), - (0xE81, "V"), - (0xE83, "X"), - (0xE84, "V"), - (0xE85, "X"), - (0xE86, "V"), - (0xE8B, "X"), - (0xE8C, "V"), - (0xEA4, "X"), - (0xEA5, "V"), - (0xEA6, "X"), - (0xEA7, "V"), - (0xEB3, "M", "ໍາ"), - (0xEB4, "V"), - ] - - -def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xEBE, "X"), - (0xEC0, "V"), - (0xEC5, "X"), - (0xEC6, "V"), - (0xEC7, "X"), - (0xEC8, "V"), - (0xECF, "X"), - (0xED0, "V"), - (0xEDA, "X"), - (0xEDC, "M", "ຫນ"), - (0xEDD, "M", "ຫມ"), - (0xEDE, "V"), - (0xEE0, "X"), - (0xF00, "V"), - (0xF0C, "M", "་"), - (0xF0D, "V"), - (0xF43, "M", "གྷ"), - (0xF44, "V"), - (0xF48, "X"), - (0xF49, "V"), - (0xF4D, "M", "ཌྷ"), - (0xF4E, "V"), - (0xF52, "M", "དྷ"), - (0xF53, "V"), - (0xF57, "M", "བྷ"), - (0xF58, "V"), - (0xF5C, "M", "ཛྷ"), - (0xF5D, "V"), - (0xF69, "M", "ཀྵ"), - (0xF6A, "V"), - (0xF6D, "X"), - (0xF71, "V"), - (0xF73, "M", "ཱི"), - (0xF74, "V"), - (0xF75, "M", "ཱུ"), - (0xF76, "M", "ྲྀ"), - (0xF77, "M", "ྲཱྀ"), - (0xF78, "M", "ླྀ"), - (0xF79, "M", "ླཱྀ"), - (0xF7A, "V"), - (0xF81, "M", "ཱྀ"), - (0xF82, "V"), - (0xF93, "M", "ྒྷ"), - (0xF94, "V"), - (0xF98, "X"), - (0xF99, "V"), - (0xF9D, "M", "ྜྷ"), - (0xF9E, "V"), - (0xFA2, "M", "ྡྷ"), - (0xFA3, "V"), - (0xFA7, "M", "ྦྷ"), - (0xFA8, "V"), - (0xFAC, "M", "ྫྷ"), - (0xFAD, "V"), - (0xFB9, "M", "ྐྵ"), - (0xFBA, "V"), - (0xFBD, "X"), - (0xFBE, "V"), - (0xFCD, "X"), - (0xFCE, "V"), - (0xFDB, "X"), - (0x1000, "V"), - (0x10A0, "X"), - (0x10C7, "M", "ⴧ"), - (0x10C8, "X"), - (0x10CD, "M", "ⴭ"), - (0x10CE, "X"), - (0x10D0, "V"), - (0x10FC, "M", "ნ"), - (0x10FD, "V"), - (0x115F, "X"), - (0x1161, "V"), - (0x1249, "X"), - (0x124A, "V"), - (0x124E, "X"), - (0x1250, "V"), - (0x1257, "X"), - (0x1258, "V"), - (0x1259, "X"), - (0x125A, "V"), - (0x125E, "X"), - (0x1260, "V"), - (0x1289, "X"), - (0x128A, "V"), - (0x128E, "X"), - (0x1290, "V"), - (0x12B1, "X"), - (0x12B2, "V"), - (0x12B6, "X"), - (0x12B8, "V"), - (0x12BF, "X"), - (0x12C0, "V"), - (0x12C1, "X"), - (0x12C2, "V"), - (0x12C6, "X"), - (0x12C8, "V"), - (0x12D7, "X"), - (0x12D8, "V"), - (0x1311, "X"), - (0x1312, "V"), - ] - - -def _seg_14() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1316, "X"), - (0x1318, "V"), - (0x135B, "X"), - (0x135D, "V"), - (0x137D, "X"), - (0x1380, "V"), - (0x139A, "X"), - (0x13A0, "V"), - (0x13F6, "X"), - (0x13F8, "M", "Ᏸ"), - (0x13F9, "M", "Ᏹ"), - (0x13FA, "M", "Ᏺ"), - (0x13FB, "M", "Ᏻ"), - (0x13FC, "M", "Ᏼ"), - (0x13FD, "M", "Ᏽ"), - (0x13FE, "X"), - (0x1400, "V"), - (0x1680, "X"), - (0x1681, "V"), - (0x169D, "X"), - (0x16A0, "V"), - (0x16F9, "X"), - (0x1700, "V"), - (0x1716, "X"), - (0x171F, "V"), - (0x1737, "X"), - (0x1740, "V"), - (0x1754, "X"), - (0x1760, "V"), - (0x176D, "X"), - (0x176E, "V"), - (0x1771, "X"), - (0x1772, "V"), - (0x1774, "X"), - (0x1780, "V"), - (0x17B4, "X"), - (0x17B6, "V"), - (0x17DE, "X"), - (0x17E0, "V"), - (0x17EA, "X"), - (0x17F0, "V"), - (0x17FA, "X"), - (0x1800, "V"), - (0x1806, "X"), - (0x1807, "V"), - (0x180B, "I"), - (0x180E, "X"), - (0x180F, "I"), - (0x1810, "V"), - (0x181A, "X"), - (0x1820, "V"), - (0x1879, "X"), - (0x1880, "V"), - (0x18AB, "X"), - (0x18B0, "V"), - (0x18F6, "X"), - (0x1900, "V"), - (0x191F, "X"), - (0x1920, "V"), - (0x192C, "X"), - (0x1930, "V"), - (0x193C, "X"), - (0x1940, "V"), - (0x1941, "X"), - (0x1944, "V"), - (0x196E, "X"), - (0x1970, "V"), - (0x1975, "X"), - (0x1980, "V"), - (0x19AC, "X"), - (0x19B0, "V"), - (0x19CA, "X"), - (0x19D0, "V"), - (0x19DB, "X"), - (0x19DE, "V"), - (0x1A1C, "X"), - (0x1A1E, "V"), - (0x1A5F, "X"), - (0x1A60, "V"), - (0x1A7D, "X"), - (0x1A7F, "V"), - (0x1A8A, "X"), - (0x1A90, "V"), - (0x1A9A, "X"), - (0x1AA0, "V"), - (0x1AAE, "X"), - (0x1AB0, "V"), - (0x1ACF, "X"), - (0x1B00, "V"), - (0x1B4D, "X"), - (0x1B50, "V"), - (0x1B7F, "X"), - (0x1B80, "V"), - (0x1BF4, "X"), - (0x1BFC, "V"), - (0x1C38, "X"), - (0x1C3B, "V"), - (0x1C4A, "X"), - (0x1C4D, "V"), - (0x1C80, "M", "в"), - ] - - -def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1C81, "M", "д"), - (0x1C82, "M", "о"), - (0x1C83, "M", "с"), - (0x1C84, "M", "т"), - (0x1C86, "M", "ъ"), - (0x1C87, "M", "ѣ"), - (0x1C88, "M", "ꙋ"), - (0x1C89, "X"), - (0x1C90, "M", "ა"), - (0x1C91, "M", "ბ"), - (0x1C92, "M", "გ"), - (0x1C93, "M", "დ"), - (0x1C94, "M", "ე"), - (0x1C95, "M", "ვ"), - (0x1C96, "M", "ზ"), - (0x1C97, "M", "თ"), - (0x1C98, "M", "ი"), - (0x1C99, "M", "კ"), - (0x1C9A, "M", "ლ"), - (0x1C9B, "M", "მ"), - (0x1C9C, "M", "ნ"), - (0x1C9D, "M", "ო"), - (0x1C9E, "M", "პ"), - (0x1C9F, "M", "ჟ"), - (0x1CA0, "M", "რ"), - (0x1CA1, "M", "ს"), - (0x1CA2, "M", "ტ"), - (0x1CA3, "M", "უ"), - (0x1CA4, "M", "ფ"), - (0x1CA5, "M", "ქ"), - (0x1CA6, "M", "ღ"), - (0x1CA7, "M", "ყ"), - (0x1CA8, "M", "შ"), - (0x1CA9, "M", "ჩ"), - (0x1CAA, "M", "ც"), - (0x1CAB, "M", "ძ"), - (0x1CAC, "M", "წ"), - (0x1CAD, "M", "ჭ"), - (0x1CAE, "M", "ხ"), - (0x1CAF, "M", "ჯ"), - (0x1CB0, "M", "ჰ"), - (0x1CB1, "M", "ჱ"), - (0x1CB2, "M", "ჲ"), - (0x1CB3, "M", "ჳ"), - (0x1CB4, "M", "ჴ"), - (0x1CB5, "M", "ჵ"), - (0x1CB6, "M", "ჶ"), - (0x1CB7, "M", "ჷ"), - (0x1CB8, "M", "ჸ"), - (0x1CB9, "M", "ჹ"), - (0x1CBA, "M", "ჺ"), - (0x1CBB, "X"), - (0x1CBD, "M", "ჽ"), - (0x1CBE, "M", "ჾ"), - (0x1CBF, "M", "ჿ"), - (0x1CC0, "V"), - (0x1CC8, "X"), - (0x1CD0, "V"), - (0x1CFB, "X"), - (0x1D00, "V"), - (0x1D2C, "M", "a"), - (0x1D2D, "M", "æ"), - (0x1D2E, "M", "b"), - (0x1D2F, "V"), - (0x1D30, "M", "d"), - (0x1D31, "M", "e"), - (0x1D32, "M", "ǝ"), - (0x1D33, "M", "g"), - (0x1D34, "M", "h"), - (0x1D35, "M", "i"), - (0x1D36, "M", "j"), - (0x1D37, "M", "k"), - (0x1D38, "M", "l"), - (0x1D39, "M", "m"), - (0x1D3A, "M", "n"), - (0x1D3B, "V"), - (0x1D3C, "M", "o"), - (0x1D3D, "M", 
"ȣ"), - (0x1D3E, "M", "p"), - (0x1D3F, "M", "r"), - (0x1D40, "M", "t"), - (0x1D41, "M", "u"), - (0x1D42, "M", "w"), - (0x1D43, "M", "a"), - (0x1D44, "M", "ɐ"), - (0x1D45, "M", "ɑ"), - (0x1D46, "M", "ᴂ"), - (0x1D47, "M", "b"), - (0x1D48, "M", "d"), - (0x1D49, "M", "e"), - (0x1D4A, "M", "ə"), - (0x1D4B, "M", "ɛ"), - (0x1D4C, "M", "ɜ"), - (0x1D4D, "M", "g"), - (0x1D4E, "V"), - (0x1D4F, "M", "k"), - (0x1D50, "M", "m"), - (0x1D51, "M", "ŋ"), - (0x1D52, "M", "o"), - (0x1D53, "M", "ɔ"), - ] - - -def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D54, "M", "ᴖ"), - (0x1D55, "M", "ᴗ"), - (0x1D56, "M", "p"), - (0x1D57, "M", "t"), - (0x1D58, "M", "u"), - (0x1D59, "M", "ᴝ"), - (0x1D5A, "M", "ɯ"), - (0x1D5B, "M", "v"), - (0x1D5C, "M", "ᴥ"), - (0x1D5D, "M", "β"), - (0x1D5E, "M", "γ"), - (0x1D5F, "M", "δ"), - (0x1D60, "M", "φ"), - (0x1D61, "M", "χ"), - (0x1D62, "M", "i"), - (0x1D63, "M", "r"), - (0x1D64, "M", "u"), - (0x1D65, "M", "v"), - (0x1D66, "M", "β"), - (0x1D67, "M", "γ"), - (0x1D68, "M", "ρ"), - (0x1D69, "M", "φ"), - (0x1D6A, "M", "χ"), - (0x1D6B, "V"), - (0x1D78, "M", "н"), - (0x1D79, "V"), - (0x1D9B, "M", "ɒ"), - (0x1D9C, "M", "c"), - (0x1D9D, "M", "ɕ"), - (0x1D9E, "M", "ð"), - (0x1D9F, "M", "ɜ"), - (0x1DA0, "M", "f"), - (0x1DA1, "M", "ɟ"), - (0x1DA2, "M", "ɡ"), - (0x1DA3, "M", "ɥ"), - (0x1DA4, "M", "ɨ"), - (0x1DA5, "M", "ɩ"), - (0x1DA6, "M", "ɪ"), - (0x1DA7, "M", "ᵻ"), - (0x1DA8, "M", "ʝ"), - (0x1DA9, "M", "ɭ"), - (0x1DAA, "M", "ᶅ"), - (0x1DAB, "M", "ʟ"), - (0x1DAC, "M", "ɱ"), - (0x1DAD, "M", "ɰ"), - (0x1DAE, "M", "ɲ"), - (0x1DAF, "M", "ɳ"), - (0x1DB0, "M", "ɴ"), - (0x1DB1, "M", "ɵ"), - (0x1DB2, "M", "ɸ"), - (0x1DB3, "M", "ʂ"), - (0x1DB4, "M", "ʃ"), - (0x1DB5, "M", "ƫ"), - (0x1DB6, "M", "ʉ"), - (0x1DB7, "M", "ʊ"), - (0x1DB8, "M", "ᴜ"), - (0x1DB9, "M", "ʋ"), - (0x1DBA, "M", "ʌ"), - (0x1DBB, "M", "z"), - (0x1DBC, "M", "ʐ"), - (0x1DBD, "M", "ʑ"), - (0x1DBE, "M", "ʒ"), - (0x1DBF, "M", "θ"), - (0x1DC0, "V"), - (0x1E00, "M", "ḁ"), - (0x1E01, "V"), - (0x1E02, "M", "ḃ"), - (0x1E03, "V"), - (0x1E04, "M", "ḅ"), - (0x1E05, "V"), - (0x1E06, "M", "ḇ"), - (0x1E07, "V"), - (0x1E08, "M", "ḉ"), - (0x1E09, "V"), - (0x1E0A, "M", "ḋ"), - (0x1E0B, "V"), - (0x1E0C, "M", "ḍ"), - (0x1E0D, "V"), - (0x1E0E, "M", "ḏ"), - (0x1E0F, "V"), - (0x1E10, "M", "ḑ"), - (0x1E11, "V"), - (0x1E12, "M", "ḓ"), - (0x1E13, "V"), - (0x1E14, "M", "ḕ"), - (0x1E15, "V"), - (0x1E16, "M", "ḗ"), - (0x1E17, "V"), - (0x1E18, "M", "ḙ"), - (0x1E19, "V"), - (0x1E1A, "M", "ḛ"), - (0x1E1B, "V"), - (0x1E1C, "M", "ḝ"), - (0x1E1D, "V"), - (0x1E1E, "M", "ḟ"), - (0x1E1F, "V"), - (0x1E20, "M", "ḡ"), - (0x1E21, "V"), - (0x1E22, "M", "ḣ"), - (0x1E23, "V"), - ] - - -def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E24, "M", "ḥ"), - (0x1E25, "V"), - (0x1E26, "M", "ḧ"), - (0x1E27, "V"), - (0x1E28, "M", "ḩ"), - (0x1E29, "V"), - (0x1E2A, "M", "ḫ"), - (0x1E2B, "V"), - (0x1E2C, "M", "ḭ"), - (0x1E2D, "V"), - (0x1E2E, "M", "ḯ"), - (0x1E2F, "V"), - (0x1E30, "M", "ḱ"), - (0x1E31, "V"), - (0x1E32, "M", "ḳ"), - (0x1E33, "V"), - (0x1E34, "M", "ḵ"), - (0x1E35, "V"), - (0x1E36, "M", "ḷ"), - (0x1E37, "V"), - (0x1E38, "M", "ḹ"), - (0x1E39, "V"), - (0x1E3A, "M", "ḻ"), - (0x1E3B, "V"), - (0x1E3C, "M", "ḽ"), - (0x1E3D, "V"), - (0x1E3E, "M", "ḿ"), - (0x1E3F, "V"), - (0x1E40, "M", "ṁ"), - (0x1E41, "V"), - (0x1E42, "M", "ṃ"), - (0x1E43, "V"), - (0x1E44, "M", "ṅ"), - (0x1E45, "V"), - (0x1E46, "M", "ṇ"), - (0x1E47, "V"), - (0x1E48, "M", "ṉ"), - (0x1E49, "V"), - (0x1E4A, "M", "ṋ"), - (0x1E4B, "V"), - (0x1E4C, "M", 
"ṍ"), - (0x1E4D, "V"), - (0x1E4E, "M", "ṏ"), - (0x1E4F, "V"), - (0x1E50, "M", "ṑ"), - (0x1E51, "V"), - (0x1E52, "M", "ṓ"), - (0x1E53, "V"), - (0x1E54, "M", "ṕ"), - (0x1E55, "V"), - (0x1E56, "M", "ṗ"), - (0x1E57, "V"), - (0x1E58, "M", "ṙ"), - (0x1E59, "V"), - (0x1E5A, "M", "ṛ"), - (0x1E5B, "V"), - (0x1E5C, "M", "ṝ"), - (0x1E5D, "V"), - (0x1E5E, "M", "ṟ"), - (0x1E5F, "V"), - (0x1E60, "M", "ṡ"), - (0x1E61, "V"), - (0x1E62, "M", "ṣ"), - (0x1E63, "V"), - (0x1E64, "M", "ṥ"), - (0x1E65, "V"), - (0x1E66, "M", "ṧ"), - (0x1E67, "V"), - (0x1E68, "M", "ṩ"), - (0x1E69, "V"), - (0x1E6A, "M", "ṫ"), - (0x1E6B, "V"), - (0x1E6C, "M", "ṭ"), - (0x1E6D, "V"), - (0x1E6E, "M", "ṯ"), - (0x1E6F, "V"), - (0x1E70, "M", "ṱ"), - (0x1E71, "V"), - (0x1E72, "M", "ṳ"), - (0x1E73, "V"), - (0x1E74, "M", "ṵ"), - (0x1E75, "V"), - (0x1E76, "M", "ṷ"), - (0x1E77, "V"), - (0x1E78, "M", "ṹ"), - (0x1E79, "V"), - (0x1E7A, "M", "ṻ"), - (0x1E7B, "V"), - (0x1E7C, "M", "ṽ"), - (0x1E7D, "V"), - (0x1E7E, "M", "ṿ"), - (0x1E7F, "V"), - (0x1E80, "M", "ẁ"), - (0x1E81, "V"), - (0x1E82, "M", "ẃ"), - (0x1E83, "V"), - (0x1E84, "M", "ẅ"), - (0x1E85, "V"), - (0x1E86, "M", "ẇ"), - (0x1E87, "V"), - ] - - -def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E88, "M", "ẉ"), - (0x1E89, "V"), - (0x1E8A, "M", "ẋ"), - (0x1E8B, "V"), - (0x1E8C, "M", "ẍ"), - (0x1E8D, "V"), - (0x1E8E, "M", "ẏ"), - (0x1E8F, "V"), - (0x1E90, "M", "ẑ"), - (0x1E91, "V"), - (0x1E92, "M", "ẓ"), - (0x1E93, "V"), - (0x1E94, "M", "ẕ"), - (0x1E95, "V"), - (0x1E9A, "M", "aʾ"), - (0x1E9B, "M", "ṡ"), - (0x1E9C, "V"), - (0x1E9E, "M", "ß"), - (0x1E9F, "V"), - (0x1EA0, "M", "ạ"), - (0x1EA1, "V"), - (0x1EA2, "M", "ả"), - (0x1EA3, "V"), - (0x1EA4, "M", "ấ"), - (0x1EA5, "V"), - (0x1EA6, "M", "ầ"), - (0x1EA7, "V"), - (0x1EA8, "M", "ẩ"), - (0x1EA9, "V"), - (0x1EAA, "M", "ẫ"), - (0x1EAB, "V"), - (0x1EAC, "M", "ậ"), - (0x1EAD, "V"), - (0x1EAE, "M", "ắ"), - (0x1EAF, "V"), - (0x1EB0, "M", "ằ"), - (0x1EB1, "V"), - (0x1EB2, "M", "ẳ"), - (0x1EB3, "V"), - (0x1EB4, "M", "ẵ"), - (0x1EB5, "V"), - (0x1EB6, "M", "ặ"), - (0x1EB7, "V"), - (0x1EB8, "M", "ẹ"), - (0x1EB9, "V"), - (0x1EBA, "M", "ẻ"), - (0x1EBB, "V"), - (0x1EBC, "M", "ẽ"), - (0x1EBD, "V"), - (0x1EBE, "M", "ế"), - (0x1EBF, "V"), - (0x1EC0, "M", "ề"), - (0x1EC1, "V"), - (0x1EC2, "M", "ể"), - (0x1EC3, "V"), - (0x1EC4, "M", "ễ"), - (0x1EC5, "V"), - (0x1EC6, "M", "ệ"), - (0x1EC7, "V"), - (0x1EC8, "M", "ỉ"), - (0x1EC9, "V"), - (0x1ECA, "M", "ị"), - (0x1ECB, "V"), - (0x1ECC, "M", "ọ"), - (0x1ECD, "V"), - (0x1ECE, "M", "ỏ"), - (0x1ECF, "V"), - (0x1ED0, "M", "ố"), - (0x1ED1, "V"), - (0x1ED2, "M", "ồ"), - (0x1ED3, "V"), - (0x1ED4, "M", "ổ"), - (0x1ED5, "V"), - (0x1ED6, "M", "ỗ"), - (0x1ED7, "V"), - (0x1ED8, "M", "ộ"), - (0x1ED9, "V"), - (0x1EDA, "M", "ớ"), - (0x1EDB, "V"), - (0x1EDC, "M", "ờ"), - (0x1EDD, "V"), - (0x1EDE, "M", "ở"), - (0x1EDF, "V"), - (0x1EE0, "M", "ỡ"), - (0x1EE1, "V"), - (0x1EE2, "M", "ợ"), - (0x1EE3, "V"), - (0x1EE4, "M", "ụ"), - (0x1EE5, "V"), - (0x1EE6, "M", "ủ"), - (0x1EE7, "V"), - (0x1EE8, "M", "ứ"), - (0x1EE9, "V"), - (0x1EEA, "M", "ừ"), - (0x1EEB, "V"), - (0x1EEC, "M", "ử"), - (0x1EED, "V"), - (0x1EEE, "M", "ữ"), - (0x1EEF, "V"), - (0x1EF0, "M", "ự"), - ] - - -def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EF1, "V"), - (0x1EF2, "M", "ỳ"), - (0x1EF3, "V"), - (0x1EF4, "M", "ỵ"), - (0x1EF5, "V"), - (0x1EF6, "M", "ỷ"), - (0x1EF7, "V"), - (0x1EF8, "M", "ỹ"), - (0x1EF9, "V"), - (0x1EFA, "M", "ỻ"), - (0x1EFB, "V"), - (0x1EFC, "M", "ỽ"), - (0x1EFD, "V"), - (0x1EFE, "M", 
"ỿ"), - (0x1EFF, "V"), - (0x1F08, "M", "ἀ"), - (0x1F09, "M", "ἁ"), - (0x1F0A, "M", "ἂ"), - (0x1F0B, "M", "ἃ"), - (0x1F0C, "M", "ἄ"), - (0x1F0D, "M", "ἅ"), - (0x1F0E, "M", "ἆ"), - (0x1F0F, "M", "ἇ"), - (0x1F10, "V"), - (0x1F16, "X"), - (0x1F18, "M", "ἐ"), - (0x1F19, "M", "ἑ"), - (0x1F1A, "M", "ἒ"), - (0x1F1B, "M", "ἓ"), - (0x1F1C, "M", "ἔ"), - (0x1F1D, "M", "ἕ"), - (0x1F1E, "X"), - (0x1F20, "V"), - (0x1F28, "M", "ἠ"), - (0x1F29, "M", "ἡ"), - (0x1F2A, "M", "ἢ"), - (0x1F2B, "M", "ἣ"), - (0x1F2C, "M", "ἤ"), - (0x1F2D, "M", "ἥ"), - (0x1F2E, "M", "ἦ"), - (0x1F2F, "M", "ἧ"), - (0x1F30, "V"), - (0x1F38, "M", "ἰ"), - (0x1F39, "M", "ἱ"), - (0x1F3A, "M", "ἲ"), - (0x1F3B, "M", "ἳ"), - (0x1F3C, "M", "ἴ"), - (0x1F3D, "M", "ἵ"), - (0x1F3E, "M", "ἶ"), - (0x1F3F, "M", "ἷ"), - (0x1F40, "V"), - (0x1F46, "X"), - (0x1F48, "M", "ὀ"), - (0x1F49, "M", "ὁ"), - (0x1F4A, "M", "ὂ"), - (0x1F4B, "M", "ὃ"), - (0x1F4C, "M", "ὄ"), - (0x1F4D, "M", "ὅ"), - (0x1F4E, "X"), - (0x1F50, "V"), - (0x1F58, "X"), - (0x1F59, "M", "ὑ"), - (0x1F5A, "X"), - (0x1F5B, "M", "ὓ"), - (0x1F5C, "X"), - (0x1F5D, "M", "ὕ"), - (0x1F5E, "X"), - (0x1F5F, "M", "ὗ"), - (0x1F60, "V"), - (0x1F68, "M", "ὠ"), - (0x1F69, "M", "ὡ"), - (0x1F6A, "M", "ὢ"), - (0x1F6B, "M", "ὣ"), - (0x1F6C, "M", "ὤ"), - (0x1F6D, "M", "ὥ"), - (0x1F6E, "M", "ὦ"), - (0x1F6F, "M", "ὧ"), - (0x1F70, "V"), - (0x1F71, "M", "ά"), - (0x1F72, "V"), - (0x1F73, "M", "έ"), - (0x1F74, "V"), - (0x1F75, "M", "ή"), - (0x1F76, "V"), - (0x1F77, "M", "ί"), - (0x1F78, "V"), - (0x1F79, "M", "ό"), - (0x1F7A, "V"), - (0x1F7B, "M", "ύ"), - (0x1F7C, "V"), - (0x1F7D, "M", "ώ"), - (0x1F7E, "X"), - (0x1F80, "M", "ἀι"), - (0x1F81, "M", "ἁι"), - (0x1F82, "M", "ἂι"), - (0x1F83, "M", "ἃι"), - (0x1F84, "M", "ἄι"), - (0x1F85, "M", "ἅι"), - (0x1F86, "M", "ἆι"), - (0x1F87, "M", "ἇι"), - ] - - -def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F88, "M", "ἀι"), - (0x1F89, "M", "ἁι"), - (0x1F8A, "M", "ἂι"), - (0x1F8B, "M", "ἃι"), - (0x1F8C, "M", "ἄι"), - (0x1F8D, "M", "ἅι"), - (0x1F8E, "M", "ἆι"), - (0x1F8F, "M", "ἇι"), - (0x1F90, "M", "ἠι"), - (0x1F91, "M", "ἡι"), - (0x1F92, "M", "ἢι"), - (0x1F93, "M", "ἣι"), - (0x1F94, "M", "ἤι"), - (0x1F95, "M", "ἥι"), - (0x1F96, "M", "ἦι"), - (0x1F97, "M", "ἧι"), - (0x1F98, "M", "ἠι"), - (0x1F99, "M", "ἡι"), - (0x1F9A, "M", "ἢι"), - (0x1F9B, "M", "ἣι"), - (0x1F9C, "M", "ἤι"), - (0x1F9D, "M", "ἥι"), - (0x1F9E, "M", "ἦι"), - (0x1F9F, "M", "ἧι"), - (0x1FA0, "M", "ὠι"), - (0x1FA1, "M", "ὡι"), - (0x1FA2, "M", "ὢι"), - (0x1FA3, "M", "ὣι"), - (0x1FA4, "M", "ὤι"), - (0x1FA5, "M", "ὥι"), - (0x1FA6, "M", "ὦι"), - (0x1FA7, "M", "ὧι"), - (0x1FA8, "M", "ὠι"), - (0x1FA9, "M", "ὡι"), - (0x1FAA, "M", "ὢι"), - (0x1FAB, "M", "ὣι"), - (0x1FAC, "M", "ὤι"), - (0x1FAD, "M", "ὥι"), - (0x1FAE, "M", "ὦι"), - (0x1FAF, "M", "ὧι"), - (0x1FB0, "V"), - (0x1FB2, "M", "ὰι"), - (0x1FB3, "M", "αι"), - (0x1FB4, "M", "άι"), - (0x1FB5, "X"), - (0x1FB6, "V"), - (0x1FB7, "M", "ᾶι"), - (0x1FB8, "M", "ᾰ"), - (0x1FB9, "M", "ᾱ"), - (0x1FBA, "M", "ὰ"), - (0x1FBB, "M", "ά"), - (0x1FBC, "M", "αι"), - (0x1FBD, "3", " ̓"), - (0x1FBE, "M", "ι"), - (0x1FBF, "3", " ̓"), - (0x1FC0, "3", " ͂"), - (0x1FC1, "3", " ̈͂"), - (0x1FC2, "M", "ὴι"), - (0x1FC3, "M", "ηι"), - (0x1FC4, "M", "ήι"), - (0x1FC5, "X"), - (0x1FC6, "V"), - (0x1FC7, "M", "ῆι"), - (0x1FC8, "M", "ὲ"), - (0x1FC9, "M", "έ"), - (0x1FCA, "M", "ὴ"), - (0x1FCB, "M", "ή"), - (0x1FCC, "M", "ηι"), - (0x1FCD, "3", " ̓̀"), - (0x1FCE, "3", " ̓́"), - (0x1FCF, "3", " ̓͂"), - (0x1FD0, "V"), - (0x1FD3, "M", "ΐ"), - (0x1FD4, "X"), - (0x1FD6, "V"), - 
(0x1FD8, "M", "ῐ"), - (0x1FD9, "M", "ῑ"), - (0x1FDA, "M", "ὶ"), - (0x1FDB, "M", "ί"), - (0x1FDC, "X"), - (0x1FDD, "3", " ̔̀"), - (0x1FDE, "3", " ̔́"), - (0x1FDF, "3", " ̔͂"), - (0x1FE0, "V"), - (0x1FE3, "M", "ΰ"), - (0x1FE4, "V"), - (0x1FE8, "M", "ῠ"), - (0x1FE9, "M", "ῡ"), - (0x1FEA, "M", "ὺ"), - (0x1FEB, "M", "ύ"), - (0x1FEC, "M", "ῥ"), - (0x1FED, "3", " ̈̀"), - (0x1FEE, "3", " ̈́"), - (0x1FEF, "3", "`"), - (0x1FF0, "X"), - (0x1FF2, "M", "ὼι"), - (0x1FF3, "M", "ωι"), - (0x1FF4, "M", "ώι"), - (0x1FF5, "X"), - (0x1FF6, "V"), - ] - - -def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1FF7, "M", "ῶι"), - (0x1FF8, "M", "ὸ"), - (0x1FF9, "M", "ό"), - (0x1FFA, "M", "ὼ"), - (0x1FFB, "M", "ώ"), - (0x1FFC, "M", "ωι"), - (0x1FFD, "3", " ́"), - (0x1FFE, "3", " ̔"), - (0x1FFF, "X"), - (0x2000, "3", " "), - (0x200B, "I"), - (0x200C, "D", ""), - (0x200E, "X"), - (0x2010, "V"), - (0x2011, "M", "‐"), - (0x2012, "V"), - (0x2017, "3", " ̳"), - (0x2018, "V"), - (0x2024, "X"), - (0x2027, "V"), - (0x2028, "X"), - (0x202F, "3", " "), - (0x2030, "V"), - (0x2033, "M", "′′"), - (0x2034, "M", "′′′"), - (0x2035, "V"), - (0x2036, "M", "‵‵"), - (0x2037, "M", "‵‵‵"), - (0x2038, "V"), - (0x203C, "3", "!!"), - (0x203D, "V"), - (0x203E, "3", " ̅"), - (0x203F, "V"), - (0x2047, "3", "??"), - (0x2048, "3", "?!"), - (0x2049, "3", "!?"), - (0x204A, "V"), - (0x2057, "M", "′′′′"), - (0x2058, "V"), - (0x205F, "3", " "), - (0x2060, "I"), - (0x2061, "X"), - (0x2064, "I"), - (0x2065, "X"), - (0x2070, "M", "0"), - (0x2071, "M", "i"), - (0x2072, "X"), - (0x2074, "M", "4"), - (0x2075, "M", "5"), - (0x2076, "M", "6"), - (0x2077, "M", "7"), - (0x2078, "M", "8"), - (0x2079, "M", "9"), - (0x207A, "3", "+"), - (0x207B, "M", "−"), - (0x207C, "3", "="), - (0x207D, "3", "("), - (0x207E, "3", ")"), - (0x207F, "M", "n"), - (0x2080, "M", "0"), - (0x2081, "M", "1"), - (0x2082, "M", "2"), - (0x2083, "M", "3"), - (0x2084, "M", "4"), - (0x2085, "M", "5"), - (0x2086, "M", "6"), - (0x2087, "M", "7"), - (0x2088, "M", "8"), - (0x2089, "M", "9"), - (0x208A, "3", "+"), - (0x208B, "M", "−"), - (0x208C, "3", "="), - (0x208D, "3", "("), - (0x208E, "3", ")"), - (0x208F, "X"), - (0x2090, "M", "a"), - (0x2091, "M", "e"), - (0x2092, "M", "o"), - (0x2093, "M", "x"), - (0x2094, "M", "ə"), - (0x2095, "M", "h"), - (0x2096, "M", "k"), - (0x2097, "M", "l"), - (0x2098, "M", "m"), - (0x2099, "M", "n"), - (0x209A, "M", "p"), - (0x209B, "M", "s"), - (0x209C, "M", "t"), - (0x209D, "X"), - (0x20A0, "V"), - (0x20A8, "M", "rs"), - (0x20A9, "V"), - (0x20C1, "X"), - (0x20D0, "V"), - (0x20F1, "X"), - (0x2100, "3", "a/c"), - (0x2101, "3", "a/s"), - (0x2102, "M", "c"), - (0x2103, "M", "°c"), - (0x2104, "V"), - ] - - -def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2105, "3", "c/o"), - (0x2106, "3", "c/u"), - (0x2107, "M", "ɛ"), - (0x2108, "V"), - (0x2109, "M", "°f"), - (0x210A, "M", "g"), - (0x210B, "M", "h"), - (0x210F, "M", "ħ"), - (0x2110, "M", "i"), - (0x2112, "M", "l"), - (0x2114, "V"), - (0x2115, "M", "n"), - (0x2116, "M", "no"), - (0x2117, "V"), - (0x2119, "M", "p"), - (0x211A, "M", "q"), - (0x211B, "M", "r"), - (0x211E, "V"), - (0x2120, "M", "sm"), - (0x2121, "M", "tel"), - (0x2122, "M", "tm"), - (0x2123, "V"), - (0x2124, "M", "z"), - (0x2125, "V"), - (0x2126, "M", "ω"), - (0x2127, "V"), - (0x2128, "M", "z"), - (0x2129, "V"), - (0x212A, "M", "k"), - (0x212B, "M", "å"), - (0x212C, "M", "b"), - (0x212D, "M", "c"), - (0x212E, "V"), - (0x212F, "M", "e"), - (0x2131, "M", "f"), - (0x2132, "X"), - (0x2133, 
"M", "m"), - (0x2134, "M", "o"), - (0x2135, "M", "א"), - (0x2136, "M", "ב"), - (0x2137, "M", "ג"), - (0x2138, "M", "ד"), - (0x2139, "M", "i"), - (0x213A, "V"), - (0x213B, "M", "fax"), - (0x213C, "M", "π"), - (0x213D, "M", "γ"), - (0x213F, "M", "π"), - (0x2140, "M", "∑"), - (0x2141, "V"), - (0x2145, "M", "d"), - (0x2147, "M", "e"), - (0x2148, "M", "i"), - (0x2149, "M", "j"), - (0x214A, "V"), - (0x2150, "M", "1⁄7"), - (0x2151, "M", "1⁄9"), - (0x2152, "M", "1⁄10"), - (0x2153, "M", "1⁄3"), - (0x2154, "M", "2⁄3"), - (0x2155, "M", "1⁄5"), - (0x2156, "M", "2⁄5"), - (0x2157, "M", "3⁄5"), - (0x2158, "M", "4⁄5"), - (0x2159, "M", "1⁄6"), - (0x215A, "M", "5⁄6"), - (0x215B, "M", "1⁄8"), - (0x215C, "M", "3⁄8"), - (0x215D, "M", "5⁄8"), - (0x215E, "M", "7⁄8"), - (0x215F, "M", "1⁄"), - (0x2160, "M", "i"), - (0x2161, "M", "ii"), - (0x2162, "M", "iii"), - (0x2163, "M", "iv"), - (0x2164, "M", "v"), - (0x2165, "M", "vi"), - (0x2166, "M", "vii"), - (0x2167, "M", "viii"), - (0x2168, "M", "ix"), - (0x2169, "M", "x"), - (0x216A, "M", "xi"), - (0x216B, "M", "xii"), - (0x216C, "M", "l"), - (0x216D, "M", "c"), - (0x216E, "M", "d"), - (0x216F, "M", "m"), - (0x2170, "M", "i"), - (0x2171, "M", "ii"), - (0x2172, "M", "iii"), - (0x2173, "M", "iv"), - (0x2174, "M", "v"), - (0x2175, "M", "vi"), - (0x2176, "M", "vii"), - (0x2177, "M", "viii"), - (0x2178, "M", "ix"), - (0x2179, "M", "x"), - (0x217A, "M", "xi"), - (0x217B, "M", "xii"), - (0x217C, "M", "l"), - ] - - -def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x217D, "M", "c"), - (0x217E, "M", "d"), - (0x217F, "M", "m"), - (0x2180, "V"), - (0x2183, "X"), - (0x2184, "V"), - (0x2189, "M", "0⁄3"), - (0x218A, "V"), - (0x218C, "X"), - (0x2190, "V"), - (0x222C, "M", "∫∫"), - (0x222D, "M", "∫∫∫"), - (0x222E, "V"), - (0x222F, "M", "∮∮"), - (0x2230, "M", "∮∮∮"), - (0x2231, "V"), - (0x2329, "M", "〈"), - (0x232A, "M", "〉"), - (0x232B, "V"), - (0x2427, "X"), - (0x2440, "V"), - (0x244B, "X"), - (0x2460, "M", "1"), - (0x2461, "M", "2"), - (0x2462, "M", "3"), - (0x2463, "M", "4"), - (0x2464, "M", "5"), - (0x2465, "M", "6"), - (0x2466, "M", "7"), - (0x2467, "M", "8"), - (0x2468, "M", "9"), - (0x2469, "M", "10"), - (0x246A, "M", "11"), - (0x246B, "M", "12"), - (0x246C, "M", "13"), - (0x246D, "M", "14"), - (0x246E, "M", "15"), - (0x246F, "M", "16"), - (0x2470, "M", "17"), - (0x2471, "M", "18"), - (0x2472, "M", "19"), - (0x2473, "M", "20"), - (0x2474, "3", "(1)"), - (0x2475, "3", "(2)"), - (0x2476, "3", "(3)"), - (0x2477, "3", "(4)"), - (0x2478, "3", "(5)"), - (0x2479, "3", "(6)"), - (0x247A, "3", "(7)"), - (0x247B, "3", "(8)"), - (0x247C, "3", "(9)"), - (0x247D, "3", "(10)"), - (0x247E, "3", "(11)"), - (0x247F, "3", "(12)"), - (0x2480, "3", "(13)"), - (0x2481, "3", "(14)"), - (0x2482, "3", "(15)"), - (0x2483, "3", "(16)"), - (0x2484, "3", "(17)"), - (0x2485, "3", "(18)"), - (0x2486, "3", "(19)"), - (0x2487, "3", "(20)"), - (0x2488, "X"), - (0x249C, "3", "(a)"), - (0x249D, "3", "(b)"), - (0x249E, "3", "(c)"), - (0x249F, "3", "(d)"), - (0x24A0, "3", "(e)"), - (0x24A1, "3", "(f)"), - (0x24A2, "3", "(g)"), - (0x24A3, "3", "(h)"), - (0x24A4, "3", "(i)"), - (0x24A5, "3", "(j)"), - (0x24A6, "3", "(k)"), - (0x24A7, "3", "(l)"), - (0x24A8, "3", "(m)"), - (0x24A9, "3", "(n)"), - (0x24AA, "3", "(o)"), - (0x24AB, "3", "(p)"), - (0x24AC, "3", "(q)"), - (0x24AD, "3", "(r)"), - (0x24AE, "3", "(s)"), - (0x24AF, "3", "(t)"), - (0x24B0, "3", "(u)"), - (0x24B1, "3", "(v)"), - (0x24B2, "3", "(w)"), - (0x24B3, "3", "(x)"), - (0x24B4, "3", "(y)"), - (0x24B5, "3", "(z)"), - 
(0x24B6, "M", "a"), - (0x24B7, "M", "b"), - (0x24B8, "M", "c"), - (0x24B9, "M", "d"), - (0x24BA, "M", "e"), - (0x24BB, "M", "f"), - (0x24BC, "M", "g"), - (0x24BD, "M", "h"), - (0x24BE, "M", "i"), - (0x24BF, "M", "j"), - (0x24C0, "M", "k"), - ] - - -def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x24C1, "M", "l"), - (0x24C2, "M", "m"), - (0x24C3, "M", "n"), - (0x24C4, "M", "o"), - (0x24C5, "M", "p"), - (0x24C6, "M", "q"), - (0x24C7, "M", "r"), - (0x24C8, "M", "s"), - (0x24C9, "M", "t"), - (0x24CA, "M", "u"), - (0x24CB, "M", "v"), - (0x24CC, "M", "w"), - (0x24CD, "M", "x"), - (0x24CE, "M", "y"), - (0x24CF, "M", "z"), - (0x24D0, "M", "a"), - (0x24D1, "M", "b"), - (0x24D2, "M", "c"), - (0x24D3, "M", "d"), - (0x24D4, "M", "e"), - (0x24D5, "M", "f"), - (0x24D6, "M", "g"), - (0x24D7, "M", "h"), - (0x24D8, "M", "i"), - (0x24D9, "M", "j"), - (0x24DA, "M", "k"), - (0x24DB, "M", "l"), - (0x24DC, "M", "m"), - (0x24DD, "M", "n"), - (0x24DE, "M", "o"), - (0x24DF, "M", "p"), - (0x24E0, "M", "q"), - (0x24E1, "M", "r"), - (0x24E2, "M", "s"), - (0x24E3, "M", "t"), - (0x24E4, "M", "u"), - (0x24E5, "M", "v"), - (0x24E6, "M", "w"), - (0x24E7, "M", "x"), - (0x24E8, "M", "y"), - (0x24E9, "M", "z"), - (0x24EA, "M", "0"), - (0x24EB, "V"), - (0x2A0C, "M", "∫∫∫∫"), - (0x2A0D, "V"), - (0x2A74, "3", "::="), - (0x2A75, "3", "=="), - (0x2A76, "3", "==="), - (0x2A77, "V"), - (0x2ADC, "M", "⫝̸"), - (0x2ADD, "V"), - (0x2B74, "X"), - (0x2B76, "V"), - (0x2B96, "X"), - (0x2B97, "V"), - (0x2C00, "M", "ⰰ"), - (0x2C01, "M", "ⰱ"), - (0x2C02, "M", "ⰲ"), - (0x2C03, "M", "ⰳ"), - (0x2C04, "M", "ⰴ"), - (0x2C05, "M", "ⰵ"), - (0x2C06, "M", "ⰶ"), - (0x2C07, "M", "ⰷ"), - (0x2C08, "M", "ⰸ"), - (0x2C09, "M", "ⰹ"), - (0x2C0A, "M", "ⰺ"), - (0x2C0B, "M", "ⰻ"), - (0x2C0C, "M", "ⰼ"), - (0x2C0D, "M", "ⰽ"), - (0x2C0E, "M", "ⰾ"), - (0x2C0F, "M", "ⰿ"), - (0x2C10, "M", "ⱀ"), - (0x2C11, "M", "ⱁ"), - (0x2C12, "M", "ⱂ"), - (0x2C13, "M", "ⱃ"), - (0x2C14, "M", "ⱄ"), - (0x2C15, "M", "ⱅ"), - (0x2C16, "M", "ⱆ"), - (0x2C17, "M", "ⱇ"), - (0x2C18, "M", "ⱈ"), - (0x2C19, "M", "ⱉ"), - (0x2C1A, "M", "ⱊ"), - (0x2C1B, "M", "ⱋ"), - (0x2C1C, "M", "ⱌ"), - (0x2C1D, "M", "ⱍ"), - (0x2C1E, "M", "ⱎ"), - (0x2C1F, "M", "ⱏ"), - (0x2C20, "M", "ⱐ"), - (0x2C21, "M", "ⱑ"), - (0x2C22, "M", "ⱒ"), - (0x2C23, "M", "ⱓ"), - (0x2C24, "M", "ⱔ"), - (0x2C25, "M", "ⱕ"), - (0x2C26, "M", "ⱖ"), - (0x2C27, "M", "ⱗ"), - (0x2C28, "M", "ⱘ"), - (0x2C29, "M", "ⱙ"), - (0x2C2A, "M", "ⱚ"), - (0x2C2B, "M", "ⱛ"), - (0x2C2C, "M", "ⱜ"), - ] - - -def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2C2D, "M", "ⱝ"), - (0x2C2E, "M", "ⱞ"), - (0x2C2F, "M", "ⱟ"), - (0x2C30, "V"), - (0x2C60, "M", "ⱡ"), - (0x2C61, "V"), - (0x2C62, "M", "ɫ"), - (0x2C63, "M", "ᵽ"), - (0x2C64, "M", "ɽ"), - (0x2C65, "V"), - (0x2C67, "M", "ⱨ"), - (0x2C68, "V"), - (0x2C69, "M", "ⱪ"), - (0x2C6A, "V"), - (0x2C6B, "M", "ⱬ"), - (0x2C6C, "V"), - (0x2C6D, "M", "ɑ"), - (0x2C6E, "M", "ɱ"), - (0x2C6F, "M", "ɐ"), - (0x2C70, "M", "ɒ"), - (0x2C71, "V"), - (0x2C72, "M", "ⱳ"), - (0x2C73, "V"), - (0x2C75, "M", "ⱶ"), - (0x2C76, "V"), - (0x2C7C, "M", "j"), - (0x2C7D, "M", "v"), - (0x2C7E, "M", "ȿ"), - (0x2C7F, "M", "ɀ"), - (0x2C80, "M", "ⲁ"), - (0x2C81, "V"), - (0x2C82, "M", "ⲃ"), - (0x2C83, "V"), - (0x2C84, "M", "ⲅ"), - (0x2C85, "V"), - (0x2C86, "M", "ⲇ"), - (0x2C87, "V"), - (0x2C88, "M", "ⲉ"), - (0x2C89, "V"), - (0x2C8A, "M", "ⲋ"), - (0x2C8B, "V"), - (0x2C8C, "M", "ⲍ"), - (0x2C8D, "V"), - (0x2C8E, "M", "ⲏ"), - (0x2C8F, "V"), - (0x2C90, "M", "ⲑ"), - (0x2C91, "V"), - (0x2C92, "M", "ⲓ"), - 
(0x2C93, "V"), - (0x2C94, "M", "ⲕ"), - (0x2C95, "V"), - (0x2C96, "M", "ⲗ"), - (0x2C97, "V"), - (0x2C98, "M", "ⲙ"), - (0x2C99, "V"), - (0x2C9A, "M", "ⲛ"), - (0x2C9B, "V"), - (0x2C9C, "M", "ⲝ"), - (0x2C9D, "V"), - (0x2C9E, "M", "ⲟ"), - (0x2C9F, "V"), - (0x2CA0, "M", "ⲡ"), - (0x2CA1, "V"), - (0x2CA2, "M", "ⲣ"), - (0x2CA3, "V"), - (0x2CA4, "M", "ⲥ"), - (0x2CA5, "V"), - (0x2CA6, "M", "ⲧ"), - (0x2CA7, "V"), - (0x2CA8, "M", "ⲩ"), - (0x2CA9, "V"), - (0x2CAA, "M", "ⲫ"), - (0x2CAB, "V"), - (0x2CAC, "M", "ⲭ"), - (0x2CAD, "V"), - (0x2CAE, "M", "ⲯ"), - (0x2CAF, "V"), - (0x2CB0, "M", "ⲱ"), - (0x2CB1, "V"), - (0x2CB2, "M", "ⲳ"), - (0x2CB3, "V"), - (0x2CB4, "M", "ⲵ"), - (0x2CB5, "V"), - (0x2CB6, "M", "ⲷ"), - (0x2CB7, "V"), - (0x2CB8, "M", "ⲹ"), - (0x2CB9, "V"), - (0x2CBA, "M", "ⲻ"), - (0x2CBB, "V"), - (0x2CBC, "M", "ⲽ"), - (0x2CBD, "V"), - (0x2CBE, "M", "ⲿ"), - (0x2CBF, "V"), - (0x2CC0, "M", "ⳁ"), - (0x2CC1, "V"), - (0x2CC2, "M", "ⳃ"), - (0x2CC3, "V"), - (0x2CC4, "M", "ⳅ"), - (0x2CC5, "V"), - (0x2CC6, "M", "ⳇ"), - ] - - -def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2CC7, "V"), - (0x2CC8, "M", "ⳉ"), - (0x2CC9, "V"), - (0x2CCA, "M", "ⳋ"), - (0x2CCB, "V"), - (0x2CCC, "M", "ⳍ"), - (0x2CCD, "V"), - (0x2CCE, "M", "ⳏ"), - (0x2CCF, "V"), - (0x2CD0, "M", "ⳑ"), - (0x2CD1, "V"), - (0x2CD2, "M", "ⳓ"), - (0x2CD3, "V"), - (0x2CD4, "M", "ⳕ"), - (0x2CD5, "V"), - (0x2CD6, "M", "ⳗ"), - (0x2CD7, "V"), - (0x2CD8, "M", "ⳙ"), - (0x2CD9, "V"), - (0x2CDA, "M", "ⳛ"), - (0x2CDB, "V"), - (0x2CDC, "M", "ⳝ"), - (0x2CDD, "V"), - (0x2CDE, "M", "ⳟ"), - (0x2CDF, "V"), - (0x2CE0, "M", "ⳡ"), - (0x2CE1, "V"), - (0x2CE2, "M", "ⳣ"), - (0x2CE3, "V"), - (0x2CEB, "M", "ⳬ"), - (0x2CEC, "V"), - (0x2CED, "M", "ⳮ"), - (0x2CEE, "V"), - (0x2CF2, "M", "ⳳ"), - (0x2CF3, "V"), - (0x2CF4, "X"), - (0x2CF9, "V"), - (0x2D26, "X"), - (0x2D27, "V"), - (0x2D28, "X"), - (0x2D2D, "V"), - (0x2D2E, "X"), - (0x2D30, "V"), - (0x2D68, "X"), - (0x2D6F, "M", "ⵡ"), - (0x2D70, "V"), - (0x2D71, "X"), - (0x2D7F, "V"), - (0x2D97, "X"), - (0x2DA0, "V"), - (0x2DA7, "X"), - (0x2DA8, "V"), - (0x2DAF, "X"), - (0x2DB0, "V"), - (0x2DB7, "X"), - (0x2DB8, "V"), - (0x2DBF, "X"), - (0x2DC0, "V"), - (0x2DC7, "X"), - (0x2DC8, "V"), - (0x2DCF, "X"), - (0x2DD0, "V"), - (0x2DD7, "X"), - (0x2DD8, "V"), - (0x2DDF, "X"), - (0x2DE0, "V"), - (0x2E5E, "X"), - (0x2E80, "V"), - (0x2E9A, "X"), - (0x2E9B, "V"), - (0x2E9F, "M", "母"), - (0x2EA0, "V"), - (0x2EF3, "M", "龟"), - (0x2EF4, "X"), - (0x2F00, "M", "一"), - (0x2F01, "M", "丨"), - (0x2F02, "M", "丶"), - (0x2F03, "M", "丿"), - (0x2F04, "M", "乙"), - (0x2F05, "M", "亅"), - (0x2F06, "M", "二"), - (0x2F07, "M", "亠"), - (0x2F08, "M", "人"), - (0x2F09, "M", "儿"), - (0x2F0A, "M", "入"), - (0x2F0B, "M", "八"), - (0x2F0C, "M", "冂"), - (0x2F0D, "M", "冖"), - (0x2F0E, "M", "冫"), - (0x2F0F, "M", "几"), - (0x2F10, "M", "凵"), - (0x2F11, "M", "刀"), - (0x2F12, "M", "力"), - (0x2F13, "M", "勹"), - (0x2F14, "M", "匕"), - (0x2F15, "M", "匚"), - (0x2F16, "M", "匸"), - (0x2F17, "M", "十"), - (0x2F18, "M", "卜"), - (0x2F19, "M", "卩"), - ] - - -def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F1A, "M", "厂"), - (0x2F1B, "M", "厶"), - (0x2F1C, "M", "又"), - (0x2F1D, "M", "口"), - (0x2F1E, "M", "囗"), - (0x2F1F, "M", "土"), - (0x2F20, "M", "士"), - (0x2F21, "M", "夂"), - (0x2F22, "M", "夊"), - (0x2F23, "M", "夕"), - (0x2F24, "M", "大"), - (0x2F25, "M", "女"), - (0x2F26, "M", "子"), - (0x2F27, "M", "宀"), - (0x2F28, "M", "寸"), - (0x2F29, "M", "小"), - (0x2F2A, "M", "尢"), - (0x2F2B, "M", "尸"), - (0x2F2C, "M", "屮"), - (0x2F2D, "M", 
"山"), - (0x2F2E, "M", "巛"), - (0x2F2F, "M", "工"), - (0x2F30, "M", "己"), - (0x2F31, "M", "巾"), - (0x2F32, "M", "干"), - (0x2F33, "M", "幺"), - (0x2F34, "M", "广"), - (0x2F35, "M", "廴"), - (0x2F36, "M", "廾"), - (0x2F37, "M", "弋"), - (0x2F38, "M", "弓"), - (0x2F39, "M", "彐"), - (0x2F3A, "M", "彡"), - (0x2F3B, "M", "彳"), - (0x2F3C, "M", "心"), - (0x2F3D, "M", "戈"), - (0x2F3E, "M", "戶"), - (0x2F3F, "M", "手"), - (0x2F40, "M", "支"), - (0x2F41, "M", "攴"), - (0x2F42, "M", "文"), - (0x2F43, "M", "斗"), - (0x2F44, "M", "斤"), - (0x2F45, "M", "方"), - (0x2F46, "M", "无"), - (0x2F47, "M", "日"), - (0x2F48, "M", "曰"), - (0x2F49, "M", "月"), - (0x2F4A, "M", "木"), - (0x2F4B, "M", "欠"), - (0x2F4C, "M", "止"), - (0x2F4D, "M", "歹"), - (0x2F4E, "M", "殳"), - (0x2F4F, "M", "毋"), - (0x2F50, "M", "比"), - (0x2F51, "M", "毛"), - (0x2F52, "M", "氏"), - (0x2F53, "M", "气"), - (0x2F54, "M", "水"), - (0x2F55, "M", "火"), - (0x2F56, "M", "爪"), - (0x2F57, "M", "父"), - (0x2F58, "M", "爻"), - (0x2F59, "M", "爿"), - (0x2F5A, "M", "片"), - (0x2F5B, "M", "牙"), - (0x2F5C, "M", "牛"), - (0x2F5D, "M", "犬"), - (0x2F5E, "M", "玄"), - (0x2F5F, "M", "玉"), - (0x2F60, "M", "瓜"), - (0x2F61, "M", "瓦"), - (0x2F62, "M", "甘"), - (0x2F63, "M", "生"), - (0x2F64, "M", "用"), - (0x2F65, "M", "田"), - (0x2F66, "M", "疋"), - (0x2F67, "M", "疒"), - (0x2F68, "M", "癶"), - (0x2F69, "M", "白"), - (0x2F6A, "M", "皮"), - (0x2F6B, "M", "皿"), - (0x2F6C, "M", "目"), - (0x2F6D, "M", "矛"), - (0x2F6E, "M", "矢"), - (0x2F6F, "M", "石"), - (0x2F70, "M", "示"), - (0x2F71, "M", "禸"), - (0x2F72, "M", "禾"), - (0x2F73, "M", "穴"), - (0x2F74, "M", "立"), - (0x2F75, "M", "竹"), - (0x2F76, "M", "米"), - (0x2F77, "M", "糸"), - (0x2F78, "M", "缶"), - (0x2F79, "M", "网"), - (0x2F7A, "M", "羊"), - (0x2F7B, "M", "羽"), - (0x2F7C, "M", "老"), - (0x2F7D, "M", "而"), - ] - - -def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F7E, "M", "耒"), - (0x2F7F, "M", "耳"), - (0x2F80, "M", "聿"), - (0x2F81, "M", "肉"), - (0x2F82, "M", "臣"), - (0x2F83, "M", "自"), - (0x2F84, "M", "至"), - (0x2F85, "M", "臼"), - (0x2F86, "M", "舌"), - (0x2F87, "M", "舛"), - (0x2F88, "M", "舟"), - (0x2F89, "M", "艮"), - (0x2F8A, "M", "色"), - (0x2F8B, "M", "艸"), - (0x2F8C, "M", "虍"), - (0x2F8D, "M", "虫"), - (0x2F8E, "M", "血"), - (0x2F8F, "M", "行"), - (0x2F90, "M", "衣"), - (0x2F91, "M", "襾"), - (0x2F92, "M", "見"), - (0x2F93, "M", "角"), - (0x2F94, "M", "言"), - (0x2F95, "M", "谷"), - (0x2F96, "M", "豆"), - (0x2F97, "M", "豕"), - (0x2F98, "M", "豸"), - (0x2F99, "M", "貝"), - (0x2F9A, "M", "赤"), - (0x2F9B, "M", "走"), - (0x2F9C, "M", "足"), - (0x2F9D, "M", "身"), - (0x2F9E, "M", "車"), - (0x2F9F, "M", "辛"), - (0x2FA0, "M", "辰"), - (0x2FA1, "M", "辵"), - (0x2FA2, "M", "邑"), - (0x2FA3, "M", "酉"), - (0x2FA4, "M", "釆"), - (0x2FA5, "M", "里"), - (0x2FA6, "M", "金"), - (0x2FA7, "M", "長"), - (0x2FA8, "M", "門"), - (0x2FA9, "M", "阜"), - (0x2FAA, "M", "隶"), - (0x2FAB, "M", "隹"), - (0x2FAC, "M", "雨"), - (0x2FAD, "M", "靑"), - (0x2FAE, "M", "非"), - (0x2FAF, "M", "面"), - (0x2FB0, "M", "革"), - (0x2FB1, "M", "韋"), - (0x2FB2, "M", "韭"), - (0x2FB3, "M", "音"), - (0x2FB4, "M", "頁"), - (0x2FB5, "M", "風"), - (0x2FB6, "M", "飛"), - (0x2FB7, "M", "食"), - (0x2FB8, "M", "首"), - (0x2FB9, "M", "香"), - (0x2FBA, "M", "馬"), - (0x2FBB, "M", "骨"), - (0x2FBC, "M", "高"), - (0x2FBD, "M", "髟"), - (0x2FBE, "M", "鬥"), - (0x2FBF, "M", "鬯"), - (0x2FC0, "M", "鬲"), - (0x2FC1, "M", "鬼"), - (0x2FC2, "M", "魚"), - (0x2FC3, "M", "鳥"), - (0x2FC4, "M", "鹵"), - (0x2FC5, "M", "鹿"), - (0x2FC6, "M", "麥"), - (0x2FC7, "M", "麻"), - (0x2FC8, "M", "黃"), - (0x2FC9, "M", "黍"), - (0x2FCA, "M", "黑"), - 
(0x2FCB, "M", "黹"), - (0x2FCC, "M", "黽"), - (0x2FCD, "M", "鼎"), - (0x2FCE, "M", "鼓"), - (0x2FCF, "M", "鼠"), - (0x2FD0, "M", "鼻"), - (0x2FD1, "M", "齊"), - (0x2FD2, "M", "齒"), - (0x2FD3, "M", "龍"), - (0x2FD4, "M", "龜"), - (0x2FD5, "M", "龠"), - (0x2FD6, "X"), - (0x3000, "3", " "), - (0x3001, "V"), - (0x3002, "M", "."), - (0x3003, "V"), - (0x3036, "M", "〒"), - (0x3037, "V"), - (0x3038, "M", "十"), - (0x3039, "M", "卄"), - (0x303A, "M", "卅"), - (0x303B, "V"), - (0x3040, "X"), - ] - - -def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3041, "V"), - (0x3097, "X"), - (0x3099, "V"), - (0x309B, "3", " ゙"), - (0x309C, "3", " ゚"), - (0x309D, "V"), - (0x309F, "M", "より"), - (0x30A0, "V"), - (0x30FF, "M", "コト"), - (0x3100, "X"), - (0x3105, "V"), - (0x3130, "X"), - (0x3131, "M", "ᄀ"), - (0x3132, "M", "ᄁ"), - (0x3133, "M", "ᆪ"), - (0x3134, "M", "ᄂ"), - (0x3135, "M", "ᆬ"), - (0x3136, "M", "ᆭ"), - (0x3137, "M", "ᄃ"), - (0x3138, "M", "ᄄ"), - (0x3139, "M", "ᄅ"), - (0x313A, "M", "ᆰ"), - (0x313B, "M", "ᆱ"), - (0x313C, "M", "ᆲ"), - (0x313D, "M", "ᆳ"), - (0x313E, "M", "ᆴ"), - (0x313F, "M", "ᆵ"), - (0x3140, "M", "ᄚ"), - (0x3141, "M", "ᄆ"), - (0x3142, "M", "ᄇ"), - (0x3143, "M", "ᄈ"), - (0x3144, "M", "ᄡ"), - (0x3145, "M", "ᄉ"), - (0x3146, "M", "ᄊ"), - (0x3147, "M", "ᄋ"), - (0x3148, "M", "ᄌ"), - (0x3149, "M", "ᄍ"), - (0x314A, "M", "ᄎ"), - (0x314B, "M", "ᄏ"), - (0x314C, "M", "ᄐ"), - (0x314D, "M", "ᄑ"), - (0x314E, "M", "ᄒ"), - (0x314F, "M", "ᅡ"), - (0x3150, "M", "ᅢ"), - (0x3151, "M", "ᅣ"), - (0x3152, "M", "ᅤ"), - (0x3153, "M", "ᅥ"), - (0x3154, "M", "ᅦ"), - (0x3155, "M", "ᅧ"), - (0x3156, "M", "ᅨ"), - (0x3157, "M", "ᅩ"), - (0x3158, "M", "ᅪ"), - (0x3159, "M", "ᅫ"), - (0x315A, "M", "ᅬ"), - (0x315B, "M", "ᅭ"), - (0x315C, "M", "ᅮ"), - (0x315D, "M", "ᅯ"), - (0x315E, "M", "ᅰ"), - (0x315F, "M", "ᅱ"), - (0x3160, "M", "ᅲ"), - (0x3161, "M", "ᅳ"), - (0x3162, "M", "ᅴ"), - (0x3163, "M", "ᅵ"), - (0x3164, "X"), - (0x3165, "M", "ᄔ"), - (0x3166, "M", "ᄕ"), - (0x3167, "M", "ᇇ"), - (0x3168, "M", "ᇈ"), - (0x3169, "M", "ᇌ"), - (0x316A, "M", "ᇎ"), - (0x316B, "M", "ᇓ"), - (0x316C, "M", "ᇗ"), - (0x316D, "M", "ᇙ"), - (0x316E, "M", "ᄜ"), - (0x316F, "M", "ᇝ"), - (0x3170, "M", "ᇟ"), - (0x3171, "M", "ᄝ"), - (0x3172, "M", "ᄞ"), - (0x3173, "M", "ᄠ"), - (0x3174, "M", "ᄢ"), - (0x3175, "M", "ᄣ"), - (0x3176, "M", "ᄧ"), - (0x3177, "M", "ᄩ"), - (0x3178, "M", "ᄫ"), - (0x3179, "M", "ᄬ"), - (0x317A, "M", "ᄭ"), - (0x317B, "M", "ᄮ"), - (0x317C, "M", "ᄯ"), - (0x317D, "M", "ᄲ"), - (0x317E, "M", "ᄶ"), - (0x317F, "M", "ᅀ"), - (0x3180, "M", "ᅇ"), - (0x3181, "M", "ᅌ"), - (0x3182, "M", "ᇱ"), - (0x3183, "M", "ᇲ"), - (0x3184, "M", "ᅗ"), - (0x3185, "M", "ᅘ"), - (0x3186, "M", "ᅙ"), - (0x3187, "M", "ᆄ"), - (0x3188, "M", "ᆅ"), - ] - - -def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3189, "M", "ᆈ"), - (0x318A, "M", "ᆑ"), - (0x318B, "M", "ᆒ"), - (0x318C, "M", "ᆔ"), - (0x318D, "M", "ᆞ"), - (0x318E, "M", "ᆡ"), - (0x318F, "X"), - (0x3190, "V"), - (0x3192, "M", "一"), - (0x3193, "M", "二"), - (0x3194, "M", "三"), - (0x3195, "M", "四"), - (0x3196, "M", "上"), - (0x3197, "M", "中"), - (0x3198, "M", "下"), - (0x3199, "M", "甲"), - (0x319A, "M", "乙"), - (0x319B, "M", "丙"), - (0x319C, "M", "丁"), - (0x319D, "M", "天"), - (0x319E, "M", "地"), - (0x319F, "M", "人"), - (0x31A0, "V"), - (0x31E4, "X"), - (0x31F0, "V"), - (0x3200, "3", "(ᄀ)"), - (0x3201, "3", "(ᄂ)"), - (0x3202, "3", "(ᄃ)"), - (0x3203, "3", "(ᄅ)"), - (0x3204, "3", "(ᄆ)"), - (0x3205, "3", "(ᄇ)"), - (0x3206, "3", "(ᄉ)"), - (0x3207, "3", "(ᄋ)"), - (0x3208, "3", "(ᄌ)"), - 
(0x3209, "3", "(ᄎ)"), - (0x320A, "3", "(ᄏ)"), - (0x320B, "3", "(ᄐ)"), - (0x320C, "3", "(ᄑ)"), - (0x320D, "3", "(ᄒ)"), - (0x320E, "3", "(가)"), - (0x320F, "3", "(나)"), - (0x3210, "3", "(다)"), - (0x3211, "3", "(라)"), - (0x3212, "3", "(마)"), - (0x3213, "3", "(바)"), - (0x3214, "3", "(사)"), - (0x3215, "3", "(아)"), - (0x3216, "3", "(자)"), - (0x3217, "3", "(차)"), - (0x3218, "3", "(카)"), - (0x3219, "3", "(타)"), - (0x321A, "3", "(파)"), - (0x321B, "3", "(하)"), - (0x321C, "3", "(주)"), - (0x321D, "3", "(오전)"), - (0x321E, "3", "(오후)"), - (0x321F, "X"), - (0x3220, "3", "(一)"), - (0x3221, "3", "(二)"), - (0x3222, "3", "(三)"), - (0x3223, "3", "(四)"), - (0x3224, "3", "(五)"), - (0x3225, "3", "(六)"), - (0x3226, "3", "(七)"), - (0x3227, "3", "(八)"), - (0x3228, "3", "(九)"), - (0x3229, "3", "(十)"), - (0x322A, "3", "(月)"), - (0x322B, "3", "(火)"), - (0x322C, "3", "(水)"), - (0x322D, "3", "(木)"), - (0x322E, "3", "(金)"), - (0x322F, "3", "(土)"), - (0x3230, "3", "(日)"), - (0x3231, "3", "(株)"), - (0x3232, "3", "(有)"), - (0x3233, "3", "(社)"), - (0x3234, "3", "(名)"), - (0x3235, "3", "(特)"), - (0x3236, "3", "(財)"), - (0x3237, "3", "(祝)"), - (0x3238, "3", "(労)"), - (0x3239, "3", "(代)"), - (0x323A, "3", "(呼)"), - (0x323B, "3", "(学)"), - (0x323C, "3", "(監)"), - (0x323D, "3", "(企)"), - (0x323E, "3", "(資)"), - (0x323F, "3", "(協)"), - (0x3240, "3", "(祭)"), - (0x3241, "3", "(休)"), - (0x3242, "3", "(自)"), - (0x3243, "3", "(至)"), - (0x3244, "M", "問"), - (0x3245, "M", "幼"), - (0x3246, "M", "文"), - (0x3247, "M", "箏"), - (0x3248, "V"), - (0x3250, "M", "pte"), - (0x3251, "M", "21"), - ] - - -def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3252, "M", "22"), - (0x3253, "M", "23"), - (0x3254, "M", "24"), - (0x3255, "M", "25"), - (0x3256, "M", "26"), - (0x3257, "M", "27"), - (0x3258, "M", "28"), - (0x3259, "M", "29"), - (0x325A, "M", "30"), - (0x325B, "M", "31"), - (0x325C, "M", "32"), - (0x325D, "M", "33"), - (0x325E, "M", "34"), - (0x325F, "M", "35"), - (0x3260, "M", "ᄀ"), - (0x3261, "M", "ᄂ"), - (0x3262, "M", "ᄃ"), - (0x3263, "M", "ᄅ"), - (0x3264, "M", "ᄆ"), - (0x3265, "M", "ᄇ"), - (0x3266, "M", "ᄉ"), - (0x3267, "M", "ᄋ"), - (0x3268, "M", "ᄌ"), - (0x3269, "M", "ᄎ"), - (0x326A, "M", "ᄏ"), - (0x326B, "M", "ᄐ"), - (0x326C, "M", "ᄑ"), - (0x326D, "M", "ᄒ"), - (0x326E, "M", "가"), - (0x326F, "M", "나"), - (0x3270, "M", "다"), - (0x3271, "M", "라"), - (0x3272, "M", "마"), - (0x3273, "M", "바"), - (0x3274, "M", "사"), - (0x3275, "M", "아"), - (0x3276, "M", "자"), - (0x3277, "M", "차"), - (0x3278, "M", "카"), - (0x3279, "M", "타"), - (0x327A, "M", "파"), - (0x327B, "M", "하"), - (0x327C, "M", "참고"), - (0x327D, "M", "주의"), - (0x327E, "M", "우"), - (0x327F, "V"), - (0x3280, "M", "一"), - (0x3281, "M", "二"), - (0x3282, "M", "三"), - (0x3283, "M", "四"), - (0x3284, "M", "五"), - (0x3285, "M", "六"), - (0x3286, "M", "七"), - (0x3287, "M", "八"), - (0x3288, "M", "九"), - (0x3289, "M", "十"), - (0x328A, "M", "月"), - (0x328B, "M", "火"), - (0x328C, "M", "水"), - (0x328D, "M", "木"), - (0x328E, "M", "金"), - (0x328F, "M", "土"), - (0x3290, "M", "日"), - (0x3291, "M", "株"), - (0x3292, "M", "有"), - (0x3293, "M", "社"), - (0x3294, "M", "名"), - (0x3295, "M", "特"), - (0x3296, "M", "財"), - (0x3297, "M", "祝"), - (0x3298, "M", "労"), - (0x3299, "M", "秘"), - (0x329A, "M", "男"), - (0x329B, "M", "女"), - (0x329C, "M", "適"), - (0x329D, "M", "優"), - (0x329E, "M", "印"), - (0x329F, "M", "注"), - (0x32A0, "M", "項"), - (0x32A1, "M", "休"), - (0x32A2, "M", "写"), - (0x32A3, "M", "正"), - (0x32A4, "M", "上"), - (0x32A5, "M", "中"), - (0x32A6, "M", "下"), - (0x32A7, "M", "左"), - 
(0x32A8, "M", "右"), - (0x32A9, "M", "医"), - (0x32AA, "M", "宗"), - (0x32AB, "M", "学"), - (0x32AC, "M", "監"), - (0x32AD, "M", "企"), - (0x32AE, "M", "資"), - (0x32AF, "M", "協"), - (0x32B0, "M", "夜"), - (0x32B1, "M", "36"), - (0x32B2, "M", "37"), - (0x32B3, "M", "38"), - (0x32B4, "M", "39"), - (0x32B5, "M", "40"), - ] - - -def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x32B6, "M", "41"), - (0x32B7, "M", "42"), - (0x32B8, "M", "43"), - (0x32B9, "M", "44"), - (0x32BA, "M", "45"), - (0x32BB, "M", "46"), - (0x32BC, "M", "47"), - (0x32BD, "M", "48"), - (0x32BE, "M", "49"), - (0x32BF, "M", "50"), - (0x32C0, "M", "1月"), - (0x32C1, "M", "2月"), - (0x32C2, "M", "3月"), - (0x32C3, "M", "4月"), - (0x32C4, "M", "5月"), - (0x32C5, "M", "6月"), - (0x32C6, "M", "7月"), - (0x32C7, "M", "8月"), - (0x32C8, "M", "9月"), - (0x32C9, "M", "10月"), - (0x32CA, "M", "11月"), - (0x32CB, "M", "12月"), - (0x32CC, "M", "hg"), - (0x32CD, "M", "erg"), - (0x32CE, "M", "ev"), - (0x32CF, "M", "ltd"), - (0x32D0, "M", "ア"), - (0x32D1, "M", "イ"), - (0x32D2, "M", "ウ"), - (0x32D3, "M", "エ"), - (0x32D4, "M", "オ"), - (0x32D5, "M", "カ"), - (0x32D6, "M", "キ"), - (0x32D7, "M", "ク"), - (0x32D8, "M", "ケ"), - (0x32D9, "M", "コ"), - (0x32DA, "M", "サ"), - (0x32DB, "M", "シ"), - (0x32DC, "M", "ス"), - (0x32DD, "M", "セ"), - (0x32DE, "M", "ソ"), - (0x32DF, "M", "タ"), - (0x32E0, "M", "チ"), - (0x32E1, "M", "ツ"), - (0x32E2, "M", "テ"), - (0x32E3, "M", "ト"), - (0x32E4, "M", "ナ"), - (0x32E5, "M", "ニ"), - (0x32E6, "M", "ヌ"), - (0x32E7, "M", "ネ"), - (0x32E8, "M", "ノ"), - (0x32E9, "M", "ハ"), - (0x32EA, "M", "ヒ"), - (0x32EB, "M", "フ"), - (0x32EC, "M", "ヘ"), - (0x32ED, "M", "ホ"), - (0x32EE, "M", "マ"), - (0x32EF, "M", "ミ"), - (0x32F0, "M", "ム"), - (0x32F1, "M", "メ"), - (0x32F2, "M", "モ"), - (0x32F3, "M", "ヤ"), - (0x32F4, "M", "ユ"), - (0x32F5, "M", "ヨ"), - (0x32F6, "M", "ラ"), - (0x32F7, "M", "リ"), - (0x32F8, "M", "ル"), - (0x32F9, "M", "レ"), - (0x32FA, "M", "ロ"), - (0x32FB, "M", "ワ"), - (0x32FC, "M", "ヰ"), - (0x32FD, "M", "ヱ"), - (0x32FE, "M", "ヲ"), - (0x32FF, "M", "令和"), - (0x3300, "M", "アパート"), - (0x3301, "M", "アルファ"), - (0x3302, "M", "アンペア"), - (0x3303, "M", "アール"), - (0x3304, "M", "イニング"), - (0x3305, "M", "インチ"), - (0x3306, "M", "ウォン"), - (0x3307, "M", "エスクード"), - (0x3308, "M", "エーカー"), - (0x3309, "M", "オンス"), - (0x330A, "M", "オーム"), - (0x330B, "M", "カイリ"), - (0x330C, "M", "カラット"), - (0x330D, "M", "カロリー"), - (0x330E, "M", "ガロン"), - (0x330F, "M", "ガンマ"), - (0x3310, "M", "ギガ"), - (0x3311, "M", "ギニー"), - (0x3312, "M", "キュリー"), - (0x3313, "M", "ギルダー"), - (0x3314, "M", "キロ"), - (0x3315, "M", "キログラム"), - (0x3316, "M", "キロメートル"), - (0x3317, "M", "キロワット"), - (0x3318, "M", "グラム"), - (0x3319, "M", "グラムトン"), - ] - - -def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x331A, "M", "クルゼイロ"), - (0x331B, "M", "クローネ"), - (0x331C, "M", "ケース"), - (0x331D, "M", "コルナ"), - (0x331E, "M", "コーポ"), - (0x331F, "M", "サイクル"), - (0x3320, "M", "サンチーム"), - (0x3321, "M", "シリング"), - (0x3322, "M", "センチ"), - (0x3323, "M", "セント"), - (0x3324, "M", "ダース"), - (0x3325, "M", "デシ"), - (0x3326, "M", "ドル"), - (0x3327, "M", "トン"), - (0x3328, "M", "ナノ"), - (0x3329, "M", "ノット"), - (0x332A, "M", "ハイツ"), - (0x332B, "M", "パーセント"), - (0x332C, "M", "パーツ"), - (0x332D, "M", "バーレル"), - (0x332E, "M", "ピアストル"), - (0x332F, "M", "ピクル"), - (0x3330, "M", "ピコ"), - (0x3331, "M", "ビル"), - (0x3332, "M", "ファラッド"), - (0x3333, "M", "フィート"), - (0x3334, "M", "ブッシェル"), - (0x3335, "M", "フラン"), - (0x3336, "M", "ヘクタール"), - (0x3337, "M", "ペソ"), - (0x3338, "M", "ペニヒ"), - 
(0x3339, "M", "ヘルツ"), - (0x333A, "M", "ペンス"), - (0x333B, "M", "ページ"), - (0x333C, "M", "ベータ"), - (0x333D, "M", "ポイント"), - (0x333E, "M", "ボルト"), - (0x333F, "M", "ホン"), - (0x3340, "M", "ポンド"), - (0x3341, "M", "ホール"), - (0x3342, "M", "ホーン"), - (0x3343, "M", "マイクロ"), - (0x3344, "M", "マイル"), - (0x3345, "M", "マッハ"), - (0x3346, "M", "マルク"), - (0x3347, "M", "マンション"), - (0x3348, "M", "ミクロン"), - (0x3349, "M", "ミリ"), - (0x334A, "M", "ミリバール"), - (0x334B, "M", "メガ"), - (0x334C, "M", "メガトン"), - (0x334D, "M", "メートル"), - (0x334E, "M", "ヤード"), - (0x334F, "M", "ヤール"), - (0x3350, "M", "ユアン"), - (0x3351, "M", "リットル"), - (0x3352, "M", "リラ"), - (0x3353, "M", "ルピー"), - (0x3354, "M", "ルーブル"), - (0x3355, "M", "レム"), - (0x3356, "M", "レントゲン"), - (0x3357, "M", "ワット"), - (0x3358, "M", "0点"), - (0x3359, "M", "1点"), - (0x335A, "M", "2点"), - (0x335B, "M", "3点"), - (0x335C, "M", "4点"), - (0x335D, "M", "5点"), - (0x335E, "M", "6点"), - (0x335F, "M", "7点"), - (0x3360, "M", "8点"), - (0x3361, "M", "9点"), - (0x3362, "M", "10点"), - (0x3363, "M", "11点"), - (0x3364, "M", "12点"), - (0x3365, "M", "13点"), - (0x3366, "M", "14点"), - (0x3367, "M", "15点"), - (0x3368, "M", "16点"), - (0x3369, "M", "17点"), - (0x336A, "M", "18点"), - (0x336B, "M", "19点"), - (0x336C, "M", "20点"), - (0x336D, "M", "21点"), - (0x336E, "M", "22点"), - (0x336F, "M", "23点"), - (0x3370, "M", "24点"), - (0x3371, "M", "hpa"), - (0x3372, "M", "da"), - (0x3373, "M", "au"), - (0x3374, "M", "bar"), - (0x3375, "M", "ov"), - (0x3376, "M", "pc"), - (0x3377, "M", "dm"), - (0x3378, "M", "dm2"), - (0x3379, "M", "dm3"), - (0x337A, "M", "iu"), - (0x337B, "M", "平成"), - (0x337C, "M", "昭和"), - (0x337D, "M", "大正"), - ] - - -def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x337E, "M", "明治"), - (0x337F, "M", "株式会社"), - (0x3380, "M", "pa"), - (0x3381, "M", "na"), - (0x3382, "M", "μa"), - (0x3383, "M", "ma"), - (0x3384, "M", "ka"), - (0x3385, "M", "kb"), - (0x3386, "M", "mb"), - (0x3387, "M", "gb"), - (0x3388, "M", "cal"), - (0x3389, "M", "kcal"), - (0x338A, "M", "pf"), - (0x338B, "M", "nf"), - (0x338C, "M", "μf"), - (0x338D, "M", "μg"), - (0x338E, "M", "mg"), - (0x338F, "M", "kg"), - (0x3390, "M", "hz"), - (0x3391, "M", "khz"), - (0x3392, "M", "mhz"), - (0x3393, "M", "ghz"), - (0x3394, "M", "thz"), - (0x3395, "M", "μl"), - (0x3396, "M", "ml"), - (0x3397, "M", "dl"), - (0x3398, "M", "kl"), - (0x3399, "M", "fm"), - (0x339A, "M", "nm"), - (0x339B, "M", "μm"), - (0x339C, "M", "mm"), - (0x339D, "M", "cm"), - (0x339E, "M", "km"), - (0x339F, "M", "mm2"), - (0x33A0, "M", "cm2"), - (0x33A1, "M", "m2"), - (0x33A2, "M", "km2"), - (0x33A3, "M", "mm3"), - (0x33A4, "M", "cm3"), - (0x33A5, "M", "m3"), - (0x33A6, "M", "km3"), - (0x33A7, "M", "m∕s"), - (0x33A8, "M", "m∕s2"), - (0x33A9, "M", "pa"), - (0x33AA, "M", "kpa"), - (0x33AB, "M", "mpa"), - (0x33AC, "M", "gpa"), - (0x33AD, "M", "rad"), - (0x33AE, "M", "rad∕s"), - (0x33AF, "M", "rad∕s2"), - (0x33B0, "M", "ps"), - (0x33B1, "M", "ns"), - (0x33B2, "M", "μs"), - (0x33B3, "M", "ms"), - (0x33B4, "M", "pv"), - (0x33B5, "M", "nv"), - (0x33B6, "M", "μv"), - (0x33B7, "M", "mv"), - (0x33B8, "M", "kv"), - (0x33B9, "M", "mv"), - (0x33BA, "M", "pw"), - (0x33BB, "M", "nw"), - (0x33BC, "M", "μw"), - (0x33BD, "M", "mw"), - (0x33BE, "M", "kw"), - (0x33BF, "M", "mw"), - (0x33C0, "M", "kω"), - (0x33C1, "M", "mω"), - (0x33C2, "X"), - (0x33C3, "M", "bq"), - (0x33C4, "M", "cc"), - (0x33C5, "M", "cd"), - (0x33C6, "M", "c∕kg"), - (0x33C7, "X"), - (0x33C8, "M", "db"), - (0x33C9, "M", "gy"), - (0x33CA, "M", "ha"), - (0x33CB, "M", "hp"), - 
(0x33CC, "M", "in"), - (0x33CD, "M", "kk"), - (0x33CE, "M", "km"), - (0x33CF, "M", "kt"), - (0x33D0, "M", "lm"), - (0x33D1, "M", "ln"), - (0x33D2, "M", "log"), - (0x33D3, "M", "lx"), - (0x33D4, "M", "mb"), - (0x33D5, "M", "mil"), - (0x33D6, "M", "mol"), - (0x33D7, "M", "ph"), - (0x33D8, "X"), - (0x33D9, "M", "ppm"), - (0x33DA, "M", "pr"), - (0x33DB, "M", "sr"), - (0x33DC, "M", "sv"), - (0x33DD, "M", "wb"), - (0x33DE, "M", "v∕m"), - (0x33DF, "M", "a∕m"), - (0x33E0, "M", "1日"), - (0x33E1, "M", "2日"), - ] - - -def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x33E2, "M", "3日"), - (0x33E3, "M", "4日"), - (0x33E4, "M", "5日"), - (0x33E5, "M", "6日"), - (0x33E6, "M", "7日"), - (0x33E7, "M", "8日"), - (0x33E8, "M", "9日"), - (0x33E9, "M", "10日"), - (0x33EA, "M", "11日"), - (0x33EB, "M", "12日"), - (0x33EC, "M", "13日"), - (0x33ED, "M", "14日"), - (0x33EE, "M", "15日"), - (0x33EF, "M", "16日"), - (0x33F0, "M", "17日"), - (0x33F1, "M", "18日"), - (0x33F2, "M", "19日"), - (0x33F3, "M", "20日"), - (0x33F4, "M", "21日"), - (0x33F5, "M", "22日"), - (0x33F6, "M", "23日"), - (0x33F7, "M", "24日"), - (0x33F8, "M", "25日"), - (0x33F9, "M", "26日"), - (0x33FA, "M", "27日"), - (0x33FB, "M", "28日"), - (0x33FC, "M", "29日"), - (0x33FD, "M", "30日"), - (0x33FE, "M", "31日"), - (0x33FF, "M", "gal"), - (0x3400, "V"), - (0xA48D, "X"), - (0xA490, "V"), - (0xA4C7, "X"), - (0xA4D0, "V"), - (0xA62C, "X"), - (0xA640, "M", "ꙁ"), - (0xA641, "V"), - (0xA642, "M", "ꙃ"), - (0xA643, "V"), - (0xA644, "M", "ꙅ"), - (0xA645, "V"), - (0xA646, "M", "ꙇ"), - (0xA647, "V"), - (0xA648, "M", "ꙉ"), - (0xA649, "V"), - (0xA64A, "M", "ꙋ"), - (0xA64B, "V"), - (0xA64C, "M", "ꙍ"), - (0xA64D, "V"), - (0xA64E, "M", "ꙏ"), - (0xA64F, "V"), - (0xA650, "M", "ꙑ"), - (0xA651, "V"), - (0xA652, "M", "ꙓ"), - (0xA653, "V"), - (0xA654, "M", "ꙕ"), - (0xA655, "V"), - (0xA656, "M", "ꙗ"), - (0xA657, "V"), - (0xA658, "M", "ꙙ"), - (0xA659, "V"), - (0xA65A, "M", "ꙛ"), - (0xA65B, "V"), - (0xA65C, "M", "ꙝ"), - (0xA65D, "V"), - (0xA65E, "M", "ꙟ"), - (0xA65F, "V"), - (0xA660, "M", "ꙡ"), - (0xA661, "V"), - (0xA662, "M", "ꙣ"), - (0xA663, "V"), - (0xA664, "M", "ꙥ"), - (0xA665, "V"), - (0xA666, "M", "ꙧ"), - (0xA667, "V"), - (0xA668, "M", "ꙩ"), - (0xA669, "V"), - (0xA66A, "M", "ꙫ"), - (0xA66B, "V"), - (0xA66C, "M", "ꙭ"), - (0xA66D, "V"), - (0xA680, "M", "ꚁ"), - (0xA681, "V"), - (0xA682, "M", "ꚃ"), - (0xA683, "V"), - (0xA684, "M", "ꚅ"), - (0xA685, "V"), - (0xA686, "M", "ꚇ"), - (0xA687, "V"), - (0xA688, "M", "ꚉ"), - (0xA689, "V"), - (0xA68A, "M", "ꚋ"), - (0xA68B, "V"), - (0xA68C, "M", "ꚍ"), - (0xA68D, "V"), - (0xA68E, "M", "ꚏ"), - (0xA68F, "V"), - (0xA690, "M", "ꚑ"), - (0xA691, "V"), - ] - - -def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA692, "M", "ꚓ"), - (0xA693, "V"), - (0xA694, "M", "ꚕ"), - (0xA695, "V"), - (0xA696, "M", "ꚗ"), - (0xA697, "V"), - (0xA698, "M", "ꚙ"), - (0xA699, "V"), - (0xA69A, "M", "ꚛ"), - (0xA69B, "V"), - (0xA69C, "M", "ъ"), - (0xA69D, "M", "ь"), - (0xA69E, "V"), - (0xA6F8, "X"), - (0xA700, "V"), - (0xA722, "M", "ꜣ"), - (0xA723, "V"), - (0xA724, "M", "ꜥ"), - (0xA725, "V"), - (0xA726, "M", "ꜧ"), - (0xA727, "V"), - (0xA728, "M", "ꜩ"), - (0xA729, "V"), - (0xA72A, "M", "ꜫ"), - (0xA72B, "V"), - (0xA72C, "M", "ꜭ"), - (0xA72D, "V"), - (0xA72E, "M", "ꜯ"), - (0xA72F, "V"), - (0xA732, "M", "ꜳ"), - (0xA733, "V"), - (0xA734, "M", "ꜵ"), - (0xA735, "V"), - (0xA736, "M", "ꜷ"), - (0xA737, "V"), - (0xA738, "M", "ꜹ"), - (0xA739, "V"), - (0xA73A, "M", "ꜻ"), - (0xA73B, "V"), - (0xA73C, "M", "ꜽ"), - (0xA73D, "V"), - (0xA73E, 
"M", "ꜿ"), - (0xA73F, "V"), - (0xA740, "M", "ꝁ"), - (0xA741, "V"), - (0xA742, "M", "ꝃ"), - (0xA743, "V"), - (0xA744, "M", "ꝅ"), - (0xA745, "V"), - (0xA746, "M", "ꝇ"), - (0xA747, "V"), - (0xA748, "M", "ꝉ"), - (0xA749, "V"), - (0xA74A, "M", "ꝋ"), - (0xA74B, "V"), - (0xA74C, "M", "ꝍ"), - (0xA74D, "V"), - (0xA74E, "M", "ꝏ"), - (0xA74F, "V"), - (0xA750, "M", "ꝑ"), - (0xA751, "V"), - (0xA752, "M", "ꝓ"), - (0xA753, "V"), - (0xA754, "M", "ꝕ"), - (0xA755, "V"), - (0xA756, "M", "ꝗ"), - (0xA757, "V"), - (0xA758, "M", "ꝙ"), - (0xA759, "V"), - (0xA75A, "M", "ꝛ"), - (0xA75B, "V"), - (0xA75C, "M", "ꝝ"), - (0xA75D, "V"), - (0xA75E, "M", "ꝟ"), - (0xA75F, "V"), - (0xA760, "M", "ꝡ"), - (0xA761, "V"), - (0xA762, "M", "ꝣ"), - (0xA763, "V"), - (0xA764, "M", "ꝥ"), - (0xA765, "V"), - (0xA766, "M", "ꝧ"), - (0xA767, "V"), - (0xA768, "M", "ꝩ"), - (0xA769, "V"), - (0xA76A, "M", "ꝫ"), - (0xA76B, "V"), - (0xA76C, "M", "ꝭ"), - (0xA76D, "V"), - (0xA76E, "M", "ꝯ"), - (0xA76F, "V"), - (0xA770, "M", "ꝯ"), - (0xA771, "V"), - (0xA779, "M", "ꝺ"), - (0xA77A, "V"), - (0xA77B, "M", "ꝼ"), - (0xA77C, "V"), - (0xA77D, "M", "ᵹ"), - (0xA77E, "M", "ꝿ"), - (0xA77F, "V"), - ] - - -def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA780, "M", "ꞁ"), - (0xA781, "V"), - (0xA782, "M", "ꞃ"), - (0xA783, "V"), - (0xA784, "M", "ꞅ"), - (0xA785, "V"), - (0xA786, "M", "ꞇ"), - (0xA787, "V"), - (0xA78B, "M", "ꞌ"), - (0xA78C, "V"), - (0xA78D, "M", "ɥ"), - (0xA78E, "V"), - (0xA790, "M", "ꞑ"), - (0xA791, "V"), - (0xA792, "M", "ꞓ"), - (0xA793, "V"), - (0xA796, "M", "ꞗ"), - (0xA797, "V"), - (0xA798, "M", "ꞙ"), - (0xA799, "V"), - (0xA79A, "M", "ꞛ"), - (0xA79B, "V"), - (0xA79C, "M", "ꞝ"), - (0xA79D, "V"), - (0xA79E, "M", "ꞟ"), - (0xA79F, "V"), - (0xA7A0, "M", "ꞡ"), - (0xA7A1, "V"), - (0xA7A2, "M", "ꞣ"), - (0xA7A3, "V"), - (0xA7A4, "M", "ꞥ"), - (0xA7A5, "V"), - (0xA7A6, "M", "ꞧ"), - (0xA7A7, "V"), - (0xA7A8, "M", "ꞩ"), - (0xA7A9, "V"), - (0xA7AA, "M", "ɦ"), - (0xA7AB, "M", "ɜ"), - (0xA7AC, "M", "ɡ"), - (0xA7AD, "M", "ɬ"), - (0xA7AE, "M", "ɪ"), - (0xA7AF, "V"), - (0xA7B0, "M", "ʞ"), - (0xA7B1, "M", "ʇ"), - (0xA7B2, "M", "ʝ"), - (0xA7B3, "M", "ꭓ"), - (0xA7B4, "M", "ꞵ"), - (0xA7B5, "V"), - (0xA7B6, "M", "ꞷ"), - (0xA7B7, "V"), - (0xA7B8, "M", "ꞹ"), - (0xA7B9, "V"), - (0xA7BA, "M", "ꞻ"), - (0xA7BB, "V"), - (0xA7BC, "M", "ꞽ"), - (0xA7BD, "V"), - (0xA7BE, "M", "ꞿ"), - (0xA7BF, "V"), - (0xA7C0, "M", "ꟁ"), - (0xA7C1, "V"), - (0xA7C2, "M", "ꟃ"), - (0xA7C3, "V"), - (0xA7C4, "M", "ꞔ"), - (0xA7C5, "M", "ʂ"), - (0xA7C6, "M", "ᶎ"), - (0xA7C7, "M", "ꟈ"), - (0xA7C8, "V"), - (0xA7C9, "M", "ꟊ"), - (0xA7CA, "V"), - (0xA7CB, "X"), - (0xA7D0, "M", "ꟑ"), - (0xA7D1, "V"), - (0xA7D2, "X"), - (0xA7D3, "V"), - (0xA7D4, "X"), - (0xA7D5, "V"), - (0xA7D6, "M", "ꟗ"), - (0xA7D7, "V"), - (0xA7D8, "M", "ꟙ"), - (0xA7D9, "V"), - (0xA7DA, "X"), - (0xA7F2, "M", "c"), - (0xA7F3, "M", "f"), - (0xA7F4, "M", "q"), - (0xA7F5, "M", "ꟶ"), - (0xA7F6, "V"), - (0xA7F8, "M", "ħ"), - (0xA7F9, "M", "œ"), - (0xA7FA, "V"), - (0xA82D, "X"), - (0xA830, "V"), - (0xA83A, "X"), - (0xA840, "V"), - (0xA878, "X"), - (0xA880, "V"), - (0xA8C6, "X"), - (0xA8CE, "V"), - (0xA8DA, "X"), - (0xA8E0, "V"), - (0xA954, "X"), - ] - - -def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA95F, "V"), - (0xA97D, "X"), - (0xA980, "V"), - (0xA9CE, "X"), - (0xA9CF, "V"), - (0xA9DA, "X"), - (0xA9DE, "V"), - (0xA9FF, "X"), - (0xAA00, "V"), - (0xAA37, "X"), - (0xAA40, "V"), - (0xAA4E, "X"), - (0xAA50, "V"), - (0xAA5A, "X"), - (0xAA5C, "V"), - (0xAAC3, "X"), - (0xAADB, "V"), 
- (0xAAF7, "X"), - (0xAB01, "V"), - (0xAB07, "X"), - (0xAB09, "V"), - (0xAB0F, "X"), - (0xAB11, "V"), - (0xAB17, "X"), - (0xAB20, "V"), - (0xAB27, "X"), - (0xAB28, "V"), - (0xAB2F, "X"), - (0xAB30, "V"), - (0xAB5C, "M", "ꜧ"), - (0xAB5D, "M", "ꬷ"), - (0xAB5E, "M", "ɫ"), - (0xAB5F, "M", "ꭒ"), - (0xAB60, "V"), - (0xAB69, "M", "ʍ"), - (0xAB6A, "V"), - (0xAB6C, "X"), - (0xAB70, "M", "Ꭰ"), - (0xAB71, "M", "Ꭱ"), - (0xAB72, "M", "Ꭲ"), - (0xAB73, "M", "Ꭳ"), - (0xAB74, "M", "Ꭴ"), - (0xAB75, "M", "Ꭵ"), - (0xAB76, "M", "Ꭶ"), - (0xAB77, "M", "Ꭷ"), - (0xAB78, "M", "Ꭸ"), - (0xAB79, "M", "Ꭹ"), - (0xAB7A, "M", "Ꭺ"), - (0xAB7B, "M", "Ꭻ"), - (0xAB7C, "M", "Ꭼ"), - (0xAB7D, "M", "Ꭽ"), - (0xAB7E, "M", "Ꭾ"), - (0xAB7F, "M", "Ꭿ"), - (0xAB80, "M", "Ꮀ"), - (0xAB81, "M", "Ꮁ"), - (0xAB82, "M", "Ꮂ"), - (0xAB83, "M", "Ꮃ"), - (0xAB84, "M", "Ꮄ"), - (0xAB85, "M", "Ꮅ"), - (0xAB86, "M", "Ꮆ"), - (0xAB87, "M", "Ꮇ"), - (0xAB88, "M", "Ꮈ"), - (0xAB89, "M", "Ꮉ"), - (0xAB8A, "M", "Ꮊ"), - (0xAB8B, "M", "Ꮋ"), - (0xAB8C, "M", "Ꮌ"), - (0xAB8D, "M", "Ꮍ"), - (0xAB8E, "M", "Ꮎ"), - (0xAB8F, "M", "Ꮏ"), - (0xAB90, "M", "Ꮐ"), - (0xAB91, "M", "Ꮑ"), - (0xAB92, "M", "Ꮒ"), - (0xAB93, "M", "Ꮓ"), - (0xAB94, "M", "Ꮔ"), - (0xAB95, "M", "Ꮕ"), - (0xAB96, "M", "Ꮖ"), - (0xAB97, "M", "Ꮗ"), - (0xAB98, "M", "Ꮘ"), - (0xAB99, "M", "Ꮙ"), - (0xAB9A, "M", "Ꮚ"), - (0xAB9B, "M", "Ꮛ"), - (0xAB9C, "M", "Ꮜ"), - (0xAB9D, "M", "Ꮝ"), - (0xAB9E, "M", "Ꮞ"), - (0xAB9F, "M", "Ꮟ"), - (0xABA0, "M", "Ꮠ"), - (0xABA1, "M", "Ꮡ"), - (0xABA2, "M", "Ꮢ"), - (0xABA3, "M", "Ꮣ"), - (0xABA4, "M", "Ꮤ"), - (0xABA5, "M", "Ꮥ"), - (0xABA6, "M", "Ꮦ"), - (0xABA7, "M", "Ꮧ"), - (0xABA8, "M", "Ꮨ"), - (0xABA9, "M", "Ꮩ"), - (0xABAA, "M", "Ꮪ"), - (0xABAB, "M", "Ꮫ"), - (0xABAC, "M", "Ꮬ"), - (0xABAD, "M", "Ꮭ"), - (0xABAE, "M", "Ꮮ"), - ] - - -def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xABAF, "M", "Ꮯ"), - (0xABB0, "M", "Ꮰ"), - (0xABB1, "M", "Ꮱ"), - (0xABB2, "M", "Ꮲ"), - (0xABB3, "M", "Ꮳ"), - (0xABB4, "M", "Ꮴ"), - (0xABB5, "M", "Ꮵ"), - (0xABB6, "M", "Ꮶ"), - (0xABB7, "M", "Ꮷ"), - (0xABB8, "M", "Ꮸ"), - (0xABB9, "M", "Ꮹ"), - (0xABBA, "M", "Ꮺ"), - (0xABBB, "M", "Ꮻ"), - (0xABBC, "M", "Ꮼ"), - (0xABBD, "M", "Ꮽ"), - (0xABBE, "M", "Ꮾ"), - (0xABBF, "M", "Ꮿ"), - (0xABC0, "V"), - (0xABEE, "X"), - (0xABF0, "V"), - (0xABFA, "X"), - (0xAC00, "V"), - (0xD7A4, "X"), - (0xD7B0, "V"), - (0xD7C7, "X"), - (0xD7CB, "V"), - (0xD7FC, "X"), - (0xF900, "M", "豈"), - (0xF901, "M", "更"), - (0xF902, "M", "車"), - (0xF903, "M", "賈"), - (0xF904, "M", "滑"), - (0xF905, "M", "串"), - (0xF906, "M", "句"), - (0xF907, "M", "龜"), - (0xF909, "M", "契"), - (0xF90A, "M", "金"), - (0xF90B, "M", "喇"), - (0xF90C, "M", "奈"), - (0xF90D, "M", "懶"), - (0xF90E, "M", "癩"), - (0xF90F, "M", "羅"), - (0xF910, "M", "蘿"), - (0xF911, "M", "螺"), - (0xF912, "M", "裸"), - (0xF913, "M", "邏"), - (0xF914, "M", "樂"), - (0xF915, "M", "洛"), - (0xF916, "M", "烙"), - (0xF917, "M", "珞"), - (0xF918, "M", "落"), - (0xF919, "M", "酪"), - (0xF91A, "M", "駱"), - (0xF91B, "M", "亂"), - (0xF91C, "M", "卵"), - (0xF91D, "M", "欄"), - (0xF91E, "M", "爛"), - (0xF91F, "M", "蘭"), - (0xF920, "M", "鸞"), - (0xF921, "M", "嵐"), - (0xF922, "M", "濫"), - (0xF923, "M", "藍"), - (0xF924, "M", "襤"), - (0xF925, "M", "拉"), - (0xF926, "M", "臘"), - (0xF927, "M", "蠟"), - (0xF928, "M", "廊"), - (0xF929, "M", "朗"), - (0xF92A, "M", "浪"), - (0xF92B, "M", "狼"), - (0xF92C, "M", "郎"), - (0xF92D, "M", "來"), - (0xF92E, "M", "冷"), - (0xF92F, "M", "勞"), - (0xF930, "M", "擄"), - (0xF931, "M", "櫓"), - (0xF932, "M", "爐"), - (0xF933, "M", "盧"), - (0xF934, "M", "老"), - (0xF935, "M", "蘆"), - 
(0xF936, "M", "虜"), - (0xF937, "M", "路"), - (0xF938, "M", "露"), - (0xF939, "M", "魯"), - (0xF93A, "M", "鷺"), - (0xF93B, "M", "碌"), - (0xF93C, "M", "祿"), - (0xF93D, "M", "綠"), - (0xF93E, "M", "菉"), - (0xF93F, "M", "錄"), - (0xF940, "M", "鹿"), - (0xF941, "M", "論"), - (0xF942, "M", "壟"), - (0xF943, "M", "弄"), - (0xF944, "M", "籠"), - (0xF945, "M", "聾"), - (0xF946, "M", "牢"), - (0xF947, "M", "磊"), - (0xF948, "M", "賂"), - (0xF949, "M", "雷"), - ] - - -def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF94A, "M", "壘"), - (0xF94B, "M", "屢"), - (0xF94C, "M", "樓"), - (0xF94D, "M", "淚"), - (0xF94E, "M", "漏"), - (0xF94F, "M", "累"), - (0xF950, "M", "縷"), - (0xF951, "M", "陋"), - (0xF952, "M", "勒"), - (0xF953, "M", "肋"), - (0xF954, "M", "凜"), - (0xF955, "M", "凌"), - (0xF956, "M", "稜"), - (0xF957, "M", "綾"), - (0xF958, "M", "菱"), - (0xF959, "M", "陵"), - (0xF95A, "M", "讀"), - (0xF95B, "M", "拏"), - (0xF95C, "M", "樂"), - (0xF95D, "M", "諾"), - (0xF95E, "M", "丹"), - (0xF95F, "M", "寧"), - (0xF960, "M", "怒"), - (0xF961, "M", "率"), - (0xF962, "M", "異"), - (0xF963, "M", "北"), - (0xF964, "M", "磻"), - (0xF965, "M", "便"), - (0xF966, "M", "復"), - (0xF967, "M", "不"), - (0xF968, "M", "泌"), - (0xF969, "M", "數"), - (0xF96A, "M", "索"), - (0xF96B, "M", "參"), - (0xF96C, "M", "塞"), - (0xF96D, "M", "省"), - (0xF96E, "M", "葉"), - (0xF96F, "M", "說"), - (0xF970, "M", "殺"), - (0xF971, "M", "辰"), - (0xF972, "M", "沈"), - (0xF973, "M", "拾"), - (0xF974, "M", "若"), - (0xF975, "M", "掠"), - (0xF976, "M", "略"), - (0xF977, "M", "亮"), - (0xF978, "M", "兩"), - (0xF979, "M", "凉"), - (0xF97A, "M", "梁"), - (0xF97B, "M", "糧"), - (0xF97C, "M", "良"), - (0xF97D, "M", "諒"), - (0xF97E, "M", "量"), - (0xF97F, "M", "勵"), - (0xF980, "M", "呂"), - (0xF981, "M", "女"), - (0xF982, "M", "廬"), - (0xF983, "M", "旅"), - (0xF984, "M", "濾"), - (0xF985, "M", "礪"), - (0xF986, "M", "閭"), - (0xF987, "M", "驪"), - (0xF988, "M", "麗"), - (0xF989, "M", "黎"), - (0xF98A, "M", "力"), - (0xF98B, "M", "曆"), - (0xF98C, "M", "歷"), - (0xF98D, "M", "轢"), - (0xF98E, "M", "年"), - (0xF98F, "M", "憐"), - (0xF990, "M", "戀"), - (0xF991, "M", "撚"), - (0xF992, "M", "漣"), - (0xF993, "M", "煉"), - (0xF994, "M", "璉"), - (0xF995, "M", "秊"), - (0xF996, "M", "練"), - (0xF997, "M", "聯"), - (0xF998, "M", "輦"), - (0xF999, "M", "蓮"), - (0xF99A, "M", "連"), - (0xF99B, "M", "鍊"), - (0xF99C, "M", "列"), - (0xF99D, "M", "劣"), - (0xF99E, "M", "咽"), - (0xF99F, "M", "烈"), - (0xF9A0, "M", "裂"), - (0xF9A1, "M", "說"), - (0xF9A2, "M", "廉"), - (0xF9A3, "M", "念"), - (0xF9A4, "M", "捻"), - (0xF9A5, "M", "殮"), - (0xF9A6, "M", "簾"), - (0xF9A7, "M", "獵"), - (0xF9A8, "M", "令"), - (0xF9A9, "M", "囹"), - (0xF9AA, "M", "寧"), - (0xF9AB, "M", "嶺"), - (0xF9AC, "M", "怜"), - (0xF9AD, "M", "玲"), - ] - - -def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF9AE, "M", "瑩"), - (0xF9AF, "M", "羚"), - (0xF9B0, "M", "聆"), - (0xF9B1, "M", "鈴"), - (0xF9B2, "M", "零"), - (0xF9B3, "M", "靈"), - (0xF9B4, "M", "領"), - (0xF9B5, "M", "例"), - (0xF9B6, "M", "禮"), - (0xF9B7, "M", "醴"), - (0xF9B8, "M", "隸"), - (0xF9B9, "M", "惡"), - (0xF9BA, "M", "了"), - (0xF9BB, "M", "僚"), - (0xF9BC, "M", "寮"), - (0xF9BD, "M", "尿"), - (0xF9BE, "M", "料"), - (0xF9BF, "M", "樂"), - (0xF9C0, "M", "燎"), - (0xF9C1, "M", "療"), - (0xF9C2, "M", "蓼"), - (0xF9C3, "M", "遼"), - (0xF9C4, "M", "龍"), - (0xF9C5, "M", "暈"), - (0xF9C6, "M", "阮"), - (0xF9C7, "M", "劉"), - (0xF9C8, "M", "杻"), - (0xF9C9, "M", "柳"), - (0xF9CA, "M", "流"), - (0xF9CB, "M", "溜"), - (0xF9CC, "M", "琉"), - (0xF9CD, "M", "留"), - (0xF9CE, "M", "硫"), - (0xF9CF, 
"M", "紐"), - (0xF9D0, "M", "類"), - (0xF9D1, "M", "六"), - (0xF9D2, "M", "戮"), - (0xF9D3, "M", "陸"), - (0xF9D4, "M", "倫"), - (0xF9D5, "M", "崙"), - (0xF9D6, "M", "淪"), - (0xF9D7, "M", "輪"), - (0xF9D8, "M", "律"), - (0xF9D9, "M", "慄"), - (0xF9DA, "M", "栗"), - (0xF9DB, "M", "率"), - (0xF9DC, "M", "隆"), - (0xF9DD, "M", "利"), - (0xF9DE, "M", "吏"), - (0xF9DF, "M", "履"), - (0xF9E0, "M", "易"), - (0xF9E1, "M", "李"), - (0xF9E2, "M", "梨"), - (0xF9E3, "M", "泥"), - (0xF9E4, "M", "理"), - (0xF9E5, "M", "痢"), - (0xF9E6, "M", "罹"), - (0xF9E7, "M", "裏"), - (0xF9E8, "M", "裡"), - (0xF9E9, "M", "里"), - (0xF9EA, "M", "離"), - (0xF9EB, "M", "匿"), - (0xF9EC, "M", "溺"), - (0xF9ED, "M", "吝"), - (0xF9EE, "M", "燐"), - (0xF9EF, "M", "璘"), - (0xF9F0, "M", "藺"), - (0xF9F1, "M", "隣"), - (0xF9F2, "M", "鱗"), - (0xF9F3, "M", "麟"), - (0xF9F4, "M", "林"), - (0xF9F5, "M", "淋"), - (0xF9F6, "M", "臨"), - (0xF9F7, "M", "立"), - (0xF9F8, "M", "笠"), - (0xF9F9, "M", "粒"), - (0xF9FA, "M", "狀"), - (0xF9FB, "M", "炙"), - (0xF9FC, "M", "識"), - (0xF9FD, "M", "什"), - (0xF9FE, "M", "茶"), - (0xF9FF, "M", "刺"), - (0xFA00, "M", "切"), - (0xFA01, "M", "度"), - (0xFA02, "M", "拓"), - (0xFA03, "M", "糖"), - (0xFA04, "M", "宅"), - (0xFA05, "M", "洞"), - (0xFA06, "M", "暴"), - (0xFA07, "M", "輻"), - (0xFA08, "M", "行"), - (0xFA09, "M", "降"), - (0xFA0A, "M", "見"), - (0xFA0B, "M", "廓"), - (0xFA0C, "M", "兀"), - (0xFA0D, "M", "嗀"), - (0xFA0E, "V"), - (0xFA10, "M", "塚"), - (0xFA11, "V"), - (0xFA12, "M", "晴"), - ] - - -def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFA13, "V"), - (0xFA15, "M", "凞"), - (0xFA16, "M", "猪"), - (0xFA17, "M", "益"), - (0xFA18, "M", "礼"), - (0xFA19, "M", "神"), - (0xFA1A, "M", "祥"), - (0xFA1B, "M", "福"), - (0xFA1C, "M", "靖"), - (0xFA1D, "M", "精"), - (0xFA1E, "M", "羽"), - (0xFA1F, "V"), - (0xFA20, "M", "蘒"), - (0xFA21, "V"), - (0xFA22, "M", "諸"), - (0xFA23, "V"), - (0xFA25, "M", "逸"), - (0xFA26, "M", "都"), - (0xFA27, "V"), - (0xFA2A, "M", "飯"), - (0xFA2B, "M", "飼"), - (0xFA2C, "M", "館"), - (0xFA2D, "M", "鶴"), - (0xFA2E, "M", "郞"), - (0xFA2F, "M", "隷"), - (0xFA30, "M", "侮"), - (0xFA31, "M", "僧"), - (0xFA32, "M", "免"), - (0xFA33, "M", "勉"), - (0xFA34, "M", "勤"), - (0xFA35, "M", "卑"), - (0xFA36, "M", "喝"), - (0xFA37, "M", "嘆"), - (0xFA38, "M", "器"), - (0xFA39, "M", "塀"), - (0xFA3A, "M", "墨"), - (0xFA3B, "M", "層"), - (0xFA3C, "M", "屮"), - (0xFA3D, "M", "悔"), - (0xFA3E, "M", "慨"), - (0xFA3F, "M", "憎"), - (0xFA40, "M", "懲"), - (0xFA41, "M", "敏"), - (0xFA42, "M", "既"), - (0xFA43, "M", "暑"), - (0xFA44, "M", "梅"), - (0xFA45, "M", "海"), - (0xFA46, "M", "渚"), - (0xFA47, "M", "漢"), - (0xFA48, "M", "煮"), - (0xFA49, "M", "爫"), - (0xFA4A, "M", "琢"), - (0xFA4B, "M", "碑"), - (0xFA4C, "M", "社"), - (0xFA4D, "M", "祉"), - (0xFA4E, "M", "祈"), - (0xFA4F, "M", "祐"), - (0xFA50, "M", "祖"), - (0xFA51, "M", "祝"), - (0xFA52, "M", "禍"), - (0xFA53, "M", "禎"), - (0xFA54, "M", "穀"), - (0xFA55, "M", "突"), - (0xFA56, "M", "節"), - (0xFA57, "M", "練"), - (0xFA58, "M", "縉"), - (0xFA59, "M", "繁"), - (0xFA5A, "M", "署"), - (0xFA5B, "M", "者"), - (0xFA5C, "M", "臭"), - (0xFA5D, "M", "艹"), - (0xFA5F, "M", "著"), - (0xFA60, "M", "褐"), - (0xFA61, "M", "視"), - (0xFA62, "M", "謁"), - (0xFA63, "M", "謹"), - (0xFA64, "M", "賓"), - (0xFA65, "M", "贈"), - (0xFA66, "M", "辶"), - (0xFA67, "M", "逸"), - (0xFA68, "M", "難"), - (0xFA69, "M", "響"), - (0xFA6A, "M", "頻"), - (0xFA6B, "M", "恵"), - (0xFA6C, "M", "𤋮"), - (0xFA6D, "M", "舘"), - (0xFA6E, "X"), - (0xFA70, "M", "並"), - (0xFA71, "M", "况"), - (0xFA72, "M", "全"), - (0xFA73, "M", "侀"), - (0xFA74, "M", "充"), - (0xFA75, "M", 
"冀"), - (0xFA76, "M", "勇"), - (0xFA77, "M", "勺"), - (0xFA78, "M", "喝"), - (0xFA79, "M", "啕"), - (0xFA7A, "M", "喙"), - (0xFA7B, "M", "嗢"), - (0xFA7C, "M", "塚"), - ] - - -def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFA7D, "M", "墳"), - (0xFA7E, "M", "奄"), - (0xFA7F, "M", "奔"), - (0xFA80, "M", "婢"), - (0xFA81, "M", "嬨"), - (0xFA82, "M", "廒"), - (0xFA83, "M", "廙"), - (0xFA84, "M", "彩"), - (0xFA85, "M", "徭"), - (0xFA86, "M", "惘"), - (0xFA87, "M", "慎"), - (0xFA88, "M", "愈"), - (0xFA89, "M", "憎"), - (0xFA8A, "M", "慠"), - (0xFA8B, "M", "懲"), - (0xFA8C, "M", "戴"), - (0xFA8D, "M", "揄"), - (0xFA8E, "M", "搜"), - (0xFA8F, "M", "摒"), - (0xFA90, "M", "敖"), - (0xFA91, "M", "晴"), - (0xFA92, "M", "朗"), - (0xFA93, "M", "望"), - (0xFA94, "M", "杖"), - (0xFA95, "M", "歹"), - (0xFA96, "M", "殺"), - (0xFA97, "M", "流"), - (0xFA98, "M", "滛"), - (0xFA99, "M", "滋"), - (0xFA9A, "M", "漢"), - (0xFA9B, "M", "瀞"), - (0xFA9C, "M", "煮"), - (0xFA9D, "M", "瞧"), - (0xFA9E, "M", "爵"), - (0xFA9F, "M", "犯"), - (0xFAA0, "M", "猪"), - (0xFAA1, "M", "瑱"), - (0xFAA2, "M", "甆"), - (0xFAA3, "M", "画"), - (0xFAA4, "M", "瘝"), - (0xFAA5, "M", "瘟"), - (0xFAA6, "M", "益"), - (0xFAA7, "M", "盛"), - (0xFAA8, "M", "直"), - (0xFAA9, "M", "睊"), - (0xFAAA, "M", "着"), - (0xFAAB, "M", "磌"), - (0xFAAC, "M", "窱"), - (0xFAAD, "M", "節"), - (0xFAAE, "M", "类"), - (0xFAAF, "M", "絛"), - (0xFAB0, "M", "練"), - (0xFAB1, "M", "缾"), - (0xFAB2, "M", "者"), - (0xFAB3, "M", "荒"), - (0xFAB4, "M", "華"), - (0xFAB5, "M", "蝹"), - (0xFAB6, "M", "襁"), - (0xFAB7, "M", "覆"), - (0xFAB8, "M", "視"), - (0xFAB9, "M", "調"), - (0xFABA, "M", "諸"), - (0xFABB, "M", "請"), - (0xFABC, "M", "謁"), - (0xFABD, "M", "諾"), - (0xFABE, "M", "諭"), - (0xFABF, "M", "謹"), - (0xFAC0, "M", "變"), - (0xFAC1, "M", "贈"), - (0xFAC2, "M", "輸"), - (0xFAC3, "M", "遲"), - (0xFAC4, "M", "醙"), - (0xFAC5, "M", "鉶"), - (0xFAC6, "M", "陼"), - (0xFAC7, "M", "難"), - (0xFAC8, "M", "靖"), - (0xFAC9, "M", "韛"), - (0xFACA, "M", "響"), - (0xFACB, "M", "頋"), - (0xFACC, "M", "頻"), - (0xFACD, "M", "鬒"), - (0xFACE, "M", "龜"), - (0xFACF, "M", "𢡊"), - (0xFAD0, "M", "𢡄"), - (0xFAD1, "M", "𣏕"), - (0xFAD2, "M", "㮝"), - (0xFAD3, "M", "䀘"), - (0xFAD4, "M", "䀹"), - (0xFAD5, "M", "𥉉"), - (0xFAD6, "M", "𥳐"), - (0xFAD7, "M", "𧻓"), - (0xFAD8, "M", "齃"), - (0xFAD9, "M", "龎"), - (0xFADA, "X"), - (0xFB00, "M", "ff"), - (0xFB01, "M", "fi"), - (0xFB02, "M", "fl"), - (0xFB03, "M", "ffi"), - (0xFB04, "M", "ffl"), - (0xFB05, "M", "st"), - ] - - -def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFB07, "X"), - (0xFB13, "M", "մն"), - (0xFB14, "M", "մե"), - (0xFB15, "M", "մի"), - (0xFB16, "M", "վն"), - (0xFB17, "M", "մխ"), - (0xFB18, "X"), - (0xFB1D, "M", "יִ"), - (0xFB1E, "V"), - (0xFB1F, "M", "ײַ"), - (0xFB20, "M", "ע"), - (0xFB21, "M", "א"), - (0xFB22, "M", "ד"), - (0xFB23, "M", "ה"), - (0xFB24, "M", "כ"), - (0xFB25, "M", "ל"), - (0xFB26, "M", "ם"), - (0xFB27, "M", "ר"), - (0xFB28, "M", "ת"), - (0xFB29, "3", "+"), - (0xFB2A, "M", "שׁ"), - (0xFB2B, "M", "שׂ"), - (0xFB2C, "M", "שּׁ"), - (0xFB2D, "M", "שּׂ"), - (0xFB2E, "M", "אַ"), - (0xFB2F, "M", "אָ"), - (0xFB30, "M", "אּ"), - (0xFB31, "M", "בּ"), - (0xFB32, "M", "גּ"), - (0xFB33, "M", "דּ"), - (0xFB34, "M", "הּ"), - (0xFB35, "M", "וּ"), - (0xFB36, "M", "זּ"), - (0xFB37, "X"), - (0xFB38, "M", "טּ"), - (0xFB39, "M", "יּ"), - (0xFB3A, "M", "ךּ"), - (0xFB3B, "M", "כּ"), - (0xFB3C, "M", "לּ"), - (0xFB3D, "X"), - (0xFB3E, "M", "מּ"), - (0xFB3F, "X"), - (0xFB40, "M", "נּ"), - (0xFB41, "M", "סּ"), - (0xFB42, "X"), - (0xFB43, "M", "ףּ"), - 
(0xFB44, "M", "פּ"), - (0xFB45, "X"), - (0xFB46, "M", "צּ"), - (0xFB47, "M", "קּ"), - (0xFB48, "M", "רּ"), - (0xFB49, "M", "שּ"), - (0xFB4A, "M", "תּ"), - (0xFB4B, "M", "וֹ"), - (0xFB4C, "M", "בֿ"), - (0xFB4D, "M", "כֿ"), - (0xFB4E, "M", "פֿ"), - (0xFB4F, "M", "אל"), - (0xFB50, "M", "ٱ"), - (0xFB52, "M", "ٻ"), - (0xFB56, "M", "پ"), - (0xFB5A, "M", "ڀ"), - (0xFB5E, "M", "ٺ"), - (0xFB62, "M", "ٿ"), - (0xFB66, "M", "ٹ"), - (0xFB6A, "M", "ڤ"), - (0xFB6E, "M", "ڦ"), - (0xFB72, "M", "ڄ"), - (0xFB76, "M", "ڃ"), - (0xFB7A, "M", "چ"), - (0xFB7E, "M", "ڇ"), - (0xFB82, "M", "ڍ"), - (0xFB84, "M", "ڌ"), - (0xFB86, "M", "ڎ"), - (0xFB88, "M", "ڈ"), - (0xFB8A, "M", "ژ"), - (0xFB8C, "M", "ڑ"), - (0xFB8E, "M", "ک"), - (0xFB92, "M", "گ"), - (0xFB96, "M", "ڳ"), - (0xFB9A, "M", "ڱ"), - (0xFB9E, "M", "ں"), - (0xFBA0, "M", "ڻ"), - (0xFBA4, "M", "ۀ"), - (0xFBA6, "M", "ہ"), - (0xFBAA, "M", "ھ"), - (0xFBAE, "M", "ے"), - (0xFBB0, "M", "ۓ"), - (0xFBB2, "V"), - (0xFBC3, "X"), - (0xFBD3, "M", "ڭ"), - (0xFBD7, "M", "ۇ"), - (0xFBD9, "M", "ۆ"), - (0xFBDB, "M", "ۈ"), - (0xFBDD, "M", "ۇٴ"), - (0xFBDE, "M", "ۋ"), - (0xFBE0, "M", "ۅ"), - (0xFBE2, "M", "ۉ"), - (0xFBE4, "M", "ې"), - (0xFBE8, "M", "ى"), - ] - - -def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFBEA, "M", "ئا"), - (0xFBEC, "M", "ئە"), - (0xFBEE, "M", "ئو"), - (0xFBF0, "M", "ئۇ"), - (0xFBF2, "M", "ئۆ"), - (0xFBF4, "M", "ئۈ"), - (0xFBF6, "M", "ئې"), - (0xFBF9, "M", "ئى"), - (0xFBFC, "M", "ی"), - (0xFC00, "M", "ئج"), - (0xFC01, "M", "ئح"), - (0xFC02, "M", "ئم"), - (0xFC03, "M", "ئى"), - (0xFC04, "M", "ئي"), - (0xFC05, "M", "بج"), - (0xFC06, "M", "بح"), - (0xFC07, "M", "بخ"), - (0xFC08, "M", "بم"), - (0xFC09, "M", "بى"), - (0xFC0A, "M", "بي"), - (0xFC0B, "M", "تج"), - (0xFC0C, "M", "تح"), - (0xFC0D, "M", "تخ"), - (0xFC0E, "M", "تم"), - (0xFC0F, "M", "تى"), - (0xFC10, "M", "تي"), - (0xFC11, "M", "ثج"), - (0xFC12, "M", "ثم"), - (0xFC13, "M", "ثى"), - (0xFC14, "M", "ثي"), - (0xFC15, "M", "جح"), - (0xFC16, "M", "جم"), - (0xFC17, "M", "حج"), - (0xFC18, "M", "حم"), - (0xFC19, "M", "خج"), - (0xFC1A, "M", "خح"), - (0xFC1B, "M", "خم"), - (0xFC1C, "M", "سج"), - (0xFC1D, "M", "سح"), - (0xFC1E, "M", "سخ"), - (0xFC1F, "M", "سم"), - (0xFC20, "M", "صح"), - (0xFC21, "M", "صم"), - (0xFC22, "M", "ضج"), - (0xFC23, "M", "ضح"), - (0xFC24, "M", "ضخ"), - (0xFC25, "M", "ضم"), - (0xFC26, "M", "طح"), - (0xFC27, "M", "طم"), - (0xFC28, "M", "ظم"), - (0xFC29, "M", "عج"), - (0xFC2A, "M", "عم"), - (0xFC2B, "M", "غج"), - (0xFC2C, "M", "غم"), - (0xFC2D, "M", "فج"), - (0xFC2E, "M", "فح"), - (0xFC2F, "M", "فخ"), - (0xFC30, "M", "فم"), - (0xFC31, "M", "فى"), - (0xFC32, "M", "في"), - (0xFC33, "M", "قح"), - (0xFC34, "M", "قم"), - (0xFC35, "M", "قى"), - (0xFC36, "M", "قي"), - (0xFC37, "M", "كا"), - (0xFC38, "M", "كج"), - (0xFC39, "M", "كح"), - (0xFC3A, "M", "كخ"), - (0xFC3B, "M", "كل"), - (0xFC3C, "M", "كم"), - (0xFC3D, "M", "كى"), - (0xFC3E, "M", "كي"), - (0xFC3F, "M", "لج"), - (0xFC40, "M", "لح"), - (0xFC41, "M", "لخ"), - (0xFC42, "M", "لم"), - (0xFC43, "M", "لى"), - (0xFC44, "M", "لي"), - (0xFC45, "M", "مج"), - (0xFC46, "M", "مح"), - (0xFC47, "M", "مخ"), - (0xFC48, "M", "مم"), - (0xFC49, "M", "مى"), - (0xFC4A, "M", "مي"), - (0xFC4B, "M", "نج"), - (0xFC4C, "M", "نح"), - (0xFC4D, "M", "نخ"), - (0xFC4E, "M", "نم"), - (0xFC4F, "M", "نى"), - (0xFC50, "M", "ني"), - (0xFC51, "M", "هج"), - (0xFC52, "M", "هم"), - (0xFC53, "M", "هى"), - (0xFC54, "M", "هي"), - (0xFC55, "M", "يج"), - (0xFC56, "M", "يح"), - (0xFC57, "M", "يخ"), - (0xFC58, "M", "يم"), - (0xFC59, "M", "يى"), - 
(0xFC5A, "M", "يي"), - ] - - -def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFC5B, "M", "ذٰ"), - (0xFC5C, "M", "رٰ"), - (0xFC5D, "M", "ىٰ"), - (0xFC5E, "3", " ٌّ"), - (0xFC5F, "3", " ٍّ"), - (0xFC60, "3", " َّ"), - (0xFC61, "3", " ُّ"), - (0xFC62, "3", " ِّ"), - (0xFC63, "3", " ّٰ"), - (0xFC64, "M", "ئر"), - (0xFC65, "M", "ئز"), - (0xFC66, "M", "ئم"), - (0xFC67, "M", "ئن"), - (0xFC68, "M", "ئى"), - (0xFC69, "M", "ئي"), - (0xFC6A, "M", "بر"), - (0xFC6B, "M", "بز"), - (0xFC6C, "M", "بم"), - (0xFC6D, "M", "بن"), - (0xFC6E, "M", "بى"), - (0xFC6F, "M", "بي"), - (0xFC70, "M", "تر"), - (0xFC71, "M", "تز"), - (0xFC72, "M", "تم"), - (0xFC73, "M", "تن"), - (0xFC74, "M", "تى"), - (0xFC75, "M", "تي"), - (0xFC76, "M", "ثر"), - (0xFC77, "M", "ثز"), - (0xFC78, "M", "ثم"), - (0xFC79, "M", "ثن"), - (0xFC7A, "M", "ثى"), - (0xFC7B, "M", "ثي"), - (0xFC7C, "M", "فى"), - (0xFC7D, "M", "في"), - (0xFC7E, "M", "قى"), - (0xFC7F, "M", "قي"), - (0xFC80, "M", "كا"), - (0xFC81, "M", "كل"), - (0xFC82, "M", "كم"), - (0xFC83, "M", "كى"), - (0xFC84, "M", "كي"), - (0xFC85, "M", "لم"), - (0xFC86, "M", "لى"), - (0xFC87, "M", "لي"), - (0xFC88, "M", "ما"), - (0xFC89, "M", "مم"), - (0xFC8A, "M", "نر"), - (0xFC8B, "M", "نز"), - (0xFC8C, "M", "نم"), - (0xFC8D, "M", "نن"), - (0xFC8E, "M", "نى"), - (0xFC8F, "M", "ني"), - (0xFC90, "M", "ىٰ"), - (0xFC91, "M", "ير"), - (0xFC92, "M", "يز"), - (0xFC93, "M", "يم"), - (0xFC94, "M", "ين"), - (0xFC95, "M", "يى"), - (0xFC96, "M", "يي"), - (0xFC97, "M", "ئج"), - (0xFC98, "M", "ئح"), - (0xFC99, "M", "ئخ"), - (0xFC9A, "M", "ئم"), - (0xFC9B, "M", "ئه"), - (0xFC9C, "M", "بج"), - (0xFC9D, "M", "بح"), - (0xFC9E, "M", "بخ"), - (0xFC9F, "M", "بم"), - (0xFCA0, "M", "به"), - (0xFCA1, "M", "تج"), - (0xFCA2, "M", "تح"), - (0xFCA3, "M", "تخ"), - (0xFCA4, "M", "تم"), - (0xFCA5, "M", "ته"), - (0xFCA6, "M", "ثم"), - (0xFCA7, "M", "جح"), - (0xFCA8, "M", "جم"), - (0xFCA9, "M", "حج"), - (0xFCAA, "M", "حم"), - (0xFCAB, "M", "خج"), - (0xFCAC, "M", "خم"), - (0xFCAD, "M", "سج"), - (0xFCAE, "M", "سح"), - (0xFCAF, "M", "سخ"), - (0xFCB0, "M", "سم"), - (0xFCB1, "M", "صح"), - (0xFCB2, "M", "صخ"), - (0xFCB3, "M", "صم"), - (0xFCB4, "M", "ضج"), - (0xFCB5, "M", "ضح"), - (0xFCB6, "M", "ضخ"), - (0xFCB7, "M", "ضم"), - (0xFCB8, "M", "طح"), - (0xFCB9, "M", "ظم"), - (0xFCBA, "M", "عج"), - (0xFCBB, "M", "عم"), - (0xFCBC, "M", "غج"), - (0xFCBD, "M", "غم"), - (0xFCBE, "M", "فج"), - ] - - -def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFCBF, "M", "فح"), - (0xFCC0, "M", "فخ"), - (0xFCC1, "M", "فم"), - (0xFCC2, "M", "قح"), - (0xFCC3, "M", "قم"), - (0xFCC4, "M", "كج"), - (0xFCC5, "M", "كح"), - (0xFCC6, "M", "كخ"), - (0xFCC7, "M", "كل"), - (0xFCC8, "M", "كم"), - (0xFCC9, "M", "لج"), - (0xFCCA, "M", "لح"), - (0xFCCB, "M", "لخ"), - (0xFCCC, "M", "لم"), - (0xFCCD, "M", "له"), - (0xFCCE, "M", "مج"), - (0xFCCF, "M", "مح"), - (0xFCD0, "M", "مخ"), - (0xFCD1, "M", "مم"), - (0xFCD2, "M", "نج"), - (0xFCD3, "M", "نح"), - (0xFCD4, "M", "نخ"), - (0xFCD5, "M", "نم"), - (0xFCD6, "M", "نه"), - (0xFCD7, "M", "هج"), - (0xFCD8, "M", "هم"), - (0xFCD9, "M", "هٰ"), - (0xFCDA, "M", "يج"), - (0xFCDB, "M", "يح"), - (0xFCDC, "M", "يخ"), - (0xFCDD, "M", "يم"), - (0xFCDE, "M", "يه"), - (0xFCDF, "M", "ئم"), - (0xFCE0, "M", "ئه"), - (0xFCE1, "M", "بم"), - (0xFCE2, "M", "به"), - (0xFCE3, "M", "تم"), - (0xFCE4, "M", "ته"), - (0xFCE5, "M", "ثم"), - (0xFCE6, "M", "ثه"), - (0xFCE7, "M", "سم"), - (0xFCE8, "M", "سه"), - (0xFCE9, "M", "شم"), - (0xFCEA, "M", "شه"), - (0xFCEB, "M", "كل"), - (0xFCEC, 
"M", "كم"), - (0xFCED, "M", "لم"), - (0xFCEE, "M", "نم"), - (0xFCEF, "M", "نه"), - (0xFCF0, "M", "يم"), - (0xFCF1, "M", "يه"), - (0xFCF2, "M", "ـَّ"), - (0xFCF3, "M", "ـُّ"), - (0xFCF4, "M", "ـِّ"), - (0xFCF5, "M", "طى"), - (0xFCF6, "M", "طي"), - (0xFCF7, "M", "عى"), - (0xFCF8, "M", "عي"), - (0xFCF9, "M", "غى"), - (0xFCFA, "M", "غي"), - (0xFCFB, "M", "سى"), - (0xFCFC, "M", "سي"), - (0xFCFD, "M", "شى"), - (0xFCFE, "M", "شي"), - (0xFCFF, "M", "حى"), - (0xFD00, "M", "حي"), - (0xFD01, "M", "جى"), - (0xFD02, "M", "جي"), - (0xFD03, "M", "خى"), - (0xFD04, "M", "خي"), - (0xFD05, "M", "صى"), - (0xFD06, "M", "صي"), - (0xFD07, "M", "ضى"), - (0xFD08, "M", "ضي"), - (0xFD09, "M", "شج"), - (0xFD0A, "M", "شح"), - (0xFD0B, "M", "شخ"), - (0xFD0C, "M", "شم"), - (0xFD0D, "M", "شر"), - (0xFD0E, "M", "سر"), - (0xFD0F, "M", "صر"), - (0xFD10, "M", "ضر"), - (0xFD11, "M", "طى"), - (0xFD12, "M", "طي"), - (0xFD13, "M", "عى"), - (0xFD14, "M", "عي"), - (0xFD15, "M", "غى"), - (0xFD16, "M", "غي"), - (0xFD17, "M", "سى"), - (0xFD18, "M", "سي"), - (0xFD19, "M", "شى"), - (0xFD1A, "M", "شي"), - (0xFD1B, "M", "حى"), - (0xFD1C, "M", "حي"), - (0xFD1D, "M", "جى"), - (0xFD1E, "M", "جي"), - (0xFD1F, "M", "خى"), - (0xFD20, "M", "خي"), - (0xFD21, "M", "صى"), - (0xFD22, "M", "صي"), - ] - - -def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFD23, "M", "ضى"), - (0xFD24, "M", "ضي"), - (0xFD25, "M", "شج"), - (0xFD26, "M", "شح"), - (0xFD27, "M", "شخ"), - (0xFD28, "M", "شم"), - (0xFD29, "M", "شر"), - (0xFD2A, "M", "سر"), - (0xFD2B, "M", "صر"), - (0xFD2C, "M", "ضر"), - (0xFD2D, "M", "شج"), - (0xFD2E, "M", "شح"), - (0xFD2F, "M", "شخ"), - (0xFD30, "M", "شم"), - (0xFD31, "M", "سه"), - (0xFD32, "M", "شه"), - (0xFD33, "M", "طم"), - (0xFD34, "M", "سج"), - (0xFD35, "M", "سح"), - (0xFD36, "M", "سخ"), - (0xFD37, "M", "شج"), - (0xFD38, "M", "شح"), - (0xFD39, "M", "شخ"), - (0xFD3A, "M", "طم"), - (0xFD3B, "M", "ظم"), - (0xFD3C, "M", "اً"), - (0xFD3E, "V"), - (0xFD50, "M", "تجم"), - (0xFD51, "M", "تحج"), - (0xFD53, "M", "تحم"), - (0xFD54, "M", "تخم"), - (0xFD55, "M", "تمج"), - (0xFD56, "M", "تمح"), - (0xFD57, "M", "تمخ"), - (0xFD58, "M", "جمح"), - (0xFD5A, "M", "حمي"), - (0xFD5B, "M", "حمى"), - (0xFD5C, "M", "سحج"), - (0xFD5D, "M", "سجح"), - (0xFD5E, "M", "سجى"), - (0xFD5F, "M", "سمح"), - (0xFD61, "M", "سمج"), - (0xFD62, "M", "سمم"), - (0xFD64, "M", "صحح"), - (0xFD66, "M", "صمم"), - (0xFD67, "M", "شحم"), - (0xFD69, "M", "شجي"), - (0xFD6A, "M", "شمخ"), - (0xFD6C, "M", "شمم"), - (0xFD6E, "M", "ضحى"), - (0xFD6F, "M", "ضخم"), - (0xFD71, "M", "طمح"), - (0xFD73, "M", "طمم"), - (0xFD74, "M", "طمي"), - (0xFD75, "M", "عجم"), - (0xFD76, "M", "عمم"), - (0xFD78, "M", "عمى"), - (0xFD79, "M", "غمم"), - (0xFD7A, "M", "غمي"), - (0xFD7B, "M", "غمى"), - (0xFD7C, "M", "فخم"), - (0xFD7E, "M", "قمح"), - (0xFD7F, "M", "قمم"), - (0xFD80, "M", "لحم"), - (0xFD81, "M", "لحي"), - (0xFD82, "M", "لحى"), - (0xFD83, "M", "لجج"), - (0xFD85, "M", "لخم"), - (0xFD87, "M", "لمح"), - (0xFD89, "M", "محج"), - (0xFD8A, "M", "محم"), - (0xFD8B, "M", "محي"), - (0xFD8C, "M", "مجح"), - (0xFD8D, "M", "مجم"), - (0xFD8E, "M", "مخج"), - (0xFD8F, "M", "مخم"), - (0xFD90, "X"), - (0xFD92, "M", "مجخ"), - (0xFD93, "M", "همج"), - (0xFD94, "M", "همم"), - (0xFD95, "M", "نحم"), - (0xFD96, "M", "نحى"), - (0xFD97, "M", "نجم"), - (0xFD99, "M", "نجى"), - (0xFD9A, "M", "نمي"), - (0xFD9B, "M", "نمى"), - (0xFD9C, "M", "يمم"), - (0xFD9E, "M", "بخي"), - (0xFD9F, "M", "تجي"), - (0xFDA0, "M", "تجى"), - (0xFDA1, "M", "تخي"), - (0xFDA2, "M", "تخى"), - (0xFDA3, "M", "تمي"), - (0xFDA4, "M", 
"تمى"), - (0xFDA5, "M", "جمي"), - (0xFDA6, "M", "جحى"), - (0xFDA7, "M", "جمى"), - (0xFDA8, "M", "سخى"), - (0xFDA9, "M", "صحي"), - (0xFDAA, "M", "شحي"), - ] - - -def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFDAB, "M", "ضحي"), - (0xFDAC, "M", "لجي"), - (0xFDAD, "M", "لمي"), - (0xFDAE, "M", "يحي"), - (0xFDAF, "M", "يجي"), - (0xFDB0, "M", "يمي"), - (0xFDB1, "M", "ممي"), - (0xFDB2, "M", "قمي"), - (0xFDB3, "M", "نحي"), - (0xFDB4, "M", "قمح"), - (0xFDB5, "M", "لحم"), - (0xFDB6, "M", "عمي"), - (0xFDB7, "M", "كمي"), - (0xFDB8, "M", "نجح"), - (0xFDB9, "M", "مخي"), - (0xFDBA, "M", "لجم"), - (0xFDBB, "M", "كمم"), - (0xFDBC, "M", "لجم"), - (0xFDBD, "M", "نجح"), - (0xFDBE, "M", "جحي"), - (0xFDBF, "M", "حجي"), - (0xFDC0, "M", "مجي"), - (0xFDC1, "M", "فمي"), - (0xFDC2, "M", "بحي"), - (0xFDC3, "M", "كمم"), - (0xFDC4, "M", "عجم"), - (0xFDC5, "M", "صمم"), - (0xFDC6, "M", "سخي"), - (0xFDC7, "M", "نجي"), - (0xFDC8, "X"), - (0xFDCF, "V"), - (0xFDD0, "X"), - (0xFDF0, "M", "صلے"), - (0xFDF1, "M", "قلے"), - (0xFDF2, "M", "الله"), - (0xFDF3, "M", "اكبر"), - (0xFDF4, "M", "محمد"), - (0xFDF5, "M", "صلعم"), - (0xFDF6, "M", "رسول"), - (0xFDF7, "M", "عليه"), - (0xFDF8, "M", "وسلم"), - (0xFDF9, "M", "صلى"), - (0xFDFA, "3", "صلى الله عليه وسلم"), - (0xFDFB, "3", "جل جلاله"), - (0xFDFC, "M", "ریال"), - (0xFDFD, "V"), - (0xFE00, "I"), - (0xFE10, "3", ","), - (0xFE11, "M", "、"), - (0xFE12, "X"), - (0xFE13, "3", ":"), - (0xFE14, "3", ";"), - (0xFE15, "3", "!"), - (0xFE16, "3", "?"), - (0xFE17, "M", "〖"), - (0xFE18, "M", "〗"), - (0xFE19, "X"), - (0xFE20, "V"), - (0xFE30, "X"), - (0xFE31, "M", "—"), - (0xFE32, "M", "–"), - (0xFE33, "3", "_"), - (0xFE35, "3", "("), - (0xFE36, "3", ")"), - (0xFE37, "3", "{"), - (0xFE38, "3", "}"), - (0xFE39, "M", "〔"), - (0xFE3A, "M", "〕"), - (0xFE3B, "M", "【"), - (0xFE3C, "M", "】"), - (0xFE3D, "M", "《"), - (0xFE3E, "M", "》"), - (0xFE3F, "M", "〈"), - (0xFE40, "M", "〉"), - (0xFE41, "M", "「"), - (0xFE42, "M", "」"), - (0xFE43, "M", "『"), - (0xFE44, "M", "』"), - (0xFE45, "V"), - (0xFE47, "3", "["), - (0xFE48, "3", "]"), - (0xFE49, "3", " ̅"), - (0xFE4D, "3", "_"), - (0xFE50, "3", ","), - (0xFE51, "M", "、"), - (0xFE52, "X"), - (0xFE54, "3", ";"), - (0xFE55, "3", ":"), - (0xFE56, "3", "?"), - (0xFE57, "3", "!"), - (0xFE58, "M", "—"), - (0xFE59, "3", "("), - (0xFE5A, "3", ")"), - (0xFE5B, "3", "{"), - (0xFE5C, "3", "}"), - (0xFE5D, "M", "〔"), - (0xFE5E, "M", "〕"), - (0xFE5F, "3", "#"), - (0xFE60, "3", "&"), - (0xFE61, "3", "*"), - ] - - -def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFE62, "3", "+"), - (0xFE63, "M", "-"), - (0xFE64, "3", "<"), - (0xFE65, "3", ">"), - (0xFE66, "3", "="), - (0xFE67, "X"), - (0xFE68, "3", "\\"), - (0xFE69, "3", "$"), - (0xFE6A, "3", "%"), - (0xFE6B, "3", "@"), - (0xFE6C, "X"), - (0xFE70, "3", " ً"), - (0xFE71, "M", "ـً"), - (0xFE72, "3", " ٌ"), - (0xFE73, "V"), - (0xFE74, "3", " ٍ"), - (0xFE75, "X"), - (0xFE76, "3", " َ"), - (0xFE77, "M", "ـَ"), - (0xFE78, "3", " ُ"), - (0xFE79, "M", "ـُ"), - (0xFE7A, "3", " ِ"), - (0xFE7B, "M", "ـِ"), - (0xFE7C, "3", " ّ"), - (0xFE7D, "M", "ـّ"), - (0xFE7E, "3", " ْ"), - (0xFE7F, "M", "ـْ"), - (0xFE80, "M", "ء"), - (0xFE81, "M", "آ"), - (0xFE83, "M", "أ"), - (0xFE85, "M", "ؤ"), - (0xFE87, "M", "إ"), - (0xFE89, "M", "ئ"), - (0xFE8D, "M", "ا"), - (0xFE8F, "M", "ب"), - (0xFE93, "M", "ة"), - (0xFE95, "M", "ت"), - (0xFE99, "M", "ث"), - (0xFE9D, "M", "ج"), - (0xFEA1, "M", "ح"), - (0xFEA5, "M", "خ"), - (0xFEA9, "M", "د"), - (0xFEAB, "M", "ذ"), - (0xFEAD, "M", "ر"), - 
(0xFEAF, "M", "ز"), - (0xFEB1, "M", "س"), - (0xFEB5, "M", "ش"), - (0xFEB9, "M", "ص"), - (0xFEBD, "M", "ض"), - (0xFEC1, "M", "ط"), - (0xFEC5, "M", "ظ"), - (0xFEC9, "M", "ع"), - (0xFECD, "M", "غ"), - (0xFED1, "M", "ف"), - (0xFED5, "M", "ق"), - (0xFED9, "M", "ك"), - (0xFEDD, "M", "ل"), - (0xFEE1, "M", "م"), - (0xFEE5, "M", "ن"), - (0xFEE9, "M", "ه"), - (0xFEED, "M", "و"), - (0xFEEF, "M", "ى"), - (0xFEF1, "M", "ي"), - (0xFEF5, "M", "لآ"), - (0xFEF7, "M", "لأ"), - (0xFEF9, "M", "لإ"), - (0xFEFB, "M", "لا"), - (0xFEFD, "X"), - (0xFEFF, "I"), - (0xFF00, "X"), - (0xFF01, "3", "!"), - (0xFF02, "3", '"'), - (0xFF03, "3", "#"), - (0xFF04, "3", "$"), - (0xFF05, "3", "%"), - (0xFF06, "3", "&"), - (0xFF07, "3", "'"), - (0xFF08, "3", "("), - (0xFF09, "3", ")"), - (0xFF0A, "3", "*"), - (0xFF0B, "3", "+"), - (0xFF0C, "3", ","), - (0xFF0D, "M", "-"), - (0xFF0E, "M", "."), - (0xFF0F, "3", "/"), - (0xFF10, "M", "0"), - (0xFF11, "M", "1"), - (0xFF12, "M", "2"), - (0xFF13, "M", "3"), - (0xFF14, "M", "4"), - (0xFF15, "M", "5"), - (0xFF16, "M", "6"), - (0xFF17, "M", "7"), - (0xFF18, "M", "8"), - (0xFF19, "M", "9"), - (0xFF1A, "3", ":"), - (0xFF1B, "3", ";"), - (0xFF1C, "3", "<"), - (0xFF1D, "3", "="), - (0xFF1E, "3", ">"), - ] - - -def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFF1F, "3", "?"), - (0xFF20, "3", "@"), - (0xFF21, "M", "a"), - (0xFF22, "M", "b"), - (0xFF23, "M", "c"), - (0xFF24, "M", "d"), - (0xFF25, "M", "e"), - (0xFF26, "M", "f"), - (0xFF27, "M", "g"), - (0xFF28, "M", "h"), - (0xFF29, "M", "i"), - (0xFF2A, "M", "j"), - (0xFF2B, "M", "k"), - (0xFF2C, "M", "l"), - (0xFF2D, "M", "m"), - (0xFF2E, "M", "n"), - (0xFF2F, "M", "o"), - (0xFF30, "M", "p"), - (0xFF31, "M", "q"), - (0xFF32, "M", "r"), - (0xFF33, "M", "s"), - (0xFF34, "M", "t"), - (0xFF35, "M", "u"), - (0xFF36, "M", "v"), - (0xFF37, "M", "w"), - (0xFF38, "M", "x"), - (0xFF39, "M", "y"), - (0xFF3A, "M", "z"), - (0xFF3B, "3", "["), - (0xFF3C, "3", "\\"), - (0xFF3D, "3", "]"), - (0xFF3E, "3", "^"), - (0xFF3F, "3", "_"), - (0xFF40, "3", "`"), - (0xFF41, "M", "a"), - (0xFF42, "M", "b"), - (0xFF43, "M", "c"), - (0xFF44, "M", "d"), - (0xFF45, "M", "e"), - (0xFF46, "M", "f"), - (0xFF47, "M", "g"), - (0xFF48, "M", "h"), - (0xFF49, "M", "i"), - (0xFF4A, "M", "j"), - (0xFF4B, "M", "k"), - (0xFF4C, "M", "l"), - (0xFF4D, "M", "m"), - (0xFF4E, "M", "n"), - (0xFF4F, "M", "o"), - (0xFF50, "M", "p"), - (0xFF51, "M", "q"), - (0xFF52, "M", "r"), - (0xFF53, "M", "s"), - (0xFF54, "M", "t"), - (0xFF55, "M", "u"), - (0xFF56, "M", "v"), - (0xFF57, "M", "w"), - (0xFF58, "M", "x"), - (0xFF59, "M", "y"), - (0xFF5A, "M", "z"), - (0xFF5B, "3", "{"), - (0xFF5C, "3", "|"), - (0xFF5D, "3", "}"), - (0xFF5E, "3", "~"), - (0xFF5F, "M", "⦅"), - (0xFF60, "M", "⦆"), - (0xFF61, "M", "."), - (0xFF62, "M", "「"), - (0xFF63, "M", "」"), - (0xFF64, "M", "、"), - (0xFF65, "M", "・"), - (0xFF66, "M", "ヲ"), - (0xFF67, "M", "ァ"), - (0xFF68, "M", "ィ"), - (0xFF69, "M", "ゥ"), - (0xFF6A, "M", "ェ"), - (0xFF6B, "M", "ォ"), - (0xFF6C, "M", "ャ"), - (0xFF6D, "M", "ュ"), - (0xFF6E, "M", "ョ"), - (0xFF6F, "M", "ッ"), - (0xFF70, "M", "ー"), - (0xFF71, "M", "ア"), - (0xFF72, "M", "イ"), - (0xFF73, "M", "ウ"), - (0xFF74, "M", "エ"), - (0xFF75, "M", "オ"), - (0xFF76, "M", "カ"), - (0xFF77, "M", "キ"), - (0xFF78, "M", "ク"), - (0xFF79, "M", "ケ"), - (0xFF7A, "M", "コ"), - (0xFF7B, "M", "サ"), - (0xFF7C, "M", "シ"), - (0xFF7D, "M", "ス"), - (0xFF7E, "M", "セ"), - (0xFF7F, "M", "ソ"), - (0xFF80, "M", "タ"), - (0xFF81, "M", "チ"), - (0xFF82, "M", "ツ"), - ] - - -def _seg_52() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFF83, "M", "テ"), - (0xFF84, "M", "ト"), - (0xFF85, "M", "ナ"), - (0xFF86, "M", "ニ"), - (0xFF87, "M", "ヌ"), - (0xFF88, "M", "ネ"), - (0xFF89, "M", "ノ"), - (0xFF8A, "M", "ハ"), - (0xFF8B, "M", "ヒ"), - (0xFF8C, "M", "フ"), - (0xFF8D, "M", "ヘ"), - (0xFF8E, "M", "ホ"), - (0xFF8F, "M", "マ"), - (0xFF90, "M", "ミ"), - (0xFF91, "M", "ム"), - (0xFF92, "M", "メ"), - (0xFF93, "M", "モ"), - (0xFF94, "M", "ヤ"), - (0xFF95, "M", "ユ"), - (0xFF96, "M", "ヨ"), - (0xFF97, "M", "ラ"), - (0xFF98, "M", "リ"), - (0xFF99, "M", "ル"), - (0xFF9A, "M", "レ"), - (0xFF9B, "M", "ロ"), - (0xFF9C, "M", "ワ"), - (0xFF9D, "M", "ン"), - (0xFF9E, "M", "゙"), - (0xFF9F, "M", "゚"), - (0xFFA0, "X"), - (0xFFA1, "M", "ᄀ"), - (0xFFA2, "M", "ᄁ"), - (0xFFA3, "M", "ᆪ"), - (0xFFA4, "M", "ᄂ"), - (0xFFA5, "M", "ᆬ"), - (0xFFA6, "M", "ᆭ"), - (0xFFA7, "M", "ᄃ"), - (0xFFA8, "M", "ᄄ"), - (0xFFA9, "M", "ᄅ"), - (0xFFAA, "M", "ᆰ"), - (0xFFAB, "M", "ᆱ"), - (0xFFAC, "M", "ᆲ"), - (0xFFAD, "M", "ᆳ"), - (0xFFAE, "M", "ᆴ"), - (0xFFAF, "M", "ᆵ"), - (0xFFB0, "M", "ᄚ"), - (0xFFB1, "M", "ᄆ"), - (0xFFB2, "M", "ᄇ"), - (0xFFB3, "M", "ᄈ"), - (0xFFB4, "M", "ᄡ"), - (0xFFB5, "M", "ᄉ"), - (0xFFB6, "M", "ᄊ"), - (0xFFB7, "M", "ᄋ"), - (0xFFB8, "M", "ᄌ"), - (0xFFB9, "M", "ᄍ"), - (0xFFBA, "M", "ᄎ"), - (0xFFBB, "M", "ᄏ"), - (0xFFBC, "M", "ᄐ"), - (0xFFBD, "M", "ᄑ"), - (0xFFBE, "M", "ᄒ"), - (0xFFBF, "X"), - (0xFFC2, "M", "ᅡ"), - (0xFFC3, "M", "ᅢ"), - (0xFFC4, "M", "ᅣ"), - (0xFFC5, "M", "ᅤ"), - (0xFFC6, "M", "ᅥ"), - (0xFFC7, "M", "ᅦ"), - (0xFFC8, "X"), - (0xFFCA, "M", "ᅧ"), - (0xFFCB, "M", "ᅨ"), - (0xFFCC, "M", "ᅩ"), - (0xFFCD, "M", "ᅪ"), - (0xFFCE, "M", "ᅫ"), - (0xFFCF, "M", "ᅬ"), - (0xFFD0, "X"), - (0xFFD2, "M", "ᅭ"), - (0xFFD3, "M", "ᅮ"), - (0xFFD4, "M", "ᅯ"), - (0xFFD5, "M", "ᅰ"), - (0xFFD6, "M", "ᅱ"), - (0xFFD7, "M", "ᅲ"), - (0xFFD8, "X"), - (0xFFDA, "M", "ᅳ"), - (0xFFDB, "M", "ᅴ"), - (0xFFDC, "M", "ᅵ"), - (0xFFDD, "X"), - (0xFFE0, "M", "¢"), - (0xFFE1, "M", "£"), - (0xFFE2, "M", "¬"), - (0xFFE3, "3", " ̄"), - (0xFFE4, "M", "¦"), - (0xFFE5, "M", "¥"), - (0xFFE6, "M", "₩"), - (0xFFE7, "X"), - (0xFFE8, "M", "│"), - (0xFFE9, "M", "←"), - (0xFFEA, "M", "↑"), - (0xFFEB, "M", "→"), - (0xFFEC, "M", "↓"), - (0xFFED, "M", "■"), - ] - - -def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFFEE, "M", "○"), - (0xFFEF, "X"), - (0x10000, "V"), - (0x1000C, "X"), - (0x1000D, "V"), - (0x10027, "X"), - (0x10028, "V"), - (0x1003B, "X"), - (0x1003C, "V"), - (0x1003E, "X"), - (0x1003F, "V"), - (0x1004E, "X"), - (0x10050, "V"), - (0x1005E, "X"), - (0x10080, "V"), - (0x100FB, "X"), - (0x10100, "V"), - (0x10103, "X"), - (0x10107, "V"), - (0x10134, "X"), - (0x10137, "V"), - (0x1018F, "X"), - (0x10190, "V"), - (0x1019D, "X"), - (0x101A0, "V"), - (0x101A1, "X"), - (0x101D0, "V"), - (0x101FE, "X"), - (0x10280, "V"), - (0x1029D, "X"), - (0x102A0, "V"), - (0x102D1, "X"), - (0x102E0, "V"), - (0x102FC, "X"), - (0x10300, "V"), - (0x10324, "X"), - (0x1032D, "V"), - (0x1034B, "X"), - (0x10350, "V"), - (0x1037B, "X"), - (0x10380, "V"), - (0x1039E, "X"), - (0x1039F, "V"), - (0x103C4, "X"), - (0x103C8, "V"), - (0x103D6, "X"), - (0x10400, "M", "𐐨"), - (0x10401, "M", "𐐩"), - (0x10402, "M", "𐐪"), - (0x10403, "M", "𐐫"), - (0x10404, "M", "𐐬"), - (0x10405, "M", "𐐭"), - (0x10406, "M", "𐐮"), - (0x10407, "M", "𐐯"), - (0x10408, "M", "𐐰"), - (0x10409, "M", "𐐱"), - (0x1040A, "M", "𐐲"), - (0x1040B, "M", "𐐳"), - (0x1040C, "M", "𐐴"), - (0x1040D, "M", "𐐵"), - (0x1040E, "M", "𐐶"), - (0x1040F, "M", "𐐷"), - (0x10410, "M", "𐐸"), - (0x10411, 
"M", "𐐹"), - (0x10412, "M", "𐐺"), - (0x10413, "M", "𐐻"), - (0x10414, "M", "𐐼"), - (0x10415, "M", "𐐽"), - (0x10416, "M", "𐐾"), - (0x10417, "M", "𐐿"), - (0x10418, "M", "𐑀"), - (0x10419, "M", "𐑁"), - (0x1041A, "M", "𐑂"), - (0x1041B, "M", "𐑃"), - (0x1041C, "M", "𐑄"), - (0x1041D, "M", "𐑅"), - (0x1041E, "M", "𐑆"), - (0x1041F, "M", "𐑇"), - (0x10420, "M", "𐑈"), - (0x10421, "M", "𐑉"), - (0x10422, "M", "𐑊"), - (0x10423, "M", "𐑋"), - (0x10424, "M", "𐑌"), - (0x10425, "M", "𐑍"), - (0x10426, "M", "𐑎"), - (0x10427, "M", "𐑏"), - (0x10428, "V"), - (0x1049E, "X"), - (0x104A0, "V"), - (0x104AA, "X"), - (0x104B0, "M", "𐓘"), - (0x104B1, "M", "𐓙"), - (0x104B2, "M", "𐓚"), - (0x104B3, "M", "𐓛"), - (0x104B4, "M", "𐓜"), - (0x104B5, "M", "𐓝"), - (0x104B6, "M", "𐓞"), - (0x104B7, "M", "𐓟"), - (0x104B8, "M", "𐓠"), - (0x104B9, "M", "𐓡"), - ] - - -def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x104BA, "M", "𐓢"), - (0x104BB, "M", "𐓣"), - (0x104BC, "M", "𐓤"), - (0x104BD, "M", "𐓥"), - (0x104BE, "M", "𐓦"), - (0x104BF, "M", "𐓧"), - (0x104C0, "M", "𐓨"), - (0x104C1, "M", "𐓩"), - (0x104C2, "M", "𐓪"), - (0x104C3, "M", "𐓫"), - (0x104C4, "M", "𐓬"), - (0x104C5, "M", "𐓭"), - (0x104C6, "M", "𐓮"), - (0x104C7, "M", "𐓯"), - (0x104C8, "M", "𐓰"), - (0x104C9, "M", "𐓱"), - (0x104CA, "M", "𐓲"), - (0x104CB, "M", "𐓳"), - (0x104CC, "M", "𐓴"), - (0x104CD, "M", "𐓵"), - (0x104CE, "M", "𐓶"), - (0x104CF, "M", "𐓷"), - (0x104D0, "M", "𐓸"), - (0x104D1, "M", "𐓹"), - (0x104D2, "M", "𐓺"), - (0x104D3, "M", "𐓻"), - (0x104D4, "X"), - (0x104D8, "V"), - (0x104FC, "X"), - (0x10500, "V"), - (0x10528, "X"), - (0x10530, "V"), - (0x10564, "X"), - (0x1056F, "V"), - (0x10570, "M", "𐖗"), - (0x10571, "M", "𐖘"), - (0x10572, "M", "𐖙"), - (0x10573, "M", "𐖚"), - (0x10574, "M", "𐖛"), - (0x10575, "M", "𐖜"), - (0x10576, "M", "𐖝"), - (0x10577, "M", "𐖞"), - (0x10578, "M", "𐖟"), - (0x10579, "M", "𐖠"), - (0x1057A, "M", "𐖡"), - (0x1057B, "X"), - (0x1057C, "M", "𐖣"), - (0x1057D, "M", "𐖤"), - (0x1057E, "M", "𐖥"), - (0x1057F, "M", "𐖦"), - (0x10580, "M", "𐖧"), - (0x10581, "M", "𐖨"), - (0x10582, "M", "𐖩"), - (0x10583, "M", "𐖪"), - (0x10584, "M", "𐖫"), - (0x10585, "M", "𐖬"), - (0x10586, "M", "𐖭"), - (0x10587, "M", "𐖮"), - (0x10588, "M", "𐖯"), - (0x10589, "M", "𐖰"), - (0x1058A, "M", "𐖱"), - (0x1058B, "X"), - (0x1058C, "M", "𐖳"), - (0x1058D, "M", "𐖴"), - (0x1058E, "M", "𐖵"), - (0x1058F, "M", "𐖶"), - (0x10590, "M", "𐖷"), - (0x10591, "M", "𐖸"), - (0x10592, "M", "𐖹"), - (0x10593, "X"), - (0x10594, "M", "𐖻"), - (0x10595, "M", "𐖼"), - (0x10596, "X"), - (0x10597, "V"), - (0x105A2, "X"), - (0x105A3, "V"), - (0x105B2, "X"), - (0x105B3, "V"), - (0x105BA, "X"), - (0x105BB, "V"), - (0x105BD, "X"), - (0x10600, "V"), - (0x10737, "X"), - (0x10740, "V"), - (0x10756, "X"), - (0x10760, "V"), - (0x10768, "X"), - (0x10780, "V"), - (0x10781, "M", "ː"), - (0x10782, "M", "ˑ"), - (0x10783, "M", "æ"), - (0x10784, "M", "ʙ"), - (0x10785, "M", "ɓ"), - (0x10786, "X"), - (0x10787, "M", "ʣ"), - (0x10788, "M", "ꭦ"), - (0x10789, "M", "ʥ"), - (0x1078A, "M", "ʤ"), - (0x1078B, "M", "ɖ"), - (0x1078C, "M", "ɗ"), - ] - - -def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1078D, "M", "ᶑ"), - (0x1078E, "M", "ɘ"), - (0x1078F, "M", "ɞ"), - (0x10790, "M", "ʩ"), - (0x10791, "M", "ɤ"), - (0x10792, "M", "ɢ"), - (0x10793, "M", "ɠ"), - (0x10794, "M", "ʛ"), - (0x10795, "M", "ħ"), - (0x10796, "M", "ʜ"), - (0x10797, "M", "ɧ"), - (0x10798, "M", "ʄ"), - (0x10799, "M", "ʪ"), - (0x1079A, "M", "ʫ"), - (0x1079B, "M", "ɬ"), - (0x1079C, "M", "𝼄"), - (0x1079D, "M", "ꞎ"), - 
(0x1079E, "M", "ɮ"), - (0x1079F, "M", "𝼅"), - (0x107A0, "M", "ʎ"), - (0x107A1, "M", "𝼆"), - (0x107A2, "M", "ø"), - (0x107A3, "M", "ɶ"), - (0x107A4, "M", "ɷ"), - (0x107A5, "M", "q"), - (0x107A6, "M", "ɺ"), - (0x107A7, "M", "𝼈"), - (0x107A8, "M", "ɽ"), - (0x107A9, "M", "ɾ"), - (0x107AA, "M", "ʀ"), - (0x107AB, "M", "ʨ"), - (0x107AC, "M", "ʦ"), - (0x107AD, "M", "ꭧ"), - (0x107AE, "M", "ʧ"), - (0x107AF, "M", "ʈ"), - (0x107B0, "M", "ⱱ"), - (0x107B1, "X"), - (0x107B2, "M", "ʏ"), - (0x107B3, "M", "ʡ"), - (0x107B4, "M", "ʢ"), - (0x107B5, "M", "ʘ"), - (0x107B6, "M", "ǀ"), - (0x107B7, "M", "ǁ"), - (0x107B8, "M", "ǂ"), - (0x107B9, "M", "𝼊"), - (0x107BA, "M", "𝼞"), - (0x107BB, "X"), - (0x10800, "V"), - (0x10806, "X"), - (0x10808, "V"), - (0x10809, "X"), - (0x1080A, "V"), - (0x10836, "X"), - (0x10837, "V"), - (0x10839, "X"), - (0x1083C, "V"), - (0x1083D, "X"), - (0x1083F, "V"), - (0x10856, "X"), - (0x10857, "V"), - (0x1089F, "X"), - (0x108A7, "V"), - (0x108B0, "X"), - (0x108E0, "V"), - (0x108F3, "X"), - (0x108F4, "V"), - (0x108F6, "X"), - (0x108FB, "V"), - (0x1091C, "X"), - (0x1091F, "V"), - (0x1093A, "X"), - (0x1093F, "V"), - (0x10940, "X"), - (0x10980, "V"), - (0x109B8, "X"), - (0x109BC, "V"), - (0x109D0, "X"), - (0x109D2, "V"), - (0x10A04, "X"), - (0x10A05, "V"), - (0x10A07, "X"), - (0x10A0C, "V"), - (0x10A14, "X"), - (0x10A15, "V"), - (0x10A18, "X"), - (0x10A19, "V"), - (0x10A36, "X"), - (0x10A38, "V"), - (0x10A3B, "X"), - (0x10A3F, "V"), - (0x10A49, "X"), - (0x10A50, "V"), - (0x10A59, "X"), - (0x10A60, "V"), - (0x10AA0, "X"), - (0x10AC0, "V"), - (0x10AE7, "X"), - (0x10AEB, "V"), - (0x10AF7, "X"), - (0x10B00, "V"), - ] - - -def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x10B36, "X"), - (0x10B39, "V"), - (0x10B56, "X"), - (0x10B58, "V"), - (0x10B73, "X"), - (0x10B78, "V"), - (0x10B92, "X"), - (0x10B99, "V"), - (0x10B9D, "X"), - (0x10BA9, "V"), - (0x10BB0, "X"), - (0x10C00, "V"), - (0x10C49, "X"), - (0x10C80, "M", "𐳀"), - (0x10C81, "M", "𐳁"), - (0x10C82, "M", "𐳂"), - (0x10C83, "M", "𐳃"), - (0x10C84, "M", "𐳄"), - (0x10C85, "M", "𐳅"), - (0x10C86, "M", "𐳆"), - (0x10C87, "M", "𐳇"), - (0x10C88, "M", "𐳈"), - (0x10C89, "M", "𐳉"), - (0x10C8A, "M", "𐳊"), - (0x10C8B, "M", "𐳋"), - (0x10C8C, "M", "𐳌"), - (0x10C8D, "M", "𐳍"), - (0x10C8E, "M", "𐳎"), - (0x10C8F, "M", "𐳏"), - (0x10C90, "M", "𐳐"), - (0x10C91, "M", "𐳑"), - (0x10C92, "M", "𐳒"), - (0x10C93, "M", "𐳓"), - (0x10C94, "M", "𐳔"), - (0x10C95, "M", "𐳕"), - (0x10C96, "M", "𐳖"), - (0x10C97, "M", "𐳗"), - (0x10C98, "M", "𐳘"), - (0x10C99, "M", "𐳙"), - (0x10C9A, "M", "𐳚"), - (0x10C9B, "M", "𐳛"), - (0x10C9C, "M", "𐳜"), - (0x10C9D, "M", "𐳝"), - (0x10C9E, "M", "𐳞"), - (0x10C9F, "M", "𐳟"), - (0x10CA0, "M", "𐳠"), - (0x10CA1, "M", "𐳡"), - (0x10CA2, "M", "𐳢"), - (0x10CA3, "M", "𐳣"), - (0x10CA4, "M", "𐳤"), - (0x10CA5, "M", "𐳥"), - (0x10CA6, "M", "𐳦"), - (0x10CA7, "M", "𐳧"), - (0x10CA8, "M", "𐳨"), - (0x10CA9, "M", "𐳩"), - (0x10CAA, "M", "𐳪"), - (0x10CAB, "M", "𐳫"), - (0x10CAC, "M", "𐳬"), - (0x10CAD, "M", "𐳭"), - (0x10CAE, "M", "𐳮"), - (0x10CAF, "M", "𐳯"), - (0x10CB0, "M", "𐳰"), - (0x10CB1, "M", "𐳱"), - (0x10CB2, "M", "𐳲"), - (0x10CB3, "X"), - (0x10CC0, "V"), - (0x10CF3, "X"), - (0x10CFA, "V"), - (0x10D28, "X"), - (0x10D30, "V"), - (0x10D3A, "X"), - (0x10E60, "V"), - (0x10E7F, "X"), - (0x10E80, "V"), - (0x10EAA, "X"), - (0x10EAB, "V"), - (0x10EAE, "X"), - (0x10EB0, "V"), - (0x10EB2, "X"), - (0x10EFD, "V"), - (0x10F28, "X"), - (0x10F30, "V"), - (0x10F5A, "X"), - (0x10F70, "V"), - (0x10F8A, "X"), - (0x10FB0, "V"), - (0x10FCC, "X"), - (0x10FE0, 
"V"), - (0x10FF7, "X"), - (0x11000, "V"), - (0x1104E, "X"), - (0x11052, "V"), - (0x11076, "X"), - (0x1107F, "V"), - (0x110BD, "X"), - (0x110BE, "V"), - (0x110C3, "X"), - (0x110D0, "V"), - (0x110E9, "X"), - (0x110F0, "V"), - ] - - -def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x110FA, "X"), - (0x11100, "V"), - (0x11135, "X"), - (0x11136, "V"), - (0x11148, "X"), - (0x11150, "V"), - (0x11177, "X"), - (0x11180, "V"), - (0x111E0, "X"), - (0x111E1, "V"), - (0x111F5, "X"), - (0x11200, "V"), - (0x11212, "X"), - (0x11213, "V"), - (0x11242, "X"), - (0x11280, "V"), - (0x11287, "X"), - (0x11288, "V"), - (0x11289, "X"), - (0x1128A, "V"), - (0x1128E, "X"), - (0x1128F, "V"), - (0x1129E, "X"), - (0x1129F, "V"), - (0x112AA, "X"), - (0x112B0, "V"), - (0x112EB, "X"), - (0x112F0, "V"), - (0x112FA, "X"), - (0x11300, "V"), - (0x11304, "X"), - (0x11305, "V"), - (0x1130D, "X"), - (0x1130F, "V"), - (0x11311, "X"), - (0x11313, "V"), - (0x11329, "X"), - (0x1132A, "V"), - (0x11331, "X"), - (0x11332, "V"), - (0x11334, "X"), - (0x11335, "V"), - (0x1133A, "X"), - (0x1133B, "V"), - (0x11345, "X"), - (0x11347, "V"), - (0x11349, "X"), - (0x1134B, "V"), - (0x1134E, "X"), - (0x11350, "V"), - (0x11351, "X"), - (0x11357, "V"), - (0x11358, "X"), - (0x1135D, "V"), - (0x11364, "X"), - (0x11366, "V"), - (0x1136D, "X"), - (0x11370, "V"), - (0x11375, "X"), - (0x11400, "V"), - (0x1145C, "X"), - (0x1145D, "V"), - (0x11462, "X"), - (0x11480, "V"), - (0x114C8, "X"), - (0x114D0, "V"), - (0x114DA, "X"), - (0x11580, "V"), - (0x115B6, "X"), - (0x115B8, "V"), - (0x115DE, "X"), - (0x11600, "V"), - (0x11645, "X"), - (0x11650, "V"), - (0x1165A, "X"), - (0x11660, "V"), - (0x1166D, "X"), - (0x11680, "V"), - (0x116BA, "X"), - (0x116C0, "V"), - (0x116CA, "X"), - (0x11700, "V"), - (0x1171B, "X"), - (0x1171D, "V"), - (0x1172C, "X"), - (0x11730, "V"), - (0x11747, "X"), - (0x11800, "V"), - (0x1183C, "X"), - (0x118A0, "M", "𑣀"), - (0x118A1, "M", "𑣁"), - (0x118A2, "M", "𑣂"), - (0x118A3, "M", "𑣃"), - (0x118A4, "M", "𑣄"), - (0x118A5, "M", "𑣅"), - (0x118A6, "M", "𑣆"), - (0x118A7, "M", "𑣇"), - (0x118A8, "M", "𑣈"), - (0x118A9, "M", "𑣉"), - (0x118AA, "M", "𑣊"), - ] - - -def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x118AB, "M", "𑣋"), - (0x118AC, "M", "𑣌"), - (0x118AD, "M", "𑣍"), - (0x118AE, "M", "𑣎"), - (0x118AF, "M", "𑣏"), - (0x118B0, "M", "𑣐"), - (0x118B1, "M", "𑣑"), - (0x118B2, "M", "𑣒"), - (0x118B3, "M", "𑣓"), - (0x118B4, "M", "𑣔"), - (0x118B5, "M", "𑣕"), - (0x118B6, "M", "𑣖"), - (0x118B7, "M", "𑣗"), - (0x118B8, "M", "𑣘"), - (0x118B9, "M", "𑣙"), - (0x118BA, "M", "𑣚"), - (0x118BB, "M", "𑣛"), - (0x118BC, "M", "𑣜"), - (0x118BD, "M", "𑣝"), - (0x118BE, "M", "𑣞"), - (0x118BF, "M", "𑣟"), - (0x118C0, "V"), - (0x118F3, "X"), - (0x118FF, "V"), - (0x11907, "X"), - (0x11909, "V"), - (0x1190A, "X"), - (0x1190C, "V"), - (0x11914, "X"), - (0x11915, "V"), - (0x11917, "X"), - (0x11918, "V"), - (0x11936, "X"), - (0x11937, "V"), - (0x11939, "X"), - (0x1193B, "V"), - (0x11947, "X"), - (0x11950, "V"), - (0x1195A, "X"), - (0x119A0, "V"), - (0x119A8, "X"), - (0x119AA, "V"), - (0x119D8, "X"), - (0x119DA, "V"), - (0x119E5, "X"), - (0x11A00, "V"), - (0x11A48, "X"), - (0x11A50, "V"), - (0x11AA3, "X"), - (0x11AB0, "V"), - (0x11AF9, "X"), - (0x11B00, "V"), - (0x11B0A, "X"), - (0x11C00, "V"), - (0x11C09, "X"), - (0x11C0A, "V"), - (0x11C37, "X"), - (0x11C38, "V"), - (0x11C46, "X"), - (0x11C50, "V"), - (0x11C6D, "X"), - (0x11C70, "V"), - (0x11C90, "X"), - (0x11C92, "V"), - (0x11CA8, "X"), - (0x11CA9, "V"), - 
(0x11CB7, "X"), - (0x11D00, "V"), - (0x11D07, "X"), - (0x11D08, "V"), - (0x11D0A, "X"), - (0x11D0B, "V"), - (0x11D37, "X"), - (0x11D3A, "V"), - (0x11D3B, "X"), - (0x11D3C, "V"), - (0x11D3E, "X"), - (0x11D3F, "V"), - (0x11D48, "X"), - (0x11D50, "V"), - (0x11D5A, "X"), - (0x11D60, "V"), - (0x11D66, "X"), - (0x11D67, "V"), - (0x11D69, "X"), - (0x11D6A, "V"), - (0x11D8F, "X"), - (0x11D90, "V"), - (0x11D92, "X"), - (0x11D93, "V"), - (0x11D99, "X"), - (0x11DA0, "V"), - (0x11DAA, "X"), - (0x11EE0, "V"), - (0x11EF9, "X"), - (0x11F00, "V"), - (0x11F11, "X"), - (0x11F12, "V"), - (0x11F3B, "X"), - (0x11F3E, "V"), - ] - - -def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x11F5A, "X"), - (0x11FB0, "V"), - (0x11FB1, "X"), - (0x11FC0, "V"), - (0x11FF2, "X"), - (0x11FFF, "V"), - (0x1239A, "X"), - (0x12400, "V"), - (0x1246F, "X"), - (0x12470, "V"), - (0x12475, "X"), - (0x12480, "V"), - (0x12544, "X"), - (0x12F90, "V"), - (0x12FF3, "X"), - (0x13000, "V"), - (0x13430, "X"), - (0x13440, "V"), - (0x13456, "X"), - (0x14400, "V"), - (0x14647, "X"), - (0x16800, "V"), - (0x16A39, "X"), - (0x16A40, "V"), - (0x16A5F, "X"), - (0x16A60, "V"), - (0x16A6A, "X"), - (0x16A6E, "V"), - (0x16ABF, "X"), - (0x16AC0, "V"), - (0x16ACA, "X"), - (0x16AD0, "V"), - (0x16AEE, "X"), - (0x16AF0, "V"), - (0x16AF6, "X"), - (0x16B00, "V"), - (0x16B46, "X"), - (0x16B50, "V"), - (0x16B5A, "X"), - (0x16B5B, "V"), - (0x16B62, "X"), - (0x16B63, "V"), - (0x16B78, "X"), - (0x16B7D, "V"), - (0x16B90, "X"), - (0x16E40, "M", "𖹠"), - (0x16E41, "M", "𖹡"), - (0x16E42, "M", "𖹢"), - (0x16E43, "M", "𖹣"), - (0x16E44, "M", "𖹤"), - (0x16E45, "M", "𖹥"), - (0x16E46, "M", "𖹦"), - (0x16E47, "M", "𖹧"), - (0x16E48, "M", "𖹨"), - (0x16E49, "M", "𖹩"), - (0x16E4A, "M", "𖹪"), - (0x16E4B, "M", "𖹫"), - (0x16E4C, "M", "𖹬"), - (0x16E4D, "M", "𖹭"), - (0x16E4E, "M", "𖹮"), - (0x16E4F, "M", "𖹯"), - (0x16E50, "M", "𖹰"), - (0x16E51, "M", "𖹱"), - (0x16E52, "M", "𖹲"), - (0x16E53, "M", "𖹳"), - (0x16E54, "M", "𖹴"), - (0x16E55, "M", "𖹵"), - (0x16E56, "M", "𖹶"), - (0x16E57, "M", "𖹷"), - (0x16E58, "M", "𖹸"), - (0x16E59, "M", "𖹹"), - (0x16E5A, "M", "𖹺"), - (0x16E5B, "M", "𖹻"), - (0x16E5C, "M", "𖹼"), - (0x16E5D, "M", "𖹽"), - (0x16E5E, "M", "𖹾"), - (0x16E5F, "M", "𖹿"), - (0x16E60, "V"), - (0x16E9B, "X"), - (0x16F00, "V"), - (0x16F4B, "X"), - (0x16F4F, "V"), - (0x16F88, "X"), - (0x16F8F, "V"), - (0x16FA0, "X"), - (0x16FE0, "V"), - (0x16FE5, "X"), - (0x16FF0, "V"), - (0x16FF2, "X"), - (0x17000, "V"), - (0x187F8, "X"), - (0x18800, "V"), - (0x18CD6, "X"), - (0x18D00, "V"), - (0x18D09, "X"), - (0x1AFF0, "V"), - (0x1AFF4, "X"), - (0x1AFF5, "V"), - (0x1AFFC, "X"), - (0x1AFFD, "V"), - ] - - -def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1AFFF, "X"), - (0x1B000, "V"), - (0x1B123, "X"), - (0x1B132, "V"), - (0x1B133, "X"), - (0x1B150, "V"), - (0x1B153, "X"), - (0x1B155, "V"), - (0x1B156, "X"), - (0x1B164, "V"), - (0x1B168, "X"), - (0x1B170, "V"), - (0x1B2FC, "X"), - (0x1BC00, "V"), - (0x1BC6B, "X"), - (0x1BC70, "V"), - (0x1BC7D, "X"), - (0x1BC80, "V"), - (0x1BC89, "X"), - (0x1BC90, "V"), - (0x1BC9A, "X"), - (0x1BC9C, "V"), - (0x1BCA0, "I"), - (0x1BCA4, "X"), - (0x1CF00, "V"), - (0x1CF2E, "X"), - (0x1CF30, "V"), - (0x1CF47, "X"), - (0x1CF50, "V"), - (0x1CFC4, "X"), - (0x1D000, "V"), - (0x1D0F6, "X"), - (0x1D100, "V"), - (0x1D127, "X"), - (0x1D129, "V"), - (0x1D15E, "M", "𝅗𝅥"), - (0x1D15F, "M", "𝅘𝅥"), - (0x1D160, "M", "𝅘𝅥𝅮"), - (0x1D161, "M", "𝅘𝅥𝅯"), - (0x1D162, "M", "𝅘𝅥𝅰"), - (0x1D163, "M", "𝅘𝅥𝅱"), - (0x1D164, "M", "𝅘𝅥𝅲"), - 
(0x1D165, "V"), - (0x1D173, "X"), - (0x1D17B, "V"), - (0x1D1BB, "M", "𝆹𝅥"), - (0x1D1BC, "M", "𝆺𝅥"), - (0x1D1BD, "M", "𝆹𝅥𝅮"), - (0x1D1BE, "M", "𝆺𝅥𝅮"), - (0x1D1BF, "M", "𝆹𝅥𝅯"), - (0x1D1C0, "M", "𝆺𝅥𝅯"), - (0x1D1C1, "V"), - (0x1D1EB, "X"), - (0x1D200, "V"), - (0x1D246, "X"), - (0x1D2C0, "V"), - (0x1D2D4, "X"), - (0x1D2E0, "V"), - (0x1D2F4, "X"), - (0x1D300, "V"), - (0x1D357, "X"), - (0x1D360, "V"), - (0x1D379, "X"), - (0x1D400, "M", "a"), - (0x1D401, "M", "b"), - (0x1D402, "M", "c"), - (0x1D403, "M", "d"), - (0x1D404, "M", "e"), - (0x1D405, "M", "f"), - (0x1D406, "M", "g"), - (0x1D407, "M", "h"), - (0x1D408, "M", "i"), - (0x1D409, "M", "j"), - (0x1D40A, "M", "k"), - (0x1D40B, "M", "l"), - (0x1D40C, "M", "m"), - (0x1D40D, "M", "n"), - (0x1D40E, "M", "o"), - (0x1D40F, "M", "p"), - (0x1D410, "M", "q"), - (0x1D411, "M", "r"), - (0x1D412, "M", "s"), - (0x1D413, "M", "t"), - (0x1D414, "M", "u"), - (0x1D415, "M", "v"), - (0x1D416, "M", "w"), - (0x1D417, "M", "x"), - (0x1D418, "M", "y"), - (0x1D419, "M", "z"), - (0x1D41A, "M", "a"), - (0x1D41B, "M", "b"), - (0x1D41C, "M", "c"), - (0x1D41D, "M", "d"), - (0x1D41E, "M", "e"), - (0x1D41F, "M", "f"), - (0x1D420, "M", "g"), - (0x1D421, "M", "h"), - (0x1D422, "M", "i"), - (0x1D423, "M", "j"), - (0x1D424, "M", "k"), - ] - - -def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D425, "M", "l"), - (0x1D426, "M", "m"), - (0x1D427, "M", "n"), - (0x1D428, "M", "o"), - (0x1D429, "M", "p"), - (0x1D42A, "M", "q"), - (0x1D42B, "M", "r"), - (0x1D42C, "M", "s"), - (0x1D42D, "M", "t"), - (0x1D42E, "M", "u"), - (0x1D42F, "M", "v"), - (0x1D430, "M", "w"), - (0x1D431, "M", "x"), - (0x1D432, "M", "y"), - (0x1D433, "M", "z"), - (0x1D434, "M", "a"), - (0x1D435, "M", "b"), - (0x1D436, "M", "c"), - (0x1D437, "M", "d"), - (0x1D438, "M", "e"), - (0x1D439, "M", "f"), - (0x1D43A, "M", "g"), - (0x1D43B, "M", "h"), - (0x1D43C, "M", "i"), - (0x1D43D, "M", "j"), - (0x1D43E, "M", "k"), - (0x1D43F, "M", "l"), - (0x1D440, "M", "m"), - (0x1D441, "M", "n"), - (0x1D442, "M", "o"), - (0x1D443, "M", "p"), - (0x1D444, "M", "q"), - (0x1D445, "M", "r"), - (0x1D446, "M", "s"), - (0x1D447, "M", "t"), - (0x1D448, "M", "u"), - (0x1D449, "M", "v"), - (0x1D44A, "M", "w"), - (0x1D44B, "M", "x"), - (0x1D44C, "M", "y"), - (0x1D44D, "M", "z"), - (0x1D44E, "M", "a"), - (0x1D44F, "M", "b"), - (0x1D450, "M", "c"), - (0x1D451, "M", "d"), - (0x1D452, "M", "e"), - (0x1D453, "M", "f"), - (0x1D454, "M", "g"), - (0x1D455, "X"), - (0x1D456, "M", "i"), - (0x1D457, "M", "j"), - (0x1D458, "M", "k"), - (0x1D459, "M", "l"), - (0x1D45A, "M", "m"), - (0x1D45B, "M", "n"), - (0x1D45C, "M", "o"), - (0x1D45D, "M", "p"), - (0x1D45E, "M", "q"), - (0x1D45F, "M", "r"), - (0x1D460, "M", "s"), - (0x1D461, "M", "t"), - (0x1D462, "M", "u"), - (0x1D463, "M", "v"), - (0x1D464, "M", "w"), - (0x1D465, "M", "x"), - (0x1D466, "M", "y"), - (0x1D467, "M", "z"), - (0x1D468, "M", "a"), - (0x1D469, "M", "b"), - (0x1D46A, "M", "c"), - (0x1D46B, "M", "d"), - (0x1D46C, "M", "e"), - (0x1D46D, "M", "f"), - (0x1D46E, "M", "g"), - (0x1D46F, "M", "h"), - (0x1D470, "M", "i"), - (0x1D471, "M", "j"), - (0x1D472, "M", "k"), - (0x1D473, "M", "l"), - (0x1D474, "M", "m"), - (0x1D475, "M", "n"), - (0x1D476, "M", "o"), - (0x1D477, "M", "p"), - (0x1D478, "M", "q"), - (0x1D479, "M", "r"), - (0x1D47A, "M", "s"), - (0x1D47B, "M", "t"), - (0x1D47C, "M", "u"), - (0x1D47D, "M", "v"), - (0x1D47E, "M", "w"), - (0x1D47F, "M", "x"), - (0x1D480, "M", "y"), - (0x1D481, "M", "z"), - (0x1D482, "M", "a"), - (0x1D483, "M", "b"), - (0x1D484, "M", 
"c"), - (0x1D485, "M", "d"), - (0x1D486, "M", "e"), - (0x1D487, "M", "f"), - (0x1D488, "M", "g"), - ] - - -def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D489, "M", "h"), - (0x1D48A, "M", "i"), - (0x1D48B, "M", "j"), - (0x1D48C, "M", "k"), - (0x1D48D, "M", "l"), - (0x1D48E, "M", "m"), - (0x1D48F, "M", "n"), - (0x1D490, "M", "o"), - (0x1D491, "M", "p"), - (0x1D492, "M", "q"), - (0x1D493, "M", "r"), - (0x1D494, "M", "s"), - (0x1D495, "M", "t"), - (0x1D496, "M", "u"), - (0x1D497, "M", "v"), - (0x1D498, "M", "w"), - (0x1D499, "M", "x"), - (0x1D49A, "M", "y"), - (0x1D49B, "M", "z"), - (0x1D49C, "M", "a"), - (0x1D49D, "X"), - (0x1D49E, "M", "c"), - (0x1D49F, "M", "d"), - (0x1D4A0, "X"), - (0x1D4A2, "M", "g"), - (0x1D4A3, "X"), - (0x1D4A5, "M", "j"), - (0x1D4A6, "M", "k"), - (0x1D4A7, "X"), - (0x1D4A9, "M", "n"), - (0x1D4AA, "M", "o"), - (0x1D4AB, "M", "p"), - (0x1D4AC, "M", "q"), - (0x1D4AD, "X"), - (0x1D4AE, "M", "s"), - (0x1D4AF, "M", "t"), - (0x1D4B0, "M", "u"), - (0x1D4B1, "M", "v"), - (0x1D4B2, "M", "w"), - (0x1D4B3, "M", "x"), - (0x1D4B4, "M", "y"), - (0x1D4B5, "M", "z"), - (0x1D4B6, "M", "a"), - (0x1D4B7, "M", "b"), - (0x1D4B8, "M", "c"), - (0x1D4B9, "M", "d"), - (0x1D4BA, "X"), - (0x1D4BB, "M", "f"), - (0x1D4BC, "X"), - (0x1D4BD, "M", "h"), - (0x1D4BE, "M", "i"), - (0x1D4BF, "M", "j"), - (0x1D4C0, "M", "k"), - (0x1D4C1, "M", "l"), - (0x1D4C2, "M", "m"), - (0x1D4C3, "M", "n"), - (0x1D4C4, "X"), - (0x1D4C5, "M", "p"), - (0x1D4C6, "M", "q"), - (0x1D4C7, "M", "r"), - (0x1D4C8, "M", "s"), - (0x1D4C9, "M", "t"), - (0x1D4CA, "M", "u"), - (0x1D4CB, "M", "v"), - (0x1D4CC, "M", "w"), - (0x1D4CD, "M", "x"), - (0x1D4CE, "M", "y"), - (0x1D4CF, "M", "z"), - (0x1D4D0, "M", "a"), - (0x1D4D1, "M", "b"), - (0x1D4D2, "M", "c"), - (0x1D4D3, "M", "d"), - (0x1D4D4, "M", "e"), - (0x1D4D5, "M", "f"), - (0x1D4D6, "M", "g"), - (0x1D4D7, "M", "h"), - (0x1D4D8, "M", "i"), - (0x1D4D9, "M", "j"), - (0x1D4DA, "M", "k"), - (0x1D4DB, "M", "l"), - (0x1D4DC, "M", "m"), - (0x1D4DD, "M", "n"), - (0x1D4DE, "M", "o"), - (0x1D4DF, "M", "p"), - (0x1D4E0, "M", "q"), - (0x1D4E1, "M", "r"), - (0x1D4E2, "M", "s"), - (0x1D4E3, "M", "t"), - (0x1D4E4, "M", "u"), - (0x1D4E5, "M", "v"), - (0x1D4E6, "M", "w"), - (0x1D4E7, "M", "x"), - (0x1D4E8, "M", "y"), - (0x1D4E9, "M", "z"), - (0x1D4EA, "M", "a"), - (0x1D4EB, "M", "b"), - (0x1D4EC, "M", "c"), - (0x1D4ED, "M", "d"), - (0x1D4EE, "M", "e"), - (0x1D4EF, "M", "f"), - ] - - -def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D4F0, "M", "g"), - (0x1D4F1, "M", "h"), - (0x1D4F2, "M", "i"), - (0x1D4F3, "M", "j"), - (0x1D4F4, "M", "k"), - (0x1D4F5, "M", "l"), - (0x1D4F6, "M", "m"), - (0x1D4F7, "M", "n"), - (0x1D4F8, "M", "o"), - (0x1D4F9, "M", "p"), - (0x1D4FA, "M", "q"), - (0x1D4FB, "M", "r"), - (0x1D4FC, "M", "s"), - (0x1D4FD, "M", "t"), - (0x1D4FE, "M", "u"), - (0x1D4FF, "M", "v"), - (0x1D500, "M", "w"), - (0x1D501, "M", "x"), - (0x1D502, "M", "y"), - (0x1D503, "M", "z"), - (0x1D504, "M", "a"), - (0x1D505, "M", "b"), - (0x1D506, "X"), - (0x1D507, "M", "d"), - (0x1D508, "M", "e"), - (0x1D509, "M", "f"), - (0x1D50A, "M", "g"), - (0x1D50B, "X"), - (0x1D50D, "M", "j"), - (0x1D50E, "M", "k"), - (0x1D50F, "M", "l"), - (0x1D510, "M", "m"), - (0x1D511, "M", "n"), - (0x1D512, "M", "o"), - (0x1D513, "M", "p"), - (0x1D514, "M", "q"), - (0x1D515, "X"), - (0x1D516, "M", "s"), - (0x1D517, "M", "t"), - (0x1D518, "M", "u"), - (0x1D519, "M", "v"), - (0x1D51A, "M", "w"), - (0x1D51B, "M", "x"), - (0x1D51C, "M", "y"), - (0x1D51D, "X"), - 
(0x1D51E, "M", "a"), - (0x1D51F, "M", "b"), - (0x1D520, "M", "c"), - (0x1D521, "M", "d"), - (0x1D522, "M", "e"), - (0x1D523, "M", "f"), - (0x1D524, "M", "g"), - (0x1D525, "M", "h"), - (0x1D526, "M", "i"), - (0x1D527, "M", "j"), - (0x1D528, "M", "k"), - (0x1D529, "M", "l"), - (0x1D52A, "M", "m"), - (0x1D52B, "M", "n"), - (0x1D52C, "M", "o"), - (0x1D52D, "M", "p"), - (0x1D52E, "M", "q"), - (0x1D52F, "M", "r"), - (0x1D530, "M", "s"), - (0x1D531, "M", "t"), - (0x1D532, "M", "u"), - (0x1D533, "M", "v"), - (0x1D534, "M", "w"), - (0x1D535, "M", "x"), - (0x1D536, "M", "y"), - (0x1D537, "M", "z"), - (0x1D538, "M", "a"), - (0x1D539, "M", "b"), - (0x1D53A, "X"), - (0x1D53B, "M", "d"), - (0x1D53C, "M", "e"), - (0x1D53D, "M", "f"), - (0x1D53E, "M", "g"), - (0x1D53F, "X"), - (0x1D540, "M", "i"), - (0x1D541, "M", "j"), - (0x1D542, "M", "k"), - (0x1D543, "M", "l"), - (0x1D544, "M", "m"), - (0x1D545, "X"), - (0x1D546, "M", "o"), - (0x1D547, "X"), - (0x1D54A, "M", "s"), - (0x1D54B, "M", "t"), - (0x1D54C, "M", "u"), - (0x1D54D, "M", "v"), - (0x1D54E, "M", "w"), - (0x1D54F, "M", "x"), - (0x1D550, "M", "y"), - (0x1D551, "X"), - (0x1D552, "M", "a"), - (0x1D553, "M", "b"), - (0x1D554, "M", "c"), - (0x1D555, "M", "d"), - (0x1D556, "M", "e"), - ] - - -def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D557, "M", "f"), - (0x1D558, "M", "g"), - (0x1D559, "M", "h"), - (0x1D55A, "M", "i"), - (0x1D55B, "M", "j"), - (0x1D55C, "M", "k"), - (0x1D55D, "M", "l"), - (0x1D55E, "M", "m"), - (0x1D55F, "M", "n"), - (0x1D560, "M", "o"), - (0x1D561, "M", "p"), - (0x1D562, "M", "q"), - (0x1D563, "M", "r"), - (0x1D564, "M", "s"), - (0x1D565, "M", "t"), - (0x1D566, "M", "u"), - (0x1D567, "M", "v"), - (0x1D568, "M", "w"), - (0x1D569, "M", "x"), - (0x1D56A, "M", "y"), - (0x1D56B, "M", "z"), - (0x1D56C, "M", "a"), - (0x1D56D, "M", "b"), - (0x1D56E, "M", "c"), - (0x1D56F, "M", "d"), - (0x1D570, "M", "e"), - (0x1D571, "M", "f"), - (0x1D572, "M", "g"), - (0x1D573, "M", "h"), - (0x1D574, "M", "i"), - (0x1D575, "M", "j"), - (0x1D576, "M", "k"), - (0x1D577, "M", "l"), - (0x1D578, "M", "m"), - (0x1D579, "M", "n"), - (0x1D57A, "M", "o"), - (0x1D57B, "M", "p"), - (0x1D57C, "M", "q"), - (0x1D57D, "M", "r"), - (0x1D57E, "M", "s"), - (0x1D57F, "M", "t"), - (0x1D580, "M", "u"), - (0x1D581, "M", "v"), - (0x1D582, "M", "w"), - (0x1D583, "M", "x"), - (0x1D584, "M", "y"), - (0x1D585, "M", "z"), - (0x1D586, "M", "a"), - (0x1D587, "M", "b"), - (0x1D588, "M", "c"), - (0x1D589, "M", "d"), - (0x1D58A, "M", "e"), - (0x1D58B, "M", "f"), - (0x1D58C, "M", "g"), - (0x1D58D, "M", "h"), - (0x1D58E, "M", "i"), - (0x1D58F, "M", "j"), - (0x1D590, "M", "k"), - (0x1D591, "M", "l"), - (0x1D592, "M", "m"), - (0x1D593, "M", "n"), - (0x1D594, "M", "o"), - (0x1D595, "M", "p"), - (0x1D596, "M", "q"), - (0x1D597, "M", "r"), - (0x1D598, "M", "s"), - (0x1D599, "M", "t"), - (0x1D59A, "M", "u"), - (0x1D59B, "M", "v"), - (0x1D59C, "M", "w"), - (0x1D59D, "M", "x"), - (0x1D59E, "M", "y"), - (0x1D59F, "M", "z"), - (0x1D5A0, "M", "a"), - (0x1D5A1, "M", "b"), - (0x1D5A2, "M", "c"), - (0x1D5A3, "M", "d"), - (0x1D5A4, "M", "e"), - (0x1D5A5, "M", "f"), - (0x1D5A6, "M", "g"), - (0x1D5A7, "M", "h"), - (0x1D5A8, "M", "i"), - (0x1D5A9, "M", "j"), - (0x1D5AA, "M", "k"), - (0x1D5AB, "M", "l"), - (0x1D5AC, "M", "m"), - (0x1D5AD, "M", "n"), - (0x1D5AE, "M", "o"), - (0x1D5AF, "M", "p"), - (0x1D5B0, "M", "q"), - (0x1D5B1, "M", "r"), - (0x1D5B2, "M", "s"), - (0x1D5B3, "M", "t"), - (0x1D5B4, "M", "u"), - (0x1D5B5, "M", "v"), - (0x1D5B6, "M", "w"), - (0x1D5B7, "M", 
"x"), - (0x1D5B8, "M", "y"), - (0x1D5B9, "M", "z"), - (0x1D5BA, "M", "a"), - ] - - -def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D5BB, "M", "b"), - (0x1D5BC, "M", "c"), - (0x1D5BD, "M", "d"), - (0x1D5BE, "M", "e"), - (0x1D5BF, "M", "f"), - (0x1D5C0, "M", "g"), - (0x1D5C1, "M", "h"), - (0x1D5C2, "M", "i"), - (0x1D5C3, "M", "j"), - (0x1D5C4, "M", "k"), - (0x1D5C5, "M", "l"), - (0x1D5C6, "M", "m"), - (0x1D5C7, "M", "n"), - (0x1D5C8, "M", "o"), - (0x1D5C9, "M", "p"), - (0x1D5CA, "M", "q"), - (0x1D5CB, "M", "r"), - (0x1D5CC, "M", "s"), - (0x1D5CD, "M", "t"), - (0x1D5CE, "M", "u"), - (0x1D5CF, "M", "v"), - (0x1D5D0, "M", "w"), - (0x1D5D1, "M", "x"), - (0x1D5D2, "M", "y"), - (0x1D5D3, "M", "z"), - (0x1D5D4, "M", "a"), - (0x1D5D5, "M", "b"), - (0x1D5D6, "M", "c"), - (0x1D5D7, "M", "d"), - (0x1D5D8, "M", "e"), - (0x1D5D9, "M", "f"), - (0x1D5DA, "M", "g"), - (0x1D5DB, "M", "h"), - (0x1D5DC, "M", "i"), - (0x1D5DD, "M", "j"), - (0x1D5DE, "M", "k"), - (0x1D5DF, "M", "l"), - (0x1D5E0, "M", "m"), - (0x1D5E1, "M", "n"), - (0x1D5E2, "M", "o"), - (0x1D5E3, "M", "p"), - (0x1D5E4, "M", "q"), - (0x1D5E5, "M", "r"), - (0x1D5E6, "M", "s"), - (0x1D5E7, "M", "t"), - (0x1D5E8, "M", "u"), - (0x1D5E9, "M", "v"), - (0x1D5EA, "M", "w"), - (0x1D5EB, "M", "x"), - (0x1D5EC, "M", "y"), - (0x1D5ED, "M", "z"), - (0x1D5EE, "M", "a"), - (0x1D5EF, "M", "b"), - (0x1D5F0, "M", "c"), - (0x1D5F1, "M", "d"), - (0x1D5F2, "M", "e"), - (0x1D5F3, "M", "f"), - (0x1D5F4, "M", "g"), - (0x1D5F5, "M", "h"), - (0x1D5F6, "M", "i"), - (0x1D5F7, "M", "j"), - (0x1D5F8, "M", "k"), - (0x1D5F9, "M", "l"), - (0x1D5FA, "M", "m"), - (0x1D5FB, "M", "n"), - (0x1D5FC, "M", "o"), - (0x1D5FD, "M", "p"), - (0x1D5FE, "M", "q"), - (0x1D5FF, "M", "r"), - (0x1D600, "M", "s"), - (0x1D601, "M", "t"), - (0x1D602, "M", "u"), - (0x1D603, "M", "v"), - (0x1D604, "M", "w"), - (0x1D605, "M", "x"), - (0x1D606, "M", "y"), - (0x1D607, "M", "z"), - (0x1D608, "M", "a"), - (0x1D609, "M", "b"), - (0x1D60A, "M", "c"), - (0x1D60B, "M", "d"), - (0x1D60C, "M", "e"), - (0x1D60D, "M", "f"), - (0x1D60E, "M", "g"), - (0x1D60F, "M", "h"), - (0x1D610, "M", "i"), - (0x1D611, "M", "j"), - (0x1D612, "M", "k"), - (0x1D613, "M", "l"), - (0x1D614, "M", "m"), - (0x1D615, "M", "n"), - (0x1D616, "M", "o"), - (0x1D617, "M", "p"), - (0x1D618, "M", "q"), - (0x1D619, "M", "r"), - (0x1D61A, "M", "s"), - (0x1D61B, "M", "t"), - (0x1D61C, "M", "u"), - (0x1D61D, "M", "v"), - (0x1D61E, "M", "w"), - ] - - -def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D61F, "M", "x"), - (0x1D620, "M", "y"), - (0x1D621, "M", "z"), - (0x1D622, "M", "a"), - (0x1D623, "M", "b"), - (0x1D624, "M", "c"), - (0x1D625, "M", "d"), - (0x1D626, "M", "e"), - (0x1D627, "M", "f"), - (0x1D628, "M", "g"), - (0x1D629, "M", "h"), - (0x1D62A, "M", "i"), - (0x1D62B, "M", "j"), - (0x1D62C, "M", "k"), - (0x1D62D, "M", "l"), - (0x1D62E, "M", "m"), - (0x1D62F, "M", "n"), - (0x1D630, "M", "o"), - (0x1D631, "M", "p"), - (0x1D632, "M", "q"), - (0x1D633, "M", "r"), - (0x1D634, "M", "s"), - (0x1D635, "M", "t"), - (0x1D636, "M", "u"), - (0x1D637, "M", "v"), - (0x1D638, "M", "w"), - (0x1D639, "M", "x"), - (0x1D63A, "M", "y"), - (0x1D63B, "M", "z"), - (0x1D63C, "M", "a"), - (0x1D63D, "M", "b"), - (0x1D63E, "M", "c"), - (0x1D63F, "M", "d"), - (0x1D640, "M", "e"), - (0x1D641, "M", "f"), - (0x1D642, "M", "g"), - (0x1D643, "M", "h"), - (0x1D644, "M", "i"), - (0x1D645, "M", "j"), - (0x1D646, "M", "k"), - (0x1D647, "M", "l"), - (0x1D648, "M", "m"), - (0x1D649, "M", "n"), - (0x1D64A, 
"M", "o"), - (0x1D64B, "M", "p"), - (0x1D64C, "M", "q"), - (0x1D64D, "M", "r"), - (0x1D64E, "M", "s"), - (0x1D64F, "M", "t"), - (0x1D650, "M", "u"), - (0x1D651, "M", "v"), - (0x1D652, "M", "w"), - (0x1D653, "M", "x"), - (0x1D654, "M", "y"), - (0x1D655, "M", "z"), - (0x1D656, "M", "a"), - (0x1D657, "M", "b"), - (0x1D658, "M", "c"), - (0x1D659, "M", "d"), - (0x1D65A, "M", "e"), - (0x1D65B, "M", "f"), - (0x1D65C, "M", "g"), - (0x1D65D, "M", "h"), - (0x1D65E, "M", "i"), - (0x1D65F, "M", "j"), - (0x1D660, "M", "k"), - (0x1D661, "M", "l"), - (0x1D662, "M", "m"), - (0x1D663, "M", "n"), - (0x1D664, "M", "o"), - (0x1D665, "M", "p"), - (0x1D666, "M", "q"), - (0x1D667, "M", "r"), - (0x1D668, "M", "s"), - (0x1D669, "M", "t"), - (0x1D66A, "M", "u"), - (0x1D66B, "M", "v"), - (0x1D66C, "M", "w"), - (0x1D66D, "M", "x"), - (0x1D66E, "M", "y"), - (0x1D66F, "M", "z"), - (0x1D670, "M", "a"), - (0x1D671, "M", "b"), - (0x1D672, "M", "c"), - (0x1D673, "M", "d"), - (0x1D674, "M", "e"), - (0x1D675, "M", "f"), - (0x1D676, "M", "g"), - (0x1D677, "M", "h"), - (0x1D678, "M", "i"), - (0x1D679, "M", "j"), - (0x1D67A, "M", "k"), - (0x1D67B, "M", "l"), - (0x1D67C, "M", "m"), - (0x1D67D, "M", "n"), - (0x1D67E, "M", "o"), - (0x1D67F, "M", "p"), - (0x1D680, "M", "q"), - (0x1D681, "M", "r"), - (0x1D682, "M", "s"), - ] - - -def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D683, "M", "t"), - (0x1D684, "M", "u"), - (0x1D685, "M", "v"), - (0x1D686, "M", "w"), - (0x1D687, "M", "x"), - (0x1D688, "M", "y"), - (0x1D689, "M", "z"), - (0x1D68A, "M", "a"), - (0x1D68B, "M", "b"), - (0x1D68C, "M", "c"), - (0x1D68D, "M", "d"), - (0x1D68E, "M", "e"), - (0x1D68F, "M", "f"), - (0x1D690, "M", "g"), - (0x1D691, "M", "h"), - (0x1D692, "M", "i"), - (0x1D693, "M", "j"), - (0x1D694, "M", "k"), - (0x1D695, "M", "l"), - (0x1D696, "M", "m"), - (0x1D697, "M", "n"), - (0x1D698, "M", "o"), - (0x1D699, "M", "p"), - (0x1D69A, "M", "q"), - (0x1D69B, "M", "r"), - (0x1D69C, "M", "s"), - (0x1D69D, "M", "t"), - (0x1D69E, "M", "u"), - (0x1D69F, "M", "v"), - (0x1D6A0, "M", "w"), - (0x1D6A1, "M", "x"), - (0x1D6A2, "M", "y"), - (0x1D6A3, "M", "z"), - (0x1D6A4, "M", "ı"), - (0x1D6A5, "M", "ȷ"), - (0x1D6A6, "X"), - (0x1D6A8, "M", "α"), - (0x1D6A9, "M", "β"), - (0x1D6AA, "M", "γ"), - (0x1D6AB, "M", "δ"), - (0x1D6AC, "M", "ε"), - (0x1D6AD, "M", "ζ"), - (0x1D6AE, "M", "η"), - (0x1D6AF, "M", "θ"), - (0x1D6B0, "M", "ι"), - (0x1D6B1, "M", "κ"), - (0x1D6B2, "M", "λ"), - (0x1D6B3, "M", "μ"), - (0x1D6B4, "M", "ν"), - (0x1D6B5, "M", "ξ"), - (0x1D6B6, "M", "ο"), - (0x1D6B7, "M", "π"), - (0x1D6B8, "M", "ρ"), - (0x1D6B9, "M", "θ"), - (0x1D6BA, "M", "σ"), - (0x1D6BB, "M", "τ"), - (0x1D6BC, "M", "υ"), - (0x1D6BD, "M", "φ"), - (0x1D6BE, "M", "χ"), - (0x1D6BF, "M", "ψ"), - (0x1D6C0, "M", "ω"), - (0x1D6C1, "M", "∇"), - (0x1D6C2, "M", "α"), - (0x1D6C3, "M", "β"), - (0x1D6C4, "M", "γ"), - (0x1D6C5, "M", "δ"), - (0x1D6C6, "M", "ε"), - (0x1D6C7, "M", "ζ"), - (0x1D6C8, "M", "η"), - (0x1D6C9, "M", "θ"), - (0x1D6CA, "M", "ι"), - (0x1D6CB, "M", "κ"), - (0x1D6CC, "M", "λ"), - (0x1D6CD, "M", "μ"), - (0x1D6CE, "M", "ν"), - (0x1D6CF, "M", "ξ"), - (0x1D6D0, "M", "ο"), - (0x1D6D1, "M", "π"), - (0x1D6D2, "M", "ρ"), - (0x1D6D3, "M", "σ"), - (0x1D6D5, "M", "τ"), - (0x1D6D6, "M", "υ"), - (0x1D6D7, "M", "φ"), - (0x1D6D8, "M", "χ"), - (0x1D6D9, "M", "ψ"), - (0x1D6DA, "M", "ω"), - (0x1D6DB, "M", "∂"), - (0x1D6DC, "M", "ε"), - (0x1D6DD, "M", "θ"), - (0x1D6DE, "M", "κ"), - (0x1D6DF, "M", "φ"), - (0x1D6E0, "M", "ρ"), - (0x1D6E1, "M", "π"), - (0x1D6E2, "M", "α"), - 
(0x1D6E3, "M", "β"), - (0x1D6E4, "M", "γ"), - (0x1D6E5, "M", "δ"), - (0x1D6E6, "M", "ε"), - (0x1D6E7, "M", "ζ"), - (0x1D6E8, "M", "η"), - ] - - -def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D6E9, "M", "θ"), - (0x1D6EA, "M", "ι"), - (0x1D6EB, "M", "κ"), - (0x1D6EC, "M", "λ"), - (0x1D6ED, "M", "μ"), - (0x1D6EE, "M", "ν"), - (0x1D6EF, "M", "ξ"), - (0x1D6F0, "M", "ο"), - (0x1D6F1, "M", "π"), - (0x1D6F2, "M", "ρ"), - (0x1D6F3, "M", "θ"), - (0x1D6F4, "M", "σ"), - (0x1D6F5, "M", "τ"), - (0x1D6F6, "M", "υ"), - (0x1D6F7, "M", "φ"), - (0x1D6F8, "M", "χ"), - (0x1D6F9, "M", "ψ"), - (0x1D6FA, "M", "ω"), - (0x1D6FB, "M", "∇"), - (0x1D6FC, "M", "α"), - (0x1D6FD, "M", "β"), - (0x1D6FE, "M", "γ"), - (0x1D6FF, "M", "δ"), - (0x1D700, "M", "ε"), - (0x1D701, "M", "ζ"), - (0x1D702, "M", "η"), - (0x1D703, "M", "θ"), - (0x1D704, "M", "ι"), - (0x1D705, "M", "κ"), - (0x1D706, "M", "λ"), - (0x1D707, "M", "μ"), - (0x1D708, "M", "ν"), - (0x1D709, "M", "ξ"), - (0x1D70A, "M", "ο"), - (0x1D70B, "M", "π"), - (0x1D70C, "M", "ρ"), - (0x1D70D, "M", "σ"), - (0x1D70F, "M", "τ"), - (0x1D710, "M", "υ"), - (0x1D711, "M", "φ"), - (0x1D712, "M", "χ"), - (0x1D713, "M", "ψ"), - (0x1D714, "M", "ω"), - (0x1D715, "M", "∂"), - (0x1D716, "M", "ε"), - (0x1D717, "M", "θ"), - (0x1D718, "M", "κ"), - (0x1D719, "M", "φ"), - (0x1D71A, "M", "ρ"), - (0x1D71B, "M", "π"), - (0x1D71C, "M", "α"), - (0x1D71D, "M", "β"), - (0x1D71E, "M", "γ"), - (0x1D71F, "M", "δ"), - (0x1D720, "M", "ε"), - (0x1D721, "M", "ζ"), - (0x1D722, "M", "η"), - (0x1D723, "M", "θ"), - (0x1D724, "M", "ι"), - (0x1D725, "M", "κ"), - (0x1D726, "M", "λ"), - (0x1D727, "M", "μ"), - (0x1D728, "M", "ν"), - (0x1D729, "M", "ξ"), - (0x1D72A, "M", "ο"), - (0x1D72B, "M", "π"), - (0x1D72C, "M", "ρ"), - (0x1D72D, "M", "θ"), - (0x1D72E, "M", "σ"), - (0x1D72F, "M", "τ"), - (0x1D730, "M", "υ"), - (0x1D731, "M", "φ"), - (0x1D732, "M", "χ"), - (0x1D733, "M", "ψ"), - (0x1D734, "M", "ω"), - (0x1D735, "M", "∇"), - (0x1D736, "M", "α"), - (0x1D737, "M", "β"), - (0x1D738, "M", "γ"), - (0x1D739, "M", "δ"), - (0x1D73A, "M", "ε"), - (0x1D73B, "M", "ζ"), - (0x1D73C, "M", "η"), - (0x1D73D, "M", "θ"), - (0x1D73E, "M", "ι"), - (0x1D73F, "M", "κ"), - (0x1D740, "M", "λ"), - (0x1D741, "M", "μ"), - (0x1D742, "M", "ν"), - (0x1D743, "M", "ξ"), - (0x1D744, "M", "ο"), - (0x1D745, "M", "π"), - (0x1D746, "M", "ρ"), - (0x1D747, "M", "σ"), - (0x1D749, "M", "τ"), - (0x1D74A, "M", "υ"), - (0x1D74B, "M", "φ"), - (0x1D74C, "M", "χ"), - (0x1D74D, "M", "ψ"), - (0x1D74E, "M", "ω"), - ] - - -def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D74F, "M", "∂"), - (0x1D750, "M", "ε"), - (0x1D751, "M", "θ"), - (0x1D752, "M", "κ"), - (0x1D753, "M", "φ"), - (0x1D754, "M", "ρ"), - (0x1D755, "M", "π"), - (0x1D756, "M", "α"), - (0x1D757, "M", "β"), - (0x1D758, "M", "γ"), - (0x1D759, "M", "δ"), - (0x1D75A, "M", "ε"), - (0x1D75B, "M", "ζ"), - (0x1D75C, "M", "η"), - (0x1D75D, "M", "θ"), - (0x1D75E, "M", "ι"), - (0x1D75F, "M", "κ"), - (0x1D760, "M", "λ"), - (0x1D761, "M", "μ"), - (0x1D762, "M", "ν"), - (0x1D763, "M", "ξ"), - (0x1D764, "M", "ο"), - (0x1D765, "M", "π"), - (0x1D766, "M", "ρ"), - (0x1D767, "M", "θ"), - (0x1D768, "M", "σ"), - (0x1D769, "M", "τ"), - (0x1D76A, "M", "υ"), - (0x1D76B, "M", "φ"), - (0x1D76C, "M", "χ"), - (0x1D76D, "M", "ψ"), - (0x1D76E, "M", "ω"), - (0x1D76F, "M", "∇"), - (0x1D770, "M", "α"), - (0x1D771, "M", "β"), - (0x1D772, "M", "γ"), - (0x1D773, "M", "δ"), - (0x1D774, "M", "ε"), - (0x1D775, "M", "ζ"), - (0x1D776, "M", "η"), - (0x1D777, "M", 
"θ"), - (0x1D778, "M", "ι"), - (0x1D779, "M", "κ"), - (0x1D77A, "M", "λ"), - (0x1D77B, "M", "μ"), - (0x1D77C, "M", "ν"), - (0x1D77D, "M", "ξ"), - (0x1D77E, "M", "ο"), - (0x1D77F, "M", "π"), - (0x1D780, "M", "ρ"), - (0x1D781, "M", "σ"), - (0x1D783, "M", "τ"), - (0x1D784, "M", "υ"), - (0x1D785, "M", "φ"), - (0x1D786, "M", "χ"), - (0x1D787, "M", "ψ"), - (0x1D788, "M", "ω"), - (0x1D789, "M", "∂"), - (0x1D78A, "M", "ε"), - (0x1D78B, "M", "θ"), - (0x1D78C, "M", "κ"), - (0x1D78D, "M", "φ"), - (0x1D78E, "M", "ρ"), - (0x1D78F, "M", "π"), - (0x1D790, "M", "α"), - (0x1D791, "M", "β"), - (0x1D792, "M", "γ"), - (0x1D793, "M", "δ"), - (0x1D794, "M", "ε"), - (0x1D795, "M", "ζ"), - (0x1D796, "M", "η"), - (0x1D797, "M", "θ"), - (0x1D798, "M", "ι"), - (0x1D799, "M", "κ"), - (0x1D79A, "M", "λ"), - (0x1D79B, "M", "μ"), - (0x1D79C, "M", "ν"), - (0x1D79D, "M", "ξ"), - (0x1D79E, "M", "ο"), - (0x1D79F, "M", "π"), - (0x1D7A0, "M", "ρ"), - (0x1D7A1, "M", "θ"), - (0x1D7A2, "M", "σ"), - (0x1D7A3, "M", "τ"), - (0x1D7A4, "M", "υ"), - (0x1D7A5, "M", "φ"), - (0x1D7A6, "M", "χ"), - (0x1D7A7, "M", "ψ"), - (0x1D7A8, "M", "ω"), - (0x1D7A9, "M", "∇"), - (0x1D7AA, "M", "α"), - (0x1D7AB, "M", "β"), - (0x1D7AC, "M", "γ"), - (0x1D7AD, "M", "δ"), - (0x1D7AE, "M", "ε"), - (0x1D7AF, "M", "ζ"), - (0x1D7B0, "M", "η"), - (0x1D7B1, "M", "θ"), - (0x1D7B2, "M", "ι"), - (0x1D7B3, "M", "κ"), - ] - - -def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D7B4, "M", "λ"), - (0x1D7B5, "M", "μ"), - (0x1D7B6, "M", "ν"), - (0x1D7B7, "M", "ξ"), - (0x1D7B8, "M", "ο"), - (0x1D7B9, "M", "π"), - (0x1D7BA, "M", "ρ"), - (0x1D7BB, "M", "σ"), - (0x1D7BD, "M", "τ"), - (0x1D7BE, "M", "υ"), - (0x1D7BF, "M", "φ"), - (0x1D7C0, "M", "χ"), - (0x1D7C1, "M", "ψ"), - (0x1D7C2, "M", "ω"), - (0x1D7C3, "M", "∂"), - (0x1D7C4, "M", "ε"), - (0x1D7C5, "M", "θ"), - (0x1D7C6, "M", "κ"), - (0x1D7C7, "M", "φ"), - (0x1D7C8, "M", "ρ"), - (0x1D7C9, "M", "π"), - (0x1D7CA, "M", "ϝ"), - (0x1D7CC, "X"), - (0x1D7CE, "M", "0"), - (0x1D7CF, "M", "1"), - (0x1D7D0, "M", "2"), - (0x1D7D1, "M", "3"), - (0x1D7D2, "M", "4"), - (0x1D7D3, "M", "5"), - (0x1D7D4, "M", "6"), - (0x1D7D5, "M", "7"), - (0x1D7D6, "M", "8"), - (0x1D7D7, "M", "9"), - (0x1D7D8, "M", "0"), - (0x1D7D9, "M", "1"), - (0x1D7DA, "M", "2"), - (0x1D7DB, "M", "3"), - (0x1D7DC, "M", "4"), - (0x1D7DD, "M", "5"), - (0x1D7DE, "M", "6"), - (0x1D7DF, "M", "7"), - (0x1D7E0, "M", "8"), - (0x1D7E1, "M", "9"), - (0x1D7E2, "M", "0"), - (0x1D7E3, "M", "1"), - (0x1D7E4, "M", "2"), - (0x1D7E5, "M", "3"), - (0x1D7E6, "M", "4"), - (0x1D7E7, "M", "5"), - (0x1D7E8, "M", "6"), - (0x1D7E9, "M", "7"), - (0x1D7EA, "M", "8"), - (0x1D7EB, "M", "9"), - (0x1D7EC, "M", "0"), - (0x1D7ED, "M", "1"), - (0x1D7EE, "M", "2"), - (0x1D7EF, "M", "3"), - (0x1D7F0, "M", "4"), - (0x1D7F1, "M", "5"), - (0x1D7F2, "M", "6"), - (0x1D7F3, "M", "7"), - (0x1D7F4, "M", "8"), - (0x1D7F5, "M", "9"), - (0x1D7F6, "M", "0"), - (0x1D7F7, "M", "1"), - (0x1D7F8, "M", "2"), - (0x1D7F9, "M", "3"), - (0x1D7FA, "M", "4"), - (0x1D7FB, "M", "5"), - (0x1D7FC, "M", "6"), - (0x1D7FD, "M", "7"), - (0x1D7FE, "M", "8"), - (0x1D7FF, "M", "9"), - (0x1D800, "V"), - (0x1DA8C, "X"), - (0x1DA9B, "V"), - (0x1DAA0, "X"), - (0x1DAA1, "V"), - (0x1DAB0, "X"), - (0x1DF00, "V"), - (0x1DF1F, "X"), - (0x1DF25, "V"), - (0x1DF2B, "X"), - (0x1E000, "V"), - (0x1E007, "X"), - (0x1E008, "V"), - (0x1E019, "X"), - (0x1E01B, "V"), - (0x1E022, "X"), - (0x1E023, "V"), - (0x1E025, "X"), - (0x1E026, "V"), - (0x1E02B, "X"), - (0x1E030, "M", "а"), - (0x1E031, "M", "б"), - (0x1E032, "M", "в"), 
- (0x1E033, "M", "г"), - (0x1E034, "M", "д"), - (0x1E035, "M", "е"), - (0x1E036, "M", "ж"), - ] - - -def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E037, "M", "з"), - (0x1E038, "M", "и"), - (0x1E039, "M", "к"), - (0x1E03A, "M", "л"), - (0x1E03B, "M", "м"), - (0x1E03C, "M", "о"), - (0x1E03D, "M", "п"), - (0x1E03E, "M", "р"), - (0x1E03F, "M", "с"), - (0x1E040, "M", "т"), - (0x1E041, "M", "у"), - (0x1E042, "M", "ф"), - (0x1E043, "M", "х"), - (0x1E044, "M", "ц"), - (0x1E045, "M", "ч"), - (0x1E046, "M", "ш"), - (0x1E047, "M", "ы"), - (0x1E048, "M", "э"), - (0x1E049, "M", "ю"), - (0x1E04A, "M", "ꚉ"), - (0x1E04B, "M", "ә"), - (0x1E04C, "M", "і"), - (0x1E04D, "M", "ј"), - (0x1E04E, "M", "ө"), - (0x1E04F, "M", "ү"), - (0x1E050, "M", "ӏ"), - (0x1E051, "M", "а"), - (0x1E052, "M", "б"), - (0x1E053, "M", "в"), - (0x1E054, "M", "г"), - (0x1E055, "M", "д"), - (0x1E056, "M", "е"), - (0x1E057, "M", "ж"), - (0x1E058, "M", "з"), - (0x1E059, "M", "и"), - (0x1E05A, "M", "к"), - (0x1E05B, "M", "л"), - (0x1E05C, "M", "о"), - (0x1E05D, "M", "п"), - (0x1E05E, "M", "с"), - (0x1E05F, "M", "у"), - (0x1E060, "M", "ф"), - (0x1E061, "M", "х"), - (0x1E062, "M", "ц"), - (0x1E063, "M", "ч"), - (0x1E064, "M", "ш"), - (0x1E065, "M", "ъ"), - (0x1E066, "M", "ы"), - (0x1E067, "M", "ґ"), - (0x1E068, "M", "і"), - (0x1E069, "M", "ѕ"), - (0x1E06A, "M", "џ"), - (0x1E06B, "M", "ҫ"), - (0x1E06C, "M", "ꙑ"), - (0x1E06D, "M", "ұ"), - (0x1E06E, "X"), - (0x1E08F, "V"), - (0x1E090, "X"), - (0x1E100, "V"), - (0x1E12D, "X"), - (0x1E130, "V"), - (0x1E13E, "X"), - (0x1E140, "V"), - (0x1E14A, "X"), - (0x1E14E, "V"), - (0x1E150, "X"), - (0x1E290, "V"), - (0x1E2AF, "X"), - (0x1E2C0, "V"), - (0x1E2FA, "X"), - (0x1E2FF, "V"), - (0x1E300, "X"), - (0x1E4D0, "V"), - (0x1E4FA, "X"), - (0x1E7E0, "V"), - (0x1E7E7, "X"), - (0x1E7E8, "V"), - (0x1E7EC, "X"), - (0x1E7ED, "V"), - (0x1E7EF, "X"), - (0x1E7F0, "V"), - (0x1E7FF, "X"), - (0x1E800, "V"), - (0x1E8C5, "X"), - (0x1E8C7, "V"), - (0x1E8D7, "X"), - (0x1E900, "M", "𞤢"), - (0x1E901, "M", "𞤣"), - (0x1E902, "M", "𞤤"), - (0x1E903, "M", "𞤥"), - (0x1E904, "M", "𞤦"), - (0x1E905, "M", "𞤧"), - (0x1E906, "M", "𞤨"), - (0x1E907, "M", "𞤩"), - (0x1E908, "M", "𞤪"), - (0x1E909, "M", "𞤫"), - (0x1E90A, "M", "𞤬"), - (0x1E90B, "M", "𞤭"), - (0x1E90C, "M", "𞤮"), - (0x1E90D, "M", "𞤯"), - ] - - -def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E90E, "M", "𞤰"), - (0x1E90F, "M", "𞤱"), - (0x1E910, "M", "𞤲"), - (0x1E911, "M", "𞤳"), - (0x1E912, "M", "𞤴"), - (0x1E913, "M", "𞤵"), - (0x1E914, "M", "𞤶"), - (0x1E915, "M", "𞤷"), - (0x1E916, "M", "𞤸"), - (0x1E917, "M", "𞤹"), - (0x1E918, "M", "𞤺"), - (0x1E919, "M", "𞤻"), - (0x1E91A, "M", "𞤼"), - (0x1E91B, "M", "𞤽"), - (0x1E91C, "M", "𞤾"), - (0x1E91D, "M", "𞤿"), - (0x1E91E, "M", "𞥀"), - (0x1E91F, "M", "𞥁"), - (0x1E920, "M", "𞥂"), - (0x1E921, "M", "𞥃"), - (0x1E922, "V"), - (0x1E94C, "X"), - (0x1E950, "V"), - (0x1E95A, "X"), - (0x1E95E, "V"), - (0x1E960, "X"), - (0x1EC71, "V"), - (0x1ECB5, "X"), - (0x1ED01, "V"), - (0x1ED3E, "X"), - (0x1EE00, "M", "ا"), - (0x1EE01, "M", "ب"), - (0x1EE02, "M", "ج"), - (0x1EE03, "M", "د"), - (0x1EE04, "X"), - (0x1EE05, "M", "و"), - (0x1EE06, "M", "ز"), - (0x1EE07, "M", "ح"), - (0x1EE08, "M", "ط"), - (0x1EE09, "M", "ي"), - (0x1EE0A, "M", "ك"), - (0x1EE0B, "M", "ل"), - (0x1EE0C, "M", "م"), - (0x1EE0D, "M", "ن"), - (0x1EE0E, "M", "س"), - (0x1EE0F, "M", "ع"), - (0x1EE10, "M", "ف"), - (0x1EE11, "M", "ص"), - (0x1EE12, "M", "ق"), - (0x1EE13, "M", "ر"), - (0x1EE14, "M", "ش"), - (0x1EE15, "M", 
"ت"), - (0x1EE16, "M", "ث"), - (0x1EE17, "M", "خ"), - (0x1EE18, "M", "ذ"), - (0x1EE19, "M", "ض"), - (0x1EE1A, "M", "ظ"), - (0x1EE1B, "M", "غ"), - (0x1EE1C, "M", "ٮ"), - (0x1EE1D, "M", "ں"), - (0x1EE1E, "M", "ڡ"), - (0x1EE1F, "M", "ٯ"), - (0x1EE20, "X"), - (0x1EE21, "M", "ب"), - (0x1EE22, "M", "ج"), - (0x1EE23, "X"), - (0x1EE24, "M", "ه"), - (0x1EE25, "X"), - (0x1EE27, "M", "ح"), - (0x1EE28, "X"), - (0x1EE29, "M", "ي"), - (0x1EE2A, "M", "ك"), - (0x1EE2B, "M", "ل"), - (0x1EE2C, "M", "م"), - (0x1EE2D, "M", "ن"), - (0x1EE2E, "M", "س"), - (0x1EE2F, "M", "ع"), - (0x1EE30, "M", "ف"), - (0x1EE31, "M", "ص"), - (0x1EE32, "M", "ق"), - (0x1EE33, "X"), - (0x1EE34, "M", "ش"), - (0x1EE35, "M", "ت"), - (0x1EE36, "M", "ث"), - (0x1EE37, "M", "خ"), - (0x1EE38, "X"), - (0x1EE39, "M", "ض"), - (0x1EE3A, "X"), - (0x1EE3B, "M", "غ"), - (0x1EE3C, "X"), - (0x1EE42, "M", "ج"), - (0x1EE43, "X"), - (0x1EE47, "M", "ح"), - (0x1EE48, "X"), - (0x1EE49, "M", "ي"), - (0x1EE4A, "X"), - (0x1EE4B, "M", "ل"), - (0x1EE4C, "X"), - (0x1EE4D, "M", "ن"), - (0x1EE4E, "M", "س"), - ] - - -def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EE4F, "M", "ع"), - (0x1EE50, "X"), - (0x1EE51, "M", "ص"), - (0x1EE52, "M", "ق"), - (0x1EE53, "X"), - (0x1EE54, "M", "ش"), - (0x1EE55, "X"), - (0x1EE57, "M", "خ"), - (0x1EE58, "X"), - (0x1EE59, "M", "ض"), - (0x1EE5A, "X"), - (0x1EE5B, "M", "غ"), - (0x1EE5C, "X"), - (0x1EE5D, "M", "ں"), - (0x1EE5E, "X"), - (0x1EE5F, "M", "ٯ"), - (0x1EE60, "X"), - (0x1EE61, "M", "ب"), - (0x1EE62, "M", "ج"), - (0x1EE63, "X"), - (0x1EE64, "M", "ه"), - (0x1EE65, "X"), - (0x1EE67, "M", "ح"), - (0x1EE68, "M", "ط"), - (0x1EE69, "M", "ي"), - (0x1EE6A, "M", "ك"), - (0x1EE6B, "X"), - (0x1EE6C, "M", "م"), - (0x1EE6D, "M", "ن"), - (0x1EE6E, "M", "س"), - (0x1EE6F, "M", "ع"), - (0x1EE70, "M", "ف"), - (0x1EE71, "M", "ص"), - (0x1EE72, "M", "ق"), - (0x1EE73, "X"), - (0x1EE74, "M", "ش"), - (0x1EE75, "M", "ت"), - (0x1EE76, "M", "ث"), - (0x1EE77, "M", "خ"), - (0x1EE78, "X"), - (0x1EE79, "M", "ض"), - (0x1EE7A, "M", "ظ"), - (0x1EE7B, "M", "غ"), - (0x1EE7C, "M", "ٮ"), - (0x1EE7D, "X"), - (0x1EE7E, "M", "ڡ"), - (0x1EE7F, "X"), - (0x1EE80, "M", "ا"), - (0x1EE81, "M", "ب"), - (0x1EE82, "M", "ج"), - (0x1EE83, "M", "د"), - (0x1EE84, "M", "ه"), - (0x1EE85, "M", "و"), - (0x1EE86, "M", "ز"), - (0x1EE87, "M", "ح"), - (0x1EE88, "M", "ط"), - (0x1EE89, "M", "ي"), - (0x1EE8A, "X"), - (0x1EE8B, "M", "ل"), - (0x1EE8C, "M", "م"), - (0x1EE8D, "M", "ن"), - (0x1EE8E, "M", "س"), - (0x1EE8F, "M", "ع"), - (0x1EE90, "M", "ف"), - (0x1EE91, "M", "ص"), - (0x1EE92, "M", "ق"), - (0x1EE93, "M", "ر"), - (0x1EE94, "M", "ش"), - (0x1EE95, "M", "ت"), - (0x1EE96, "M", "ث"), - (0x1EE97, "M", "خ"), - (0x1EE98, "M", "ذ"), - (0x1EE99, "M", "ض"), - (0x1EE9A, "M", "ظ"), - (0x1EE9B, "M", "غ"), - (0x1EE9C, "X"), - (0x1EEA1, "M", "ب"), - (0x1EEA2, "M", "ج"), - (0x1EEA3, "M", "د"), - (0x1EEA4, "X"), - (0x1EEA5, "M", "و"), - (0x1EEA6, "M", "ز"), - (0x1EEA7, "M", "ح"), - (0x1EEA8, "M", "ط"), - (0x1EEA9, "M", "ي"), - (0x1EEAA, "X"), - (0x1EEAB, "M", "ل"), - (0x1EEAC, "M", "م"), - (0x1EEAD, "M", "ن"), - (0x1EEAE, "M", "س"), - (0x1EEAF, "M", "ع"), - (0x1EEB0, "M", "ف"), - (0x1EEB1, "M", "ص"), - (0x1EEB2, "M", "ق"), - (0x1EEB3, "M", "ر"), - (0x1EEB4, "M", "ش"), - (0x1EEB5, "M", "ت"), - (0x1EEB6, "M", "ث"), - (0x1EEB7, "M", "خ"), - (0x1EEB8, "M", "ذ"), - ] - - -def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EEB9, "M", "ض"), - (0x1EEBA, "M", "ظ"), - (0x1EEBB, "M", "غ"), - (0x1EEBC, "X"), - (0x1EEF0, "V"), - (0x1EEF2, 
"X"), - (0x1F000, "V"), - (0x1F02C, "X"), - (0x1F030, "V"), - (0x1F094, "X"), - (0x1F0A0, "V"), - (0x1F0AF, "X"), - (0x1F0B1, "V"), - (0x1F0C0, "X"), - (0x1F0C1, "V"), - (0x1F0D0, "X"), - (0x1F0D1, "V"), - (0x1F0F6, "X"), - (0x1F101, "3", "0,"), - (0x1F102, "3", "1,"), - (0x1F103, "3", "2,"), - (0x1F104, "3", "3,"), - (0x1F105, "3", "4,"), - (0x1F106, "3", "5,"), - (0x1F107, "3", "6,"), - (0x1F108, "3", "7,"), - (0x1F109, "3", "8,"), - (0x1F10A, "3", "9,"), - (0x1F10B, "V"), - (0x1F110, "3", "(a)"), - (0x1F111, "3", "(b)"), - (0x1F112, "3", "(c)"), - (0x1F113, "3", "(d)"), - (0x1F114, "3", "(e)"), - (0x1F115, "3", "(f)"), - (0x1F116, "3", "(g)"), - (0x1F117, "3", "(h)"), - (0x1F118, "3", "(i)"), - (0x1F119, "3", "(j)"), - (0x1F11A, "3", "(k)"), - (0x1F11B, "3", "(l)"), - (0x1F11C, "3", "(m)"), - (0x1F11D, "3", "(n)"), - (0x1F11E, "3", "(o)"), - (0x1F11F, "3", "(p)"), - (0x1F120, "3", "(q)"), - (0x1F121, "3", "(r)"), - (0x1F122, "3", "(s)"), - (0x1F123, "3", "(t)"), - (0x1F124, "3", "(u)"), - (0x1F125, "3", "(v)"), - (0x1F126, "3", "(w)"), - (0x1F127, "3", "(x)"), - (0x1F128, "3", "(y)"), - (0x1F129, "3", "(z)"), - (0x1F12A, "M", "〔s〕"), - (0x1F12B, "M", "c"), - (0x1F12C, "M", "r"), - (0x1F12D, "M", "cd"), - (0x1F12E, "M", "wz"), - (0x1F12F, "V"), - (0x1F130, "M", "a"), - (0x1F131, "M", "b"), - (0x1F132, "M", "c"), - (0x1F133, "M", "d"), - (0x1F134, "M", "e"), - (0x1F135, "M", "f"), - (0x1F136, "M", "g"), - (0x1F137, "M", "h"), - (0x1F138, "M", "i"), - (0x1F139, "M", "j"), - (0x1F13A, "M", "k"), - (0x1F13B, "M", "l"), - (0x1F13C, "M", "m"), - (0x1F13D, "M", "n"), - (0x1F13E, "M", "o"), - (0x1F13F, "M", "p"), - (0x1F140, "M", "q"), - (0x1F141, "M", "r"), - (0x1F142, "M", "s"), - (0x1F143, "M", "t"), - (0x1F144, "M", "u"), - (0x1F145, "M", "v"), - (0x1F146, "M", "w"), - (0x1F147, "M", "x"), - (0x1F148, "M", "y"), - (0x1F149, "M", "z"), - (0x1F14A, "M", "hv"), - (0x1F14B, "M", "mv"), - (0x1F14C, "M", "sd"), - (0x1F14D, "M", "ss"), - (0x1F14E, "M", "ppv"), - (0x1F14F, "M", "wc"), - (0x1F150, "V"), - (0x1F16A, "M", "mc"), - (0x1F16B, "M", "md"), - (0x1F16C, "M", "mr"), - (0x1F16D, "V"), - (0x1F190, "M", "dj"), - (0x1F191, "V"), - ] - - -def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F1AE, "X"), - (0x1F1E6, "V"), - (0x1F200, "M", "ほか"), - (0x1F201, "M", "ココ"), - (0x1F202, "M", "サ"), - (0x1F203, "X"), - (0x1F210, "M", "手"), - (0x1F211, "M", "字"), - (0x1F212, "M", "双"), - (0x1F213, "M", "デ"), - (0x1F214, "M", "二"), - (0x1F215, "M", "多"), - (0x1F216, "M", "解"), - (0x1F217, "M", "天"), - (0x1F218, "M", "交"), - (0x1F219, "M", "映"), - (0x1F21A, "M", "無"), - (0x1F21B, "M", "料"), - (0x1F21C, "M", "前"), - (0x1F21D, "M", "後"), - (0x1F21E, "M", "再"), - (0x1F21F, "M", "新"), - (0x1F220, "M", "初"), - (0x1F221, "M", "終"), - (0x1F222, "M", "生"), - (0x1F223, "M", "販"), - (0x1F224, "M", "声"), - (0x1F225, "M", "吹"), - (0x1F226, "M", "演"), - (0x1F227, "M", "投"), - (0x1F228, "M", "捕"), - (0x1F229, "M", "一"), - (0x1F22A, "M", "三"), - (0x1F22B, "M", "遊"), - (0x1F22C, "M", "左"), - (0x1F22D, "M", "中"), - (0x1F22E, "M", "右"), - (0x1F22F, "M", "指"), - (0x1F230, "M", "走"), - (0x1F231, "M", "打"), - (0x1F232, "M", "禁"), - (0x1F233, "M", "空"), - (0x1F234, "M", "合"), - (0x1F235, "M", "満"), - (0x1F236, "M", "有"), - (0x1F237, "M", "月"), - (0x1F238, "M", "申"), - (0x1F239, "M", "割"), - (0x1F23A, "M", "営"), - (0x1F23B, "M", "配"), - (0x1F23C, "X"), - (0x1F240, "M", "〔本〕"), - (0x1F241, "M", "〔三〕"), - (0x1F242, "M", "〔二〕"), - (0x1F243, "M", "〔安〕"), - (0x1F244, "M", "〔点〕"), - (0x1F245, "M", "〔打〕"), 
- (0x1F246, "M", "〔盗〕"), - (0x1F247, "M", "〔勝〕"), - (0x1F248, "M", "〔敗〕"), - (0x1F249, "X"), - (0x1F250, "M", "得"), - (0x1F251, "M", "可"), - (0x1F252, "X"), - (0x1F260, "V"), - (0x1F266, "X"), - (0x1F300, "V"), - (0x1F6D8, "X"), - (0x1F6DC, "V"), - (0x1F6ED, "X"), - (0x1F6F0, "V"), - (0x1F6FD, "X"), - (0x1F700, "V"), - (0x1F777, "X"), - (0x1F77B, "V"), - (0x1F7DA, "X"), - (0x1F7E0, "V"), - (0x1F7EC, "X"), - (0x1F7F0, "V"), - (0x1F7F1, "X"), - (0x1F800, "V"), - (0x1F80C, "X"), - (0x1F810, "V"), - (0x1F848, "X"), - (0x1F850, "V"), - (0x1F85A, "X"), - (0x1F860, "V"), - (0x1F888, "X"), - (0x1F890, "V"), - (0x1F8AE, "X"), - (0x1F8B0, "V"), - (0x1F8B2, "X"), - (0x1F900, "V"), - (0x1FA54, "X"), - (0x1FA60, "V"), - (0x1FA6E, "X"), - (0x1FA70, "V"), - (0x1FA7D, "X"), - (0x1FA80, "V"), - (0x1FA89, "X"), - ] - - -def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1FA90, "V"), - (0x1FABE, "X"), - (0x1FABF, "V"), - (0x1FAC6, "X"), - (0x1FACE, "V"), - (0x1FADC, "X"), - (0x1FAE0, "V"), - (0x1FAE9, "X"), - (0x1FAF0, "V"), - (0x1FAF9, "X"), - (0x1FB00, "V"), - (0x1FB93, "X"), - (0x1FB94, "V"), - (0x1FBCB, "X"), - (0x1FBF0, "M", "0"), - (0x1FBF1, "M", "1"), - (0x1FBF2, "M", "2"), - (0x1FBF3, "M", "3"), - (0x1FBF4, "M", "4"), - (0x1FBF5, "M", "5"), - (0x1FBF6, "M", "6"), - (0x1FBF7, "M", "7"), - (0x1FBF8, "M", "8"), - (0x1FBF9, "M", "9"), - (0x1FBFA, "X"), - (0x20000, "V"), - (0x2A6E0, "X"), - (0x2A700, "V"), - (0x2B73A, "X"), - (0x2B740, "V"), - (0x2B81E, "X"), - (0x2B820, "V"), - (0x2CEA2, "X"), - (0x2CEB0, "V"), - (0x2EBE1, "X"), - (0x2EBF0, "V"), - (0x2EE5E, "X"), - (0x2F800, "M", "丽"), - (0x2F801, "M", "丸"), - (0x2F802, "M", "乁"), - (0x2F803, "M", "𠄢"), - (0x2F804, "M", "你"), - (0x2F805, "M", "侮"), - (0x2F806, "M", "侻"), - (0x2F807, "M", "倂"), - (0x2F808, "M", "偺"), - (0x2F809, "M", "備"), - (0x2F80A, "M", "僧"), - (0x2F80B, "M", "像"), - (0x2F80C, "M", "㒞"), - (0x2F80D, "M", "𠘺"), - (0x2F80E, "M", "免"), - (0x2F80F, "M", "兔"), - (0x2F810, "M", "兤"), - (0x2F811, "M", "具"), - (0x2F812, "M", "𠔜"), - (0x2F813, "M", "㒹"), - (0x2F814, "M", "內"), - (0x2F815, "M", "再"), - (0x2F816, "M", "𠕋"), - (0x2F817, "M", "冗"), - (0x2F818, "M", "冤"), - (0x2F819, "M", "仌"), - (0x2F81A, "M", "冬"), - (0x2F81B, "M", "况"), - (0x2F81C, "M", "𩇟"), - (0x2F81D, "M", "凵"), - (0x2F81E, "M", "刃"), - (0x2F81F, "M", "㓟"), - (0x2F820, "M", "刻"), - (0x2F821, "M", "剆"), - (0x2F822, "M", "割"), - (0x2F823, "M", "剷"), - (0x2F824, "M", "㔕"), - (0x2F825, "M", "勇"), - (0x2F826, "M", "勉"), - (0x2F827, "M", "勤"), - (0x2F828, "M", "勺"), - (0x2F829, "M", "包"), - (0x2F82A, "M", "匆"), - (0x2F82B, "M", "北"), - (0x2F82C, "M", "卉"), - (0x2F82D, "M", "卑"), - (0x2F82E, "M", "博"), - (0x2F82F, "M", "即"), - (0x2F830, "M", "卽"), - (0x2F831, "M", "卿"), - (0x2F834, "M", "𠨬"), - (0x2F835, "M", "灰"), - (0x2F836, "M", "及"), - (0x2F837, "M", "叟"), - (0x2F838, "M", "𠭣"), - (0x2F839, "M", "叫"), - (0x2F83A, "M", "叱"), - (0x2F83B, "M", "吆"), - (0x2F83C, "M", "咞"), - (0x2F83D, "M", "吸"), - (0x2F83E, "M", "呈"), - (0x2F83F, "M", "周"), - (0x2F840, "M", "咢"), - ] - - -def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F841, "M", "哶"), - (0x2F842, "M", "唐"), - (0x2F843, "M", "啓"), - (0x2F844, "M", "啣"), - (0x2F845, "M", "善"), - (0x2F847, "M", "喙"), - (0x2F848, "M", "喫"), - (0x2F849, "M", "喳"), - (0x2F84A, "M", "嗂"), - (0x2F84B, "M", "圖"), - (0x2F84C, "M", "嘆"), - (0x2F84D, "M", "圗"), - (0x2F84E, "M", "噑"), - (0x2F84F, "M", "噴"), - (0x2F850, "M", "切"), - (0x2F851, "M", "壮"), - (0x2F852, "M", "城"), - (0x2F853, 
"M", "埴"), - (0x2F854, "M", "堍"), - (0x2F855, "M", "型"), - (0x2F856, "M", "堲"), - (0x2F857, "M", "報"), - (0x2F858, "M", "墬"), - (0x2F859, "M", "𡓤"), - (0x2F85A, "M", "売"), - (0x2F85B, "M", "壷"), - (0x2F85C, "M", "夆"), - (0x2F85D, "M", "多"), - (0x2F85E, "M", "夢"), - (0x2F85F, "M", "奢"), - (0x2F860, "M", "𡚨"), - (0x2F861, "M", "𡛪"), - (0x2F862, "M", "姬"), - (0x2F863, "M", "娛"), - (0x2F864, "M", "娧"), - (0x2F865, "M", "姘"), - (0x2F866, "M", "婦"), - (0x2F867, "M", "㛮"), - (0x2F868, "X"), - (0x2F869, "M", "嬈"), - (0x2F86A, "M", "嬾"), - (0x2F86C, "M", "𡧈"), - (0x2F86D, "M", "寃"), - (0x2F86E, "M", "寘"), - (0x2F86F, "M", "寧"), - (0x2F870, "M", "寳"), - (0x2F871, "M", "𡬘"), - (0x2F872, "M", "寿"), - (0x2F873, "M", "将"), - (0x2F874, "X"), - (0x2F875, "M", "尢"), - (0x2F876, "M", "㞁"), - (0x2F877, "M", "屠"), - (0x2F878, "M", "屮"), - (0x2F879, "M", "峀"), - (0x2F87A, "M", "岍"), - (0x2F87B, "M", "𡷤"), - (0x2F87C, "M", "嵃"), - (0x2F87D, "M", "𡷦"), - (0x2F87E, "M", "嵮"), - (0x2F87F, "M", "嵫"), - (0x2F880, "M", "嵼"), - (0x2F881, "M", "巡"), - (0x2F882, "M", "巢"), - (0x2F883, "M", "㠯"), - (0x2F884, "M", "巽"), - (0x2F885, "M", "帨"), - (0x2F886, "M", "帽"), - (0x2F887, "M", "幩"), - (0x2F888, "M", "㡢"), - (0x2F889, "M", "𢆃"), - (0x2F88A, "M", "㡼"), - (0x2F88B, "M", "庰"), - (0x2F88C, "M", "庳"), - (0x2F88D, "M", "庶"), - (0x2F88E, "M", "廊"), - (0x2F88F, "M", "𪎒"), - (0x2F890, "M", "廾"), - (0x2F891, "M", "𢌱"), - (0x2F893, "M", "舁"), - (0x2F894, "M", "弢"), - (0x2F896, "M", "㣇"), - (0x2F897, "M", "𣊸"), - (0x2F898, "M", "𦇚"), - (0x2F899, "M", "形"), - (0x2F89A, "M", "彫"), - (0x2F89B, "M", "㣣"), - (0x2F89C, "M", "徚"), - (0x2F89D, "M", "忍"), - (0x2F89E, "M", "志"), - (0x2F89F, "M", "忹"), - (0x2F8A0, "M", "悁"), - (0x2F8A1, "M", "㤺"), - (0x2F8A2, "M", "㤜"), - (0x2F8A3, "M", "悔"), - (0x2F8A4, "M", "𢛔"), - (0x2F8A5, "M", "惇"), - (0x2F8A6, "M", "慈"), - (0x2F8A7, "M", "慌"), - (0x2F8A8, "M", "慎"), - ] - - -def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F8A9, "M", "慌"), - (0x2F8AA, "M", "慺"), - (0x2F8AB, "M", "憎"), - (0x2F8AC, "M", "憲"), - (0x2F8AD, "M", "憤"), - (0x2F8AE, "M", "憯"), - (0x2F8AF, "M", "懞"), - (0x2F8B0, "M", "懲"), - (0x2F8B1, "M", "懶"), - (0x2F8B2, "M", "成"), - (0x2F8B3, "M", "戛"), - (0x2F8B4, "M", "扝"), - (0x2F8B5, "M", "抱"), - (0x2F8B6, "M", "拔"), - (0x2F8B7, "M", "捐"), - (0x2F8B8, "M", "𢬌"), - (0x2F8B9, "M", "挽"), - (0x2F8BA, "M", "拼"), - (0x2F8BB, "M", "捨"), - (0x2F8BC, "M", "掃"), - (0x2F8BD, "M", "揤"), - (0x2F8BE, "M", "𢯱"), - (0x2F8BF, "M", "搢"), - (0x2F8C0, "M", "揅"), - (0x2F8C1, "M", "掩"), - (0x2F8C2, "M", "㨮"), - (0x2F8C3, "M", "摩"), - (0x2F8C4, "M", "摾"), - (0x2F8C5, "M", "撝"), - (0x2F8C6, "M", "摷"), - (0x2F8C7, "M", "㩬"), - (0x2F8C8, "M", "敏"), - (0x2F8C9, "M", "敬"), - (0x2F8CA, "M", "𣀊"), - (0x2F8CB, "M", "旣"), - (0x2F8CC, "M", "書"), - (0x2F8CD, "M", "晉"), - (0x2F8CE, "M", "㬙"), - (0x2F8CF, "M", "暑"), - (0x2F8D0, "M", "㬈"), - (0x2F8D1, "M", "㫤"), - (0x2F8D2, "M", "冒"), - (0x2F8D3, "M", "冕"), - (0x2F8D4, "M", "最"), - (0x2F8D5, "M", "暜"), - (0x2F8D6, "M", "肭"), - (0x2F8D7, "M", "䏙"), - (0x2F8D8, "M", "朗"), - (0x2F8D9, "M", "望"), - (0x2F8DA, "M", "朡"), - (0x2F8DB, "M", "杞"), - (0x2F8DC, "M", "杓"), - (0x2F8DD, "M", "𣏃"), - (0x2F8DE, "M", "㭉"), - (0x2F8DF, "M", "柺"), - (0x2F8E0, "M", "枅"), - (0x2F8E1, "M", "桒"), - (0x2F8E2, "M", "梅"), - (0x2F8E3, "M", "𣑭"), - (0x2F8E4, "M", "梎"), - (0x2F8E5, "M", "栟"), - (0x2F8E6, "M", "椔"), - (0x2F8E7, "M", "㮝"), - (0x2F8E8, "M", "楂"), - (0x2F8E9, "M", "榣"), - (0x2F8EA, "M", "槪"), - (0x2F8EB, "M", "檨"), - (0x2F8EC, "M", "𣚣"), - (0x2F8ED, 
"M", "櫛"), - (0x2F8EE, "M", "㰘"), - (0x2F8EF, "M", "次"), - (0x2F8F0, "M", "𣢧"), - (0x2F8F1, "M", "歔"), - (0x2F8F2, "M", "㱎"), - (0x2F8F3, "M", "歲"), - (0x2F8F4, "M", "殟"), - (0x2F8F5, "M", "殺"), - (0x2F8F6, "M", "殻"), - (0x2F8F7, "M", "𣪍"), - (0x2F8F8, "M", "𡴋"), - (0x2F8F9, "M", "𣫺"), - (0x2F8FA, "M", "汎"), - (0x2F8FB, "M", "𣲼"), - (0x2F8FC, "M", "沿"), - (0x2F8FD, "M", "泍"), - (0x2F8FE, "M", "汧"), - (0x2F8FF, "M", "洖"), - (0x2F900, "M", "派"), - (0x2F901, "M", "海"), - (0x2F902, "M", "流"), - (0x2F903, "M", "浩"), - (0x2F904, "M", "浸"), - (0x2F905, "M", "涅"), - (0x2F906, "M", "𣴞"), - (0x2F907, "M", "洴"), - (0x2F908, "M", "港"), - (0x2F909, "M", "湮"), - (0x2F90A, "M", "㴳"), - (0x2F90B, "M", "滋"), - (0x2F90C, "M", "滇"), - ] - - -def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F90D, "M", "𣻑"), - (0x2F90E, "M", "淹"), - (0x2F90F, "M", "潮"), - (0x2F910, "M", "𣽞"), - (0x2F911, "M", "𣾎"), - (0x2F912, "M", "濆"), - (0x2F913, "M", "瀹"), - (0x2F914, "M", "瀞"), - (0x2F915, "M", "瀛"), - (0x2F916, "M", "㶖"), - (0x2F917, "M", "灊"), - (0x2F918, "M", "災"), - (0x2F919, "M", "灷"), - (0x2F91A, "M", "炭"), - (0x2F91B, "M", "𠔥"), - (0x2F91C, "M", "煅"), - (0x2F91D, "M", "𤉣"), - (0x2F91E, "M", "熜"), - (0x2F91F, "X"), - (0x2F920, "M", "爨"), - (0x2F921, "M", "爵"), - (0x2F922, "M", "牐"), - (0x2F923, "M", "𤘈"), - (0x2F924, "M", "犀"), - (0x2F925, "M", "犕"), - (0x2F926, "M", "𤜵"), - (0x2F927, "M", "𤠔"), - (0x2F928, "M", "獺"), - (0x2F929, "M", "王"), - (0x2F92A, "M", "㺬"), - (0x2F92B, "M", "玥"), - (0x2F92C, "M", "㺸"), - (0x2F92E, "M", "瑇"), - (0x2F92F, "M", "瑜"), - (0x2F930, "M", "瑱"), - (0x2F931, "M", "璅"), - (0x2F932, "M", "瓊"), - (0x2F933, "M", "㼛"), - (0x2F934, "M", "甤"), - (0x2F935, "M", "𤰶"), - (0x2F936, "M", "甾"), - (0x2F937, "M", "𤲒"), - (0x2F938, "M", "異"), - (0x2F939, "M", "𢆟"), - (0x2F93A, "M", "瘐"), - (0x2F93B, "M", "𤾡"), - (0x2F93C, "M", "𤾸"), - (0x2F93D, "M", "𥁄"), - (0x2F93E, "M", "㿼"), - (0x2F93F, "M", "䀈"), - (0x2F940, "M", "直"), - (0x2F941, "M", "𥃳"), - (0x2F942, "M", "𥃲"), - (0x2F943, "M", "𥄙"), - (0x2F944, "M", "𥄳"), - (0x2F945, "M", "眞"), - (0x2F946, "M", "真"), - (0x2F948, "M", "睊"), - (0x2F949, "M", "䀹"), - (0x2F94A, "M", "瞋"), - (0x2F94B, "M", "䁆"), - (0x2F94C, "M", "䂖"), - (0x2F94D, "M", "𥐝"), - (0x2F94E, "M", "硎"), - (0x2F94F, "M", "碌"), - (0x2F950, "M", "磌"), - (0x2F951, "M", "䃣"), - (0x2F952, "M", "𥘦"), - (0x2F953, "M", "祖"), - (0x2F954, "M", "𥚚"), - (0x2F955, "M", "𥛅"), - (0x2F956, "M", "福"), - (0x2F957, "M", "秫"), - (0x2F958, "M", "䄯"), - (0x2F959, "M", "穀"), - (0x2F95A, "M", "穊"), - (0x2F95B, "M", "穏"), - (0x2F95C, "M", "𥥼"), - (0x2F95D, "M", "𥪧"), - (0x2F95F, "X"), - (0x2F960, "M", "䈂"), - (0x2F961, "M", "𥮫"), - (0x2F962, "M", "篆"), - (0x2F963, "M", "築"), - (0x2F964, "M", "䈧"), - (0x2F965, "M", "𥲀"), - (0x2F966, "M", "糒"), - (0x2F967, "M", "䊠"), - (0x2F968, "M", "糨"), - (0x2F969, "M", "糣"), - (0x2F96A, "M", "紀"), - (0x2F96B, "M", "𥾆"), - (0x2F96C, "M", "絣"), - (0x2F96D, "M", "䌁"), - (0x2F96E, "M", "緇"), - (0x2F96F, "M", "縂"), - (0x2F970, "M", "繅"), - (0x2F971, "M", "䌴"), - (0x2F972, "M", "𦈨"), - (0x2F973, "M", "𦉇"), - ] - - -def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F974, "M", "䍙"), - (0x2F975, "M", "𦋙"), - (0x2F976, "M", "罺"), - (0x2F977, "M", "𦌾"), - (0x2F978, "M", "羕"), - (0x2F979, "M", "翺"), - (0x2F97A, "M", "者"), - (0x2F97B, "M", "𦓚"), - (0x2F97C, "M", "𦔣"), - (0x2F97D, "M", "聠"), - (0x2F97E, "M", "𦖨"), - (0x2F97F, "M", "聰"), - (0x2F980, "M", "𣍟"), - (0x2F981, "M", "䏕"), - (0x2F982, "M", "育"), - (0x2F983, "M", 
"脃"), - (0x2F984, "M", "䐋"), - (0x2F985, "M", "脾"), - (0x2F986, "M", "媵"), - (0x2F987, "M", "𦞧"), - (0x2F988, "M", "𦞵"), - (0x2F989, "M", "𣎓"), - (0x2F98A, "M", "𣎜"), - (0x2F98B, "M", "舁"), - (0x2F98C, "M", "舄"), - (0x2F98D, "M", "辞"), - (0x2F98E, "M", "䑫"), - (0x2F98F, "M", "芑"), - (0x2F990, "M", "芋"), - (0x2F991, "M", "芝"), - (0x2F992, "M", "劳"), - (0x2F993, "M", "花"), - (0x2F994, "M", "芳"), - (0x2F995, "M", "芽"), - (0x2F996, "M", "苦"), - (0x2F997, "M", "𦬼"), - (0x2F998, "M", "若"), - (0x2F999, "M", "茝"), - (0x2F99A, "M", "荣"), - (0x2F99B, "M", "莭"), - (0x2F99C, "M", "茣"), - (0x2F99D, "M", "莽"), - (0x2F99E, "M", "菧"), - (0x2F99F, "M", "著"), - (0x2F9A0, "M", "荓"), - (0x2F9A1, "M", "菊"), - (0x2F9A2, "M", "菌"), - (0x2F9A3, "M", "菜"), - (0x2F9A4, "M", "𦰶"), - (0x2F9A5, "M", "𦵫"), - (0x2F9A6, "M", "𦳕"), - (0x2F9A7, "M", "䔫"), - (0x2F9A8, "M", "蓱"), - (0x2F9A9, "M", "蓳"), - (0x2F9AA, "M", "蔖"), - (0x2F9AB, "M", "𧏊"), - (0x2F9AC, "M", "蕤"), - (0x2F9AD, "M", "𦼬"), - (0x2F9AE, "M", "䕝"), - (0x2F9AF, "M", "䕡"), - (0x2F9B0, "M", "𦾱"), - (0x2F9B1, "M", "𧃒"), - (0x2F9B2, "M", "䕫"), - (0x2F9B3, "M", "虐"), - (0x2F9B4, "M", "虜"), - (0x2F9B5, "M", "虧"), - (0x2F9B6, "M", "虩"), - (0x2F9B7, "M", "蚩"), - (0x2F9B8, "M", "蚈"), - (0x2F9B9, "M", "蜎"), - (0x2F9BA, "M", "蛢"), - (0x2F9BB, "M", "蝹"), - (0x2F9BC, "M", "蜨"), - (0x2F9BD, "M", "蝫"), - (0x2F9BE, "M", "螆"), - (0x2F9BF, "X"), - (0x2F9C0, "M", "蟡"), - (0x2F9C1, "M", "蠁"), - (0x2F9C2, "M", "䗹"), - (0x2F9C3, "M", "衠"), - (0x2F9C4, "M", "衣"), - (0x2F9C5, "M", "𧙧"), - (0x2F9C6, "M", "裗"), - (0x2F9C7, "M", "裞"), - (0x2F9C8, "M", "䘵"), - (0x2F9C9, "M", "裺"), - (0x2F9CA, "M", "㒻"), - (0x2F9CB, "M", "𧢮"), - (0x2F9CC, "M", "𧥦"), - (0x2F9CD, "M", "䚾"), - (0x2F9CE, "M", "䛇"), - (0x2F9CF, "M", "誠"), - (0x2F9D0, "M", "諭"), - (0x2F9D1, "M", "變"), - (0x2F9D2, "M", "豕"), - (0x2F9D3, "M", "𧲨"), - (0x2F9D4, "M", "貫"), - (0x2F9D5, "M", "賁"), - (0x2F9D6, "M", "贛"), - (0x2F9D7, "M", "起"), - ] - - -def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F9D8, "M", "𧼯"), - (0x2F9D9, "M", "𠠄"), - (0x2F9DA, "M", "跋"), - (0x2F9DB, "M", "趼"), - (0x2F9DC, "M", "跰"), - (0x2F9DD, "M", "𠣞"), - (0x2F9DE, "M", "軔"), - (0x2F9DF, "M", "輸"), - (0x2F9E0, "M", "𨗒"), - (0x2F9E1, "M", "𨗭"), - (0x2F9E2, "M", "邔"), - (0x2F9E3, "M", "郱"), - (0x2F9E4, "M", "鄑"), - (0x2F9E5, "M", "𨜮"), - (0x2F9E6, "M", "鄛"), - (0x2F9E7, "M", "鈸"), - (0x2F9E8, "M", "鋗"), - (0x2F9E9, "M", "鋘"), - (0x2F9EA, "M", "鉼"), - (0x2F9EB, "M", "鏹"), - (0x2F9EC, "M", "鐕"), - (0x2F9ED, "M", "𨯺"), - (0x2F9EE, "M", "開"), - (0x2F9EF, "M", "䦕"), - (0x2F9F0, "M", "閷"), - (0x2F9F1, "M", "𨵷"), - (0x2F9F2, "M", "䧦"), - (0x2F9F3, "M", "雃"), - (0x2F9F4, "M", "嶲"), - (0x2F9F5, "M", "霣"), - (0x2F9F6, "M", "𩅅"), - (0x2F9F7, "M", "𩈚"), - (0x2F9F8, "M", "䩮"), - (0x2F9F9, "M", "䩶"), - (0x2F9FA, "M", "韠"), - (0x2F9FB, "M", "𩐊"), - (0x2F9FC, "M", "䪲"), - (0x2F9FD, "M", "𩒖"), - (0x2F9FE, "M", "頋"), - (0x2FA00, "M", "頩"), - (0x2FA01, "M", "𩖶"), - (0x2FA02, "M", "飢"), - (0x2FA03, "M", "䬳"), - (0x2FA04, "M", "餩"), - (0x2FA05, "M", "馧"), - (0x2FA06, "M", "駂"), - (0x2FA07, "M", "駾"), - (0x2FA08, "M", "䯎"), - (0x2FA09, "M", "𩬰"), - (0x2FA0A, "M", "鬒"), - (0x2FA0B, "M", "鱀"), - (0x2FA0C, "M", "鳽"), - (0x2FA0D, "M", "䳎"), - (0x2FA0E, "M", "䳭"), - (0x2FA0F, "M", "鵧"), - (0x2FA10, "M", "𪃎"), - (0x2FA11, "M", "䳸"), - (0x2FA12, "M", "𪄅"), - (0x2FA13, "M", "𪈎"), - (0x2FA14, "M", "𪊑"), - (0x2FA15, "M", "麻"), - (0x2FA16, "M", "䵖"), - (0x2FA17, "M", "黹"), - (0x2FA18, "M", "黾"), - (0x2FA19, "M", "鼅"), - (0x2FA1A, "M", "鼏"), - (0x2FA1B, 
"M", "鼖"), - (0x2FA1C, "M", "鼻"), - (0x2FA1D, "M", "𪘀"), - (0x2FA1E, "X"), - (0x30000, "V"), - (0x3134B, "X"), - (0x31350, "V"), - (0x323B0, "X"), - (0xE0100, "I"), - (0xE01F0, "X"), - ] - - -uts46data = tuple( - _seg_0() - + _seg_1() - + _seg_2() - + _seg_3() - + _seg_4() - + _seg_5() - + _seg_6() - + _seg_7() - + _seg_8() - + _seg_9() - + _seg_10() - + _seg_11() - + _seg_12() - + _seg_13() - + _seg_14() - + _seg_15() - + _seg_16() - + _seg_17() - + _seg_18() - + _seg_19() - + _seg_20() - + _seg_21() - + _seg_22() - + _seg_23() - + _seg_24() - + _seg_25() - + _seg_26() - + _seg_27() - + _seg_28() - + _seg_29() - + _seg_30() - + _seg_31() - + _seg_32() - + _seg_33() - + _seg_34() - + _seg_35() - + _seg_36() - + _seg_37() - + _seg_38() - + _seg_39() - + _seg_40() - + _seg_41() - + _seg_42() - + _seg_43() - + _seg_44() - + _seg_45() - + _seg_46() - + _seg_47() - + _seg_48() - + _seg_49() - + _seg_50() - + _seg_51() - + _seg_52() - + _seg_53() - + _seg_54() - + _seg_55() - + _seg_56() - + _seg_57() - + _seg_58() - + _seg_59() - + _seg_60() - + _seg_61() - + _seg_62() - + _seg_63() - + _seg_64() - + _seg_65() - + _seg_66() - + _seg_67() - + _seg_68() - + _seg_69() - + _seg_70() - + _seg_71() - + _seg_72() - + _seg_73() - + _seg_74() - + _seg_75() - + _seg_76() - + _seg_77() - + _seg_78() - + _seg_79() - + _seg_80() - + _seg_81() -) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt b/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt deleted file mode 100644 index 7b190ca..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2011 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/METADATA deleted file mode 100644 index ddf5464..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: itsdangerous -Version: 2.2.0 -Summary: Safely pass data to untrusted environments and back. -Maintainer-email: Pallets -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -Project-URL: Changes, https://itsdangerous.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://itsdangerous.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/itsdangerous/ - -# ItsDangerous - -... so better sign this - -Various helpers to pass data to untrusted environments and to get it -back safe and sound. Data is cryptographically signed to ensure that a -token has not been tampered with. - -It's possible to customize how data is serialized. Data is compressed as -needed. A timestamp can be added and verified automatically while -loading a token. - - -## A Simple Example - -Here's how you could generate a token for transmitting a user's id and -name between web requests. - -```python -from itsdangerous import URLSafeSerializer -auth_s = URLSafeSerializer("secret key", "auth") -token = auth_s.dumps({"id": 5, "name": "itsdangerous"}) - -print(token) -# eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg - -data = auth_s.loads(token) -print(data["name"]) -# itsdangerous -``` - - -## Donate - -The Pallets organization develops and supports ItsDangerous and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -[please donate today][]. 
- -[please donate today]: https://palletsprojects.com/donate - diff --git a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/RECORD deleted file mode 100644 index 255db20..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/RECORD +++ /dev/null @@ -1,22 +0,0 @@ -itsdangerous-2.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -itsdangerous-2.2.0.dist-info/LICENSE.txt,sha256=Y68JiRtr6K0aQlLtQ68PTvun_JSOIoNnvtfzxa4LCdc,1475 -itsdangerous-2.2.0.dist-info/METADATA,sha256=0rk0-1ZwihuU5DnwJVwPWoEI4yWOyCexih3JyZHblhE,1924 -itsdangerous-2.2.0.dist-info/RECORD,, -itsdangerous-2.2.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -itsdangerous/__init__.py,sha256=4SK75sCe29xbRgQE1ZQtMHnKUuZYAf3bSpZOrff1IAY,1427 -itsdangerous/__pycache__/__init__.cpython-310.pyc,, -itsdangerous/__pycache__/_json.cpython-310.pyc,, -itsdangerous/__pycache__/encoding.cpython-310.pyc,, -itsdangerous/__pycache__/exc.cpython-310.pyc,, -itsdangerous/__pycache__/serializer.cpython-310.pyc,, -itsdangerous/__pycache__/signer.cpython-310.pyc,, -itsdangerous/__pycache__/timed.cpython-310.pyc,, -itsdangerous/__pycache__/url_safe.cpython-310.pyc,, -itsdangerous/_json.py,sha256=wPQGmge2yZ9328EHKF6gadGeyGYCJQKxtU-iLKE6UnA,473 -itsdangerous/encoding.py,sha256=wwTz5q_3zLcaAdunk6_vSoStwGqYWe307Zl_U87aRFM,1409 -itsdangerous/exc.py,sha256=Rr3exo0MRFEcPZltwecyK16VV1bE2K9_F1-d-ljcUn4,3201 -itsdangerous/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -itsdangerous/serializer.py,sha256=PmdwADLqkSyQLZ0jOKAgDsAW4k_H0TlA71Ei3z0C5aI,15601 -itsdangerous/signer.py,sha256=YO0CV7NBvHA6j549REHJFUjUojw2pHqwcUpQnU7yNYQ,9647 -itsdangerous/timed.py,sha256=6RvDMqNumGMxf0-HlpaZdN9PUQQmRvrQGplKhxuivUs,8083 -itsdangerous/url_safe.py,sha256=az4e5fXi_vs-YbWj8YZwn4wiVKfeD--GEKRT5Ueu4P4,2505 diff --git a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous-2.2.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/itsdangerous/__init__.py b/venv/lib/python3.10/site-packages/itsdangerous/__init__.py deleted file mode 100644 index ea55256..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import annotations - -import typing as t - -from .encoding import base64_decode as base64_decode -from .encoding import base64_encode as base64_encode -from .encoding import want_bytes as want_bytes -from .exc import BadData as BadData -from .exc import BadHeader as BadHeader -from .exc import BadPayload as BadPayload -from .exc import BadSignature as BadSignature -from .exc import BadTimeSignature as BadTimeSignature -from .exc import SignatureExpired as SignatureExpired -from .serializer import Serializer as Serializer -from .signer import HMACAlgorithm as HMACAlgorithm -from .signer import NoneAlgorithm as NoneAlgorithm -from .signer import Signer as Signer -from .timed import TimedSerializer as TimedSerializer -from .timed import TimestampSigner as TimestampSigner -from .url_safe import URLSafeSerializer as URLSafeSerializer -from .url_safe import URLSafeTimedSerializer as URLSafeTimedSerializer - - -def __getattr__(name: str) -> 
t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " ItsDangerous 2.3. Use feature detection or" - " 'importlib.metadata.version(\"itsdangerous\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("itsdangerous") - - raise AttributeError(name) diff --git a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index ca2fccb..0000000 Binary files a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/_json.cpython-310.pyc b/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/_json.cpython-310.pyc deleted file mode 100644 index edf79fc..0000000 Binary files a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/_json.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/encoding.cpython-310.pyc b/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/encoding.cpython-310.pyc deleted file mode 100644 index 62ef676..0000000 Binary files a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/encoding.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/exc.cpython-310.pyc b/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/exc.cpython-310.pyc deleted file mode 100644 index 5d407d9..0000000 Binary files a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/exc.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/serializer.cpython-310.pyc b/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/serializer.cpython-310.pyc deleted file mode 100644 index 5b7646e..0000000 Binary files a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/serializer.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/signer.cpython-310.pyc b/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/signer.cpython-310.pyc deleted file mode 100644 index 6e01a80..0000000 Binary files a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/signer.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/timed.cpython-310.pyc b/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/timed.cpython-310.pyc deleted file mode 100644 index 22d3a2d..0000000 Binary files a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/timed.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/url_safe.cpython-310.pyc b/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/url_safe.cpython-310.pyc deleted file mode 100644 index 6dca1ce..0000000 Binary files a/venv/lib/python3.10/site-packages/itsdangerous/__pycache__/url_safe.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/itsdangerous/_json.py b/venv/lib/python3.10/site-packages/itsdangerous/_json.py deleted file mode 100644 index fc23fea..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous/_json.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -import json as _json -import typing as t - - -class _CompactJSON: - 
"""Wrapper around json module that strips whitespace.""" - - @staticmethod - def loads(payload: str | bytes) -> t.Any: - return _json.loads(payload) - - @staticmethod - def dumps(obj: t.Any, **kwargs: t.Any) -> str: - kwargs.setdefault("ensure_ascii", False) - kwargs.setdefault("separators", (",", ":")) - return _json.dumps(obj, **kwargs) diff --git a/venv/lib/python3.10/site-packages/itsdangerous/encoding.py b/venv/lib/python3.10/site-packages/itsdangerous/encoding.py deleted file mode 100644 index f5ca80f..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous/encoding.py +++ /dev/null @@ -1,54 +0,0 @@ -from __future__ import annotations - -import base64 -import string -import struct -import typing as t - -from .exc import BadData - - -def want_bytes( - s: str | bytes, encoding: str = "utf-8", errors: str = "strict" -) -> bytes: - if isinstance(s, str): - s = s.encode(encoding, errors) - - return s - - -def base64_encode(string: str | bytes) -> bytes: - """Base64 encode a string of bytes or text. The resulting bytes are - safe to use in URLs. - """ - string = want_bytes(string) - return base64.urlsafe_b64encode(string).rstrip(b"=") - - -def base64_decode(string: str | bytes) -> bytes: - """Base64 decode a URL-safe string of bytes or text. The result is - bytes. - """ - string = want_bytes(string, encoding="ascii", errors="ignore") - string += b"=" * (-len(string) % 4) - - try: - return base64.urlsafe_b64decode(string) - except (TypeError, ValueError) as e: - raise BadData("Invalid base64-encoded data") from e - - -# The alphabet used by base64.urlsafe_* -_base64_alphabet = f"{string.ascii_letters}{string.digits}-_=".encode("ascii") - -_int64_struct = struct.Struct(">Q") -_int_to_bytes = _int64_struct.pack -_bytes_to_int = t.cast("t.Callable[[bytes], tuple[int]]", _int64_struct.unpack) - - -def int_to_bytes(num: int) -> bytes: - return _int_to_bytes(num).lstrip(b"\x00") - - -def bytes_to_int(bytestr: bytes) -> int: - return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0] diff --git a/venv/lib/python3.10/site-packages/itsdangerous/exc.py b/venv/lib/python3.10/site-packages/itsdangerous/exc.py deleted file mode 100644 index a75adcd..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous/exc.py +++ /dev/null @@ -1,106 +0,0 @@ -from __future__ import annotations - -import typing as t -from datetime import datetime - - -class BadData(Exception): - """Raised if bad data of any sort was encountered. This is the base - for all exceptions that ItsDangerous defines. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str): - super().__init__(message) - self.message = message - - def __str__(self) -> str: - return self.message - - -class BadSignature(BadData): - """Raised if a signature does not match.""" - - def __init__(self, message: str, payload: t.Any | None = None): - super().__init__(message) - - #: The payload that failed the signature test. In some - #: situations you might still want to inspect this, even if - #: you know it was tampered with. - #: - #: .. versionadded:: 0.14 - self.payload: t.Any | None = payload - - -class BadTimeSignature(BadSignature): - """Raised if a time-based signature is invalid. This is a subclass - of :class:`BadSignature`. - """ - - def __init__( - self, - message: str, - payload: t.Any | None = None, - date_signed: datetime | None = None, - ): - super().__init__(message, payload) - - #: If the signature expired this exposes the date of when the - #: signature was created. 
This can be helpful in order to - #: tell the user how long a link has been gone stale. - #: - #: .. versionchanged:: 2.0 - #: The datetime value is timezone-aware rather than naive. - #: - #: .. versionadded:: 0.14 - self.date_signed = date_signed - - -class SignatureExpired(BadTimeSignature): - """Raised if a signature timestamp is older than ``max_age``. This - is a subclass of :exc:`BadTimeSignature`. - """ - - -class BadHeader(BadSignature): - """Raised if a signed header is invalid in some form. This only - happens for serializers that have a header that goes with the - signature. - - .. versionadded:: 0.24 - """ - - def __init__( - self, - message: str, - payload: t.Any | None = None, - header: t.Any | None = None, - original_error: Exception | None = None, - ): - super().__init__(message, payload) - - #: If the header is actually available but just malformed it - #: might be stored here. - self.header: t.Any | None = header - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: Exception | None = original_error - - -class BadPayload(BadData): - """Raised if a payload is invalid. This could happen if the payload - is loaded despite an invalid signature, or if there is a mismatch - between the serializer and deserializer. The original exception - that occurred during loading is stored on as :attr:`original_error`. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str, original_error: Exception | None = None): - super().__init__(message) - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: Exception | None = original_error diff --git a/venv/lib/python3.10/site-packages/itsdangerous/py.typed b/venv/lib/python3.10/site-packages/itsdangerous/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.10/site-packages/itsdangerous/serializer.py b/venv/lib/python3.10/site-packages/itsdangerous/serializer.py deleted file mode 100644 index 5ddf387..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous/serializer.py +++ /dev/null @@ -1,406 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import json -import typing as t - -from .encoding import want_bytes -from .exc import BadPayload -from .exc import BadSignature -from .signer import _make_keys_list -from .signer import Signer - -if t.TYPE_CHECKING: - import typing_extensions as te - - # This should be either be str or bytes. To avoid having to specify the - # bound type, it falls back to a union if structural matching fails. - _TSerialized = te.TypeVar( - "_TSerialized", bound=t.Union[str, bytes], default=t.Union[str, bytes] - ) -else: - # Still available at runtime on Python < 3.13, but without the default. - _TSerialized = t.TypeVar("_TSerialized", bound=t.Union[str, bytes]) - - -class _PDataSerializer(t.Protocol[_TSerialized]): - def loads(self, payload: _TSerialized, /) -> t.Any: ... - # A signature with additional arguments is not handled correctly by type - # checkers right now, so an overload is used below for serializers that - # don't match this strict protocol. - def dumps(self, obj: t.Any, /) -> _TSerialized: ... - - -# Use TypeIs once it's available in typing_extensions or 3.13. 
-def is_text_serializer( - serializer: _PDataSerializer[t.Any], -) -> te.TypeGuard[_PDataSerializer[str]]: - """Checks whether a serializer generates text or binary.""" - return isinstance(serializer.dumps({}), str) - - -class Serializer(t.Generic[_TSerialized]): - """A serializer wraps a :class:`~itsdangerous.signer.Signer` to - enable serializing and securely signing data other than bytes. It - can unsign to verify that the data hasn't been changed. - - The serializer provides :meth:`dumps` and :meth:`loads`, similar to - :mod:`json`, and by default uses :mod:`json` internally to serialize - the data to bytes. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param serializer: An object that provides ``dumps`` and ``loads`` - methods for serializing data to a string. Defaults to - :attr:`default_serializer`, which defaults to :mod:`json`. - :param serializer_kwargs: Keyword arguments to pass when calling - ``serializer.dumps``. - :param signer: A ``Signer`` class to instantiate when signing data. - Defaults to :attr:`default_signer`, which defaults to - :class:`~itsdangerous.signer.Signer`. - :param signer_kwargs: Keyword arguments to pass when instantiating - the ``Signer`` class. - :param fallback_signers: List of signer parameters to try when - unsigning with the default signer fails. Each item can be a dict - of ``signer_kwargs``, a ``Signer`` class, or a tuple of - ``(signer, signer_kwargs)``. Defaults to - :attr:`default_fallback_signers`. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 2.0 - Removed the default SHA-512 fallback signer from - ``default_fallback_signers``. - - .. versionchanged:: 1.1 - Added support for ``fallback_signers`` and configured a default - SHA-512 fallback. This fallback is for users who used the yanked - 1.0.0 release which defaulted to SHA-512. - - .. versionchanged:: 0.14 - The ``signer`` and ``signer_kwargs`` parameters were added to - the constructor. - """ - - #: The default serialization module to use to serialize data to a - #: string internally. The default is :mod:`json`, but can be changed - #: to any object that provides ``dumps`` and ``loads`` methods. - default_serializer: _PDataSerializer[t.Any] = json - - #: The default ``Signer`` class to instantiate when signing data. - #: The default is :class:`itsdangerous.signer.Signer`. - default_signer: type[Signer] = Signer - - #: The default fallback signers to try when unsigning fails. - default_fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] = [] - - # Serializer[str] if no data serializer is provided, or if it returns str. 
- @t.overload - def __init__( - self: Serializer[str], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - serializer: None | _PDataSerializer[str] = None, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Serializer[bytes] with a bytes data serializer positional argument. - @t.overload - def __init__( - self: Serializer[bytes], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None, - serializer: _PDataSerializer[bytes], - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Serializer[bytes] with a bytes data serializer keyword argument. - @t.overload - def __init__( - self: Serializer[bytes], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - *, - serializer: _PDataSerializer[bytes], - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Fall back with a positional argument. If the strict signature of - # _PDataSerializer doesn't match, fall back to a union, requiring the user - # to specify the type. - @t.overload - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None, - serializer: t.Any, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Fall back with a keyword argument. - @t.overload - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - *, - serializer: t.Any, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - serializer: t.Any | None = None, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. 
- self.secret_keys: list[bytes] = _make_keys_list(secret_key) - - if salt is not None: - salt = want_bytes(salt) - # if salt is None then the signer's default is used - - self.salt = salt - - if serializer is None: - serializer = self.default_serializer - - self.serializer: _PDataSerializer[_TSerialized] = serializer - self.is_text_serializer: bool = is_text_serializer(serializer) - - if signer is None: - signer = self.default_signer - - self.signer: type[Signer] = signer - self.signer_kwargs: dict[str, t.Any] = signer_kwargs or {} - - if fallback_signers is None: - fallback_signers = list(self.default_fallback_signers) - - self.fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] = fallback_signers - self.serializer_kwargs: dict[str, t.Any] = serializer_kwargs or {} - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def load_payload( - self, payload: bytes, serializer: _PDataSerializer[t.Any] | None = None - ) -> t.Any: - """Loads the encoded object. This function raises - :class:`.BadPayload` if the payload is not valid. The - ``serializer`` parameter can be used to override the serializer - stored on the class. The encoded ``payload`` should always be - bytes. - """ - if serializer is None: - use_serializer = self.serializer - is_text = self.is_text_serializer - else: - use_serializer = serializer - is_text = is_text_serializer(serializer) - - try: - if is_text: - return use_serializer.loads(payload.decode("utf-8")) # type: ignore[arg-type] - - return use_serializer.loads(payload) # type: ignore[arg-type] - except Exception as e: - raise BadPayload( - "Could not load the payload because an exception" - " occurred on unserializing the data.", - original_error=e, - ) from e - - def dump_payload(self, obj: t.Any) -> bytes: - """Dumps the encoded object. The return value is always bytes. - If the internal serializer returns text, the value will be - encoded as UTF-8. - """ - return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs)) - - def make_signer(self, salt: str | bytes | None = None) -> Signer: - """Creates a new instance of the signer to be used. The default - implementation uses the :class:`.Signer` base class. - """ - if salt is None: - salt = self.salt - - return self.signer(self.secret_keys, salt=salt, **self.signer_kwargs) - - def iter_unsigners(self, salt: str | bytes | None = None) -> cabc.Iterator[Signer]: - """Iterates over all signers to be tried for unsigning. Starts - with the configured signer, then constructs each signer - specified in ``fallback_signers``. - """ - if salt is None: - salt = self.salt - - yield self.make_signer(salt) - - for fallback in self.fallback_signers: - if isinstance(fallback, dict): - kwargs = fallback - fallback = self.signer - elif isinstance(fallback, tuple): - fallback, kwargs = fallback - else: - kwargs = self.signer_kwargs - - for secret_key in self.secret_keys: - yield fallback(secret_key, salt=salt, **kwargs) - - def dumps(self, obj: t.Any, salt: str | bytes | None = None) -> _TSerialized: - """Returns a signed string serialized with the internal - serializer. The return value can be either a byte or unicode - string depending on the format of the internal serializer. 
- """ - payload = want_bytes(self.dump_payload(obj)) - rv = self.make_signer(salt).sign(payload) - - if self.is_text_serializer: - return rv.decode("utf-8") # type: ignore[return-value] - - return rv # type: ignore[return-value] - - def dump(self, obj: t.Any, f: t.IO[t.Any], salt: str | bytes | None = None) -> None: - """Like :meth:`dumps` but dumps into a file. The file handle has - to be compatible with what the internal serializer expects. - """ - f.write(self.dumps(obj, salt)) - - def loads( - self, s: str | bytes, salt: str | bytes | None = None, **kwargs: t.Any - ) -> t.Any: - """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the - signature validation fails. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - return self.load_payload(signer.unsign(s)) - except BadSignature as err: - last_exception = err - - raise t.cast(BadSignature, last_exception) - - def load(self, f: t.IO[t.Any], salt: str | bytes | None = None) -> t.Any: - """Like :meth:`loads` but loads from a file.""" - return self.loads(f.read(), salt) - - def loads_unsafe( - self, s: str | bytes, salt: str | bytes | None = None - ) -> tuple[bool, t.Any]: - """Like :meth:`loads` but without verifying the signature. This - is potentially very dangerous to use depending on how your - serializer works. The return value is ``(signature_valid, - payload)`` instead of just the payload. The first item will be a - boolean that indicates if the signature is valid. This function - never fails. - - Use it for debugging only and if you know that your serializer - module is not exploitable (for example, do not use it with a - pickle serializer). - - .. versionadded:: 0.15 - """ - return self._loads_unsafe_impl(s, salt) - - def _loads_unsafe_impl( - self, - s: str | bytes, - salt: str | bytes | None, - load_kwargs: dict[str, t.Any] | None = None, - load_payload_kwargs: dict[str, t.Any] | None = None, - ) -> tuple[bool, t.Any]: - """Low level helper function to implement :meth:`loads_unsafe` - in serializer subclasses. - """ - if load_kwargs is None: - load_kwargs = {} - - try: - return True, self.loads(s, salt=salt, **load_kwargs) - except BadSignature as e: - if e.payload is None: - return False, None - - if load_payload_kwargs is None: - load_payload_kwargs = {} - - try: - return ( - False, - self.load_payload(e.payload, **load_payload_kwargs), - ) - except BadPayload: - return False, None - - def load_unsafe( - self, f: t.IO[t.Any], salt: str | bytes | None = None - ) -> tuple[bool, t.Any]: - """Like :meth:`loads_unsafe` but loads from a file. - - .. versionadded:: 0.15 - """ - return self.loads_unsafe(f.read(), salt=salt) diff --git a/venv/lib/python3.10/site-packages/itsdangerous/signer.py b/venv/lib/python3.10/site-packages/itsdangerous/signer.py deleted file mode 100644 index e324dc0..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous/signer.py +++ /dev/null @@ -1,266 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import hashlib -import hmac -import typing as t - -from .encoding import _base64_alphabet -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import want_bytes -from .exc import BadSignature - - -class SigningAlgorithm: - """Subclasses must implement :meth:`get_signature` to provide - signature generation functionality. 
- """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - """Returns the signature for the given key and value.""" - raise NotImplementedError() - - def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: - """Verifies the given signature matches the expected - signature. - """ - return hmac.compare_digest(sig, self.get_signature(key, value)) - - -class NoneAlgorithm(SigningAlgorithm): - """Provides an algorithm that does not perform any signing and - returns an empty signature. - """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - return b"" - - -def _lazy_sha1(string: bytes = b"") -> t.Any: - """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include - SHA-1, in which case the import and use as a default would fail before the - developer can configure something else. - """ - return hashlib.sha1(string) - - -class HMACAlgorithm(SigningAlgorithm): - """Provides signature generation using HMACs.""" - - #: The digest method to use with the MAC algorithm. This defaults to - #: SHA1, but can be changed to any other function in the hashlib - #: module. - default_digest_method: t.Any = staticmethod(_lazy_sha1) - - def __init__(self, digest_method: t.Any = None): - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: t.Any = digest_method - - def get_signature(self, key: bytes, value: bytes) -> bytes: - mac = hmac.new(key, msg=value, digestmod=self.digest_method) - return mac.digest() - - -def _make_keys_list( - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], -) -> list[bytes]: - if isinstance(secret_key, (str, bytes)): - return [want_bytes(secret_key)] - - return [want_bytes(s) for s in secret_key] # pyright: ignore - - -class Signer: - """A signer securely signs bytes, then unsigns them to verify that - the value hasn't been changed. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param sep: Separator between the signature and value. - :param key_derivation: How to derive the signing key from the secret - key and salt. Possible values are ``concat``, ``django-concat``, - or ``hmac``. Defaults to :attr:`default_key_derivation`, which - defaults to ``django-concat``. - :param digest_method: Hash function to use when generating the HMAC - signature. Defaults to :attr:`default_digest_method`, which - defaults to :func:`hashlib.sha1`. Note that the security of the - hash alone doesn't apply when used intermediately in HMAC. - :param algorithm: A :class:`SigningAlgorithm` instance to use - instead of building a default :class:`HMACAlgorithm` with the - ``digest_method``. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 0.18 - ``algorithm`` was added as an argument to the class constructor. - - .. versionchanged:: 0.14 - ``key_derivation`` and ``digest_method`` were added as arguments - to the class constructor. - """ - - #: The default digest method to use for the signer. 
The default is - #: :func:`hashlib.sha1`, but can be changed to any :mod:`hashlib` or - #: compatible object. Note that the security of the hash alone - #: doesn't apply when used intermediately in HMAC. - #: - #: .. versionadded:: 0.14 - default_digest_method: t.Any = staticmethod(_lazy_sha1) - - #: The default scheme to use to derive the signing key from the - #: secret key and salt. The default is ``django-concat``. Possible - #: values are ``concat``, ``django-concat``, and ``hmac``. - #: - #: .. versionadded:: 0.14 - default_key_derivation: str = "django-concat" - - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous.Signer", - sep: str | bytes = b".", - key_derivation: str | None = None, - digest_method: t.Any | None = None, - algorithm: SigningAlgorithm | None = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. - self.secret_keys: list[bytes] = _make_keys_list(secret_key) - self.sep: bytes = want_bytes(sep) - - if self.sep in _base64_alphabet: - raise ValueError( - "The given separator cannot be used because it may be" - " contained in the signature itself. ASCII letters," - " digits, and '-_=' must not be used." - ) - - if salt is not None: - salt = want_bytes(salt) - else: - salt = b"itsdangerous.Signer" - - self.salt = salt - - if key_derivation is None: - key_derivation = self.default_key_derivation - - self.key_derivation: str = key_derivation - - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: t.Any = digest_method - - if algorithm is None: - algorithm = HMACAlgorithm(self.digest_method) - - self.algorithm: SigningAlgorithm = algorithm - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def derive_key(self, secret_key: str | bytes | None = None) -> bytes: - """This method is called to derive the key. The default key - derivation choices can be overridden here. Key derivation is not - intended to be used as a security method to make a complex key - out of a short password. Instead you should use large random - secret keys. - - :param secret_key: A specific secret key to derive from. - Defaults to the last item in :attr:`secret_keys`. - - .. versionchanged:: 2.0 - Added the ``secret_key`` parameter. 
- """ - if secret_key is None: - secret_key = self.secret_keys[-1] - else: - secret_key = want_bytes(secret_key) - - if self.key_derivation == "concat": - return t.cast(bytes, self.digest_method(self.salt + secret_key).digest()) - elif self.key_derivation == "django-concat": - return t.cast( - bytes, self.digest_method(self.salt + b"signer" + secret_key).digest() - ) - elif self.key_derivation == "hmac": - mac = hmac.new(secret_key, digestmod=self.digest_method) - mac.update(self.salt) - return mac.digest() - elif self.key_derivation == "none": - return secret_key - else: - raise TypeError("Unknown key derivation method") - - def get_signature(self, value: str | bytes) -> bytes: - """Returns the signature for the given value.""" - value = want_bytes(value) - key = self.derive_key() - sig = self.algorithm.get_signature(key, value) - return base64_encode(sig) - - def sign(self, value: str | bytes) -> bytes: - """Signs the given string.""" - value = want_bytes(value) - return value + self.sep + self.get_signature(value) - - def verify_signature(self, value: str | bytes, sig: str | bytes) -> bool: - """Verifies the signature for the given value.""" - try: - sig = base64_decode(sig) - except Exception: - return False - - value = want_bytes(value) - - for secret_key in reversed(self.secret_keys): - key = self.derive_key(secret_key) - - if self.algorithm.verify_signature(key, value, sig): - return True - - return False - - def unsign(self, signed_value: str | bytes) -> bytes: - """Unsigns the given string.""" - signed_value = want_bytes(signed_value) - - if self.sep not in signed_value: - raise BadSignature(f"No {self.sep!r} found in value") - - value, sig = signed_value.rsplit(self.sep, 1) - - if self.verify_signature(value, sig): - return value - - raise BadSignature(f"Signature {sig!r} does not match", payload=value) - - def validate(self, signed_value: str | bytes) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid. - """ - try: - self.unsign(signed_value) - return True - except BadSignature: - return False diff --git a/venv/lib/python3.10/site-packages/itsdangerous/timed.py b/venv/lib/python3.10/site-packages/itsdangerous/timed.py deleted file mode 100644 index 7384375..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous/timed.py +++ /dev/null @@ -1,228 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import time -import typing as t -from datetime import datetime -from datetime import timezone - -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import bytes_to_int -from .encoding import int_to_bytes -from .encoding import want_bytes -from .exc import BadSignature -from .exc import BadTimeSignature -from .exc import SignatureExpired -from .serializer import _TSerialized -from .serializer import Serializer -from .signer import Signer - - -class TimestampSigner(Signer): - """Works like the regular :class:`.Signer` but also records the time - of the signing and can be used to expire signatures. The - :meth:`unsign` method can raise :exc:`.SignatureExpired` if the - unsigning failed because the signature is expired. - """ - - def get_timestamp(self) -> int: - """Returns the current timestamp. The function must return an - integer. - """ - return int(time.time()) - - def timestamp_to_datetime(self, ts: int) -> datetime: - """Convert the timestamp from :meth:`get_timestamp` into an - aware :class`datetime.datetime` in UTC. - - .. 
versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - return datetime.fromtimestamp(ts, tz=timezone.utc) - - def sign(self, value: str | bytes) -> bytes: - """Signs the given string and also attaches time information.""" - value = want_bytes(value) - timestamp = base64_encode(int_to_bytes(self.get_timestamp())) - sep = want_bytes(self.sep) - value = value + sep + timestamp - return value + sep + self.get_signature(value) - - # Ignore overlapping signatures check, return_timestamp is the only - # parameter that affects the return type. - - @t.overload - def unsign( # type: ignore[overload-overlap] - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: t.Literal[False] = False, - ) -> bytes: ... - - @t.overload - def unsign( - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: t.Literal[True] = True, - ) -> tuple[bytes, datetime]: ... - - def unsign( - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: bool = False, - ) -> tuple[bytes, datetime] | bytes: - """Works like the regular :meth:`.Signer.unsign` but can also - validate the time. See the base docstring of the class for - the general behavior. If ``return_timestamp`` is ``True`` the - timestamp of the signature will be returned as an aware - :class:`datetime.datetime` object in UTC. - - .. versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - try: - result = super().unsign(signed_value) - sig_error = None - except BadSignature as e: - sig_error = e - result = e.payload or b"" - - sep = want_bytes(self.sep) - - # If there is no timestamp in the result there is something - # seriously wrong. In case there was a signature error, we raise - # that one directly, otherwise we have a weird situation in - # which we shouldn't have come except someone uses a time-based - # serializer on non-timestamp data, so catch that. - if sep not in result: - if sig_error: - raise sig_error - - raise BadTimeSignature("timestamp missing", payload=result) - - value, ts_bytes = result.rsplit(sep, 1) - ts_int: int | None = None - ts_dt: datetime | None = None - - try: - ts_int = bytes_to_int(base64_decode(ts_bytes)) - except Exception: - pass - - # Signature is *not* okay. Raise a proper error now that we have - # split the value and the timestamp. - if sig_error is not None: - if ts_int is not None: - try: - ts_dt = self.timestamp_to_datetime(ts_int) - except (ValueError, OSError, OverflowError) as exc: - # Windows raises OSError - # 32-bit raises OverflowError - raise BadTimeSignature( - "Malformed timestamp", payload=value - ) from exc - - raise BadTimeSignature(str(sig_error), payload=value, date_signed=ts_dt) - - # Signature was okay but the timestamp is actually not there or - # malformed. Should not happen, but we handle it anyway. 
- if ts_int is None: - raise BadTimeSignature("Malformed timestamp", payload=value) - - # Check timestamp is not older than max_age - if max_age is not None: - age = self.get_timestamp() - ts_int - - if age > max_age: - raise SignatureExpired( - f"Signature age {age} > {max_age} seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if age < 0: - raise SignatureExpired( - f"Signature age {age} < 0 seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if return_timestamp: - return value, self.timestamp_to_datetime(ts_int) - - return value - - def validate(self, signed_value: str | bytes, max_age: int | None = None) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid.""" - try: - self.unsign(signed_value, max_age=max_age) - return True - except BadSignature: - return False - - -class TimedSerializer(Serializer[_TSerialized]): - """Uses :class:`TimestampSigner` instead of the default - :class:`.Signer`. - """ - - default_signer: type[TimestampSigner] = TimestampSigner - - def iter_unsigners( - self, salt: str | bytes | None = None - ) -> cabc.Iterator[TimestampSigner]: - return t.cast("cabc.Iterator[TimestampSigner]", super().iter_unsigners(salt)) - - # TODO: Signature is incompatible because parameters were added - # before salt. - - def loads( # type: ignore[override] - self, - s: str | bytes, - max_age: int | None = None, - return_timestamp: bool = False, - salt: str | bytes | None = None, - ) -> t.Any: - """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the - signature validation fails. If a ``max_age`` is provided it will - ensure the signature is not older than that time in seconds. In - case the signature is outdated, :exc:`.SignatureExpired` is - raised. All arguments are forwarded to the signer's - :meth:`~TimestampSigner.unsign` method. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - base64d, timestamp = signer.unsign( - s, max_age=max_age, return_timestamp=True - ) - payload = self.load_payload(base64d) - - if return_timestamp: - return payload, timestamp - - return payload - except SignatureExpired: - # The signature was unsigned successfully but was - # expired. Do not try the next signer. - raise - except BadSignature as err: - last_exception = err - - raise t.cast(BadSignature, last_exception) - - def loads_unsafe( # type: ignore[override] - self, - s: str | bytes, - max_age: int | None = None, - salt: str | bytes | None = None, - ) -> tuple[bool, t.Any]: - return self._loads_unsafe_impl(s, salt, load_kwargs={"max_age": max_age}) diff --git a/venv/lib/python3.10/site-packages/itsdangerous/url_safe.py b/venv/lib/python3.10/site-packages/itsdangerous/url_safe.py deleted file mode 100644 index 56a0793..0000000 --- a/venv/lib/python3.10/site-packages/itsdangerous/url_safe.py +++ /dev/null @@ -1,83 +0,0 @@ -from __future__ import annotations - -import typing as t -import zlib - -from ._json import _CompactJSON -from .encoding import base64_decode -from .encoding import base64_encode -from .exc import BadPayload -from .serializer import _PDataSerializer -from .serializer import Serializer -from .timed import TimedSerializer - - -class URLSafeSerializerMixin(Serializer[str]): - """Mixed in with a regular serializer it will attempt to zlib - compress the string to make it shorter if necessary. It will also - base64 encode the string so that it can safely be placed in a URL. 
- """ - - default_serializer: _PDataSerializer[str] = _CompactJSON - - def load_payload( - self, - payload: bytes, - *args: t.Any, - serializer: t.Any | None = None, - **kwargs: t.Any, - ) -> t.Any: - decompress = False - - if payload.startswith(b"."): - payload = payload[1:] - decompress = True - - try: - json = base64_decode(payload) - except Exception as e: - raise BadPayload( - "Could not base64 decode the payload because of an exception", - original_error=e, - ) from e - - if decompress: - try: - json = zlib.decompress(json) - except Exception as e: - raise BadPayload( - "Could not zlib decompress the payload before decoding the payload", - original_error=e, - ) from e - - return super().load_payload(json, *args, **kwargs) - - def dump_payload(self, obj: t.Any) -> bytes: - json = super().dump_payload(obj) - is_compressed = False - compressed = zlib.compress(json) - - if len(compressed) < (len(json) - 1): - json = compressed - is_compressed = True - - base64d = base64_encode(json) - - if is_compressed: - base64d = b"." + base64d - - return base64d - - -class URLSafeSerializer(URLSafeSerializerMixin, Serializer[str]): - """Works like :class:`.Serializer` but dumps and loads into a URL - safe string consisting of the upper and lowercase character of the - alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ - - -class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer[str]): - """Works like :class:`.TimedSerializer` but dumps and loads into a - URL safe string consisting of the upper and lowercase character of - the alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ diff --git a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/METADATA b/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/METADATA deleted file mode 100644 index ffef2ff..0000000 --- a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/METADATA +++ /dev/null @@ -1,84 +0,0 @@ -Metadata-Version: 2.4 -Name: Jinja2 -Version: 3.1.6 -Summary: A very fast and expressive template engine. -Maintainer-email: Pallets -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Classifier: Typing :: Typed -License-File: LICENSE.txt -Requires-Dist: MarkupSafe>=2.0 -Requires-Dist: Babel>=2.7 ; extra == "i18n" -Project-URL: Changes, https://jinja.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://jinja.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/jinja/ -Provides-Extra: i18n - -# Jinja - -Jinja is a fast, expressive, extensible templating engine. Special -placeholders in the template allow writing code similar to Python -syntax. Then the template is passed data to render the final document. - -It includes: - -- Template inheritance and inclusion. 
-- Define and import macros within templates. -- HTML templates can use autoescaping to prevent XSS from untrusted - user input. -- A sandboxed environment can safely render untrusted templates. -- AsyncIO support for generating templates and calling async - functions. -- I18N support with Babel. -- Templates are compiled to optimized Python code just-in-time and - cached, or can be compiled ahead-of-time. -- Exceptions point to the correct line in templates to make debugging - easier. -- Extensible filters, tests, functions, and even syntax. - -Jinja's philosophy is that while application logic belongs in Python if -possible, it shouldn't make the template designer's job difficult by -restricting functionality too much. - - -## In A Nutshell - -```jinja -{% extends "base.html" %} -{% block title %}Members{% endblock %} -{% block content %} - -{% endblock %} -``` - -## Donate - -The Pallets organization develops and supports Jinja and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today][]. - -[please donate today]: https://palletsprojects.com/donate - -## Contributing - -See our [detailed contributing documentation][contrib] for many ways to -contribute, including reporting issues, requesting features, asking or answering -questions, and making PRs. - -[contrib]: https://palletsprojects.com/contributing/ - diff --git a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/RECORD b/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/RECORD deleted file mode 100644 index 20d4fee..0000000 --- a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/RECORD +++ /dev/null @@ -1,57 +0,0 @@ -jinja2-3.1.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -jinja2-3.1.6.dist-info/METADATA,sha256=aMVUj7Z8QTKhOJjZsx7FDGvqKr3ZFdkh8hQ1XDpkmcg,2871 -jinja2-3.1.6.dist-info/RECORD,, -jinja2-3.1.6.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82 -jinja2-3.1.6.dist-info/entry_points.txt,sha256=OL85gYU1eD8cuPlikifFngXpeBjaxl6rIJ8KkC_3r-I,58 -jinja2-3.1.6.dist-info/licenses/LICENSE.txt,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -jinja2/__init__.py,sha256=xxepO9i7DHsqkQrgBEduLtfoz2QCuT6_gbL4XSN1hbU,1928 -jinja2/__pycache__/__init__.cpython-310.pyc,, -jinja2/__pycache__/_identifier.cpython-310.pyc,, -jinja2/__pycache__/async_utils.cpython-310.pyc,, -jinja2/__pycache__/bccache.cpython-310.pyc,, -jinja2/__pycache__/compiler.cpython-310.pyc,, -jinja2/__pycache__/constants.cpython-310.pyc,, -jinja2/__pycache__/debug.cpython-310.pyc,, -jinja2/__pycache__/defaults.cpython-310.pyc,, -jinja2/__pycache__/environment.cpython-310.pyc,, -jinja2/__pycache__/exceptions.cpython-310.pyc,, -jinja2/__pycache__/ext.cpython-310.pyc,, -jinja2/__pycache__/filters.cpython-310.pyc,, -jinja2/__pycache__/idtracking.cpython-310.pyc,, -jinja2/__pycache__/lexer.cpython-310.pyc,, -jinja2/__pycache__/loaders.cpython-310.pyc,, -jinja2/__pycache__/meta.cpython-310.pyc,, -jinja2/__pycache__/nativetypes.cpython-310.pyc,, -jinja2/__pycache__/nodes.cpython-310.pyc,, -jinja2/__pycache__/optimizer.cpython-310.pyc,, -jinja2/__pycache__/parser.cpython-310.pyc,, -jinja2/__pycache__/runtime.cpython-310.pyc,, -jinja2/__pycache__/sandbox.cpython-310.pyc,, -jinja2/__pycache__/tests.cpython-310.pyc,, -jinja2/__pycache__/utils.cpython-310.pyc,, -jinja2/__pycache__/visitor.cpython-310.pyc,, -jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 
-jinja2/async_utils.py,sha256=vK-PdsuorOMnWSnEkT3iUJRIkTnYgO2T6MnGxDgHI5o,2834 -jinja2/bccache.py,sha256=gh0qs9rulnXo0PhX5jTJy2UHzI8wFnQ63o_vw7nhzRg,14061 -jinja2/compiler.py,sha256=9RpCQl5X88BHllJiPsHPh295Hh0uApvwFJNQuutULeM,74131 -jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 -jinja2/debug.py,sha256=CnHqCDHd-BVGvti_8ZsTolnXNhA3ECsY-6n_2pwU8Hw,6297 -jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 -jinja2/environment.py,sha256=9nhrP7Ch-NbGX00wvyr4yy-uhNHq2OCc60ggGrni_fk,61513 -jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 -jinja2/ext.py,sha256=5PF5eHfh8mXAIxXHHRB2xXbXohi8pE3nHSOxa66uS7E,31875 -jinja2/filters.py,sha256=PQ_Egd9n9jSgtnGQYyF4K5j2nYwhUIulhPnyimkdr-k,55212 -jinja2/idtracking.py,sha256=-ll5lIp73pML3ErUYiIJj7tdmWxcH_IlDv3yA_hiZYo,10555 -jinja2/lexer.py,sha256=LYiYio6br-Tep9nPcupWXsPEtjluw3p1mU-lNBVRUfk,29786 -jinja2/loaders.py,sha256=wIrnxjvcbqh5VwW28NSkfotiDq8qNCxIOSFbGUiSLB4,24055 -jinja2/meta.py,sha256=OTDPkaFvU2Hgvx-6akz7154F8BIWaRmvJcBFvwopHww,4397 -jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210 -jinja2/nodes.py,sha256=m1Duzcr6qhZI8JQ6VyJgUNinjAf5bQzijSmDnMsvUx8,34579 -jinja2/optimizer.py,sha256=rJnCRlQ7pZsEEmMhsQDgC_pKyDHxP5TPS6zVPGsgcu8,1651 -jinja2/parser.py,sha256=lLOFy3sEmHc5IaEHRiH1sQVnId2moUQzhyeJZTtdY30,40383 -jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jinja2/runtime.py,sha256=gDk-GvdriJXqgsGbHgrcKTP0Yp6zPXzhzrIpCFH3jAU,34249 -jinja2/sandbox.py,sha256=Mw2aitlY2I8la7FYhcX2YG9BtUYcLnD0Gh3d29cDWrY,15009 -jinja2/tests.py,sha256=VLsBhVFnWg-PxSBz1MhRnNWgP1ovXk3neO1FLQMeC9Q,5926 -jinja2/utils.py,sha256=rRp3o9e7ZKS4fyrWRbELyLcpuGVTFcnooaOa1qx_FIk,24129 -jinja2/visitor.py,sha256=EcnL1PIwf_4RVCOMxsRNuR8AXHbS1qfAdMOE2ngKJz4,3557 diff --git a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/WHEEL b/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/WHEEL deleted file mode 100644 index 23d2d7e..0000000 --- a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.11.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/entry_points.txt deleted file mode 100644 index abc3eae..0000000 --- a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[babel.extractors] -jinja2=jinja2.ext:babel_extract[i18n] - diff --git a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt b/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt deleted file mode 100644 index c37cae4..0000000 --- a/venv/lib/python3.10/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. 
Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.10/site-packages/jinja2/__init__.py b/venv/lib/python3.10/site-packages/jinja2/__init__.py deleted file mode 100644 index 1a423a3..0000000 --- a/venv/lib/python3.10/site-packages/jinja2/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Jinja is a template engine written in pure Python. It provides a -non-XML syntax that supports inline expressions and an optional -sandboxed environment. -""" - -from .bccache import BytecodeCache as BytecodeCache -from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache -from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache -from .environment import Environment as Environment -from .environment import Template as Template -from .exceptions import TemplateAssertionError as TemplateAssertionError -from .exceptions import TemplateError as TemplateError -from .exceptions import TemplateNotFound as TemplateNotFound -from .exceptions import TemplateRuntimeError as TemplateRuntimeError -from .exceptions import TemplatesNotFound as TemplatesNotFound -from .exceptions import TemplateSyntaxError as TemplateSyntaxError -from .exceptions import UndefinedError as UndefinedError -from .loaders import BaseLoader as BaseLoader -from .loaders import ChoiceLoader as ChoiceLoader -from .loaders import DictLoader as DictLoader -from .loaders import FileSystemLoader as FileSystemLoader -from .loaders import FunctionLoader as FunctionLoader -from .loaders import ModuleLoader as ModuleLoader -from .loaders import PackageLoader as PackageLoader -from .loaders import PrefixLoader as PrefixLoader -from .runtime import ChainableUndefined as ChainableUndefined -from .runtime import DebugUndefined as DebugUndefined -from .runtime import make_logging_undefined as make_logging_undefined -from .runtime import StrictUndefined as StrictUndefined -from .runtime import Undefined as Undefined -from .utils import clear_caches as clear_caches -from .utils import is_undefined as is_undefined -from .utils import pass_context as pass_context -from .utils import pass_environment as pass_environment -from .utils import pass_eval_context as pass_eval_context -from .utils import select_autoescape as select_autoescape - -__version__ = "3.1.6" diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index a818a6d..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git 
a/venv/lib/python3.10/site-packages/jinja2/__pycache__/_identifier.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/_identifier.cpython-310.pyc deleted file mode 100644 index 70df61b..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/_identifier.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/async_utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/async_utils.cpython-310.pyc deleted file mode 100644 index 19c0303..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/async_utils.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/bccache.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/bccache.cpython-310.pyc deleted file mode 100644 index 5fb44fa..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/bccache.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/compiler.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/compiler.cpython-310.pyc deleted file mode 100644 index 3f7cc00..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/compiler.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/constants.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/constants.cpython-310.pyc deleted file mode 100644 index 18275a3..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/constants.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/debug.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/debug.cpython-310.pyc deleted file mode 100644 index f45f307..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/debug.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/defaults.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/defaults.cpython-310.pyc deleted file mode 100644 index 19633c9..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/defaults.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/environment.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/environment.cpython-310.pyc deleted file mode 100644 index 278b1bd..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/environment.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/exceptions.cpython-310.pyc deleted file mode 100644 index c9983fa..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/exceptions.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/ext.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/ext.cpython-310.pyc deleted file mode 100644 index 1a749ca..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/ext.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/filters.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/filters.cpython-310.pyc deleted file mode 100644 index 92e9cc0..0000000 Binary 
files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/filters.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/idtracking.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/idtracking.cpython-310.pyc deleted file mode 100644 index 08ce64b..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/idtracking.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/lexer.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/lexer.cpython-310.pyc deleted file mode 100644 index ae83c24..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/lexer.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/loaders.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/loaders.cpython-310.pyc deleted file mode 100644 index f8e3dc9..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/loaders.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/meta.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/meta.cpython-310.pyc deleted file mode 100644 index 8fd9af4..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/meta.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/nativetypes.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/nativetypes.cpython-310.pyc deleted file mode 100644 index 14b1981..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/nativetypes.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/nodes.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/nodes.cpython-310.pyc deleted file mode 100644 index ce9640c..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/nodes.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/optimizer.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/optimizer.cpython-310.pyc deleted file mode 100644 index 9c34526..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/optimizer.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/parser.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/parser.cpython-310.pyc deleted file mode 100644 index e7d15de..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/parser.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/runtime.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/runtime.cpython-310.pyc deleted file mode 100644 index b945024..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/runtime.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/sandbox.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/sandbox.cpython-310.pyc deleted file mode 100644 index b3d819a..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/sandbox.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/tests.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/jinja2/__pycache__/tests.cpython-310.pyc deleted file mode 100644 index 1dd2412..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/tests.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/utils.cpython-310.pyc deleted file mode 100644 index b82645c..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/utils.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/__pycache__/visitor.cpython-310.pyc b/venv/lib/python3.10/site-packages/jinja2/__pycache__/visitor.cpython-310.pyc deleted file mode 100644 index c61f75c..0000000 Binary files a/venv/lib/python3.10/site-packages/jinja2/__pycache__/visitor.cpython-310.pyc and /dev/null differ diff --git a/venv/lib/python3.10/site-packages/jinja2/_identifier.py b/venv/lib/python3.10/site-packages/jinja2/_identifier.py deleted file mode 100644 index 928c150..0000000 --- a/venv/lib/python3.10/site-packages/jinja2/_identifier.py +++ /dev/null @@ -1,6 +0,0 @@ -import re - -# generated by scripts/generate_identifier_pattern.py -pattern = re.compile( - r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 -) diff --git a/venv/lib/python3.10/site-packages/jinja2/async_utils.py b/venv/lib/python3.10/site-packages/jinja2/async_utils.py deleted file mode 100644 index f0c1402..0000000 --- a/venv/lib/python3.10/site-packages/jinja2/async_utils.py +++ /dev/null @@ -1,99 +0,0 @@ -import inspect -import typing as t -from functools import WRAPPER_ASSIGNMENTS -from functools import wraps - -from .utils import _PassArg -from .utils import pass_eval_context - -if t.TYPE_CHECKING: - import typing_extensions as te - -V = t.TypeVar("V") - - -def async_variant(normal_func): # type: ignore - def decorator(async_func): # type: ignore - pass_arg = _PassArg.from_obj(normal_func) - need_eval_context = pass_arg is None - - if pass_arg is _PassArg.environment: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].is_async) - - else: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].environment.is_async) - - # Take the doc and annotations from the sync function, but the - # name from the async function. Pallets-Sphinx-Themes - # build_function_directive expects __wrapped__ to point to the - # sync function. 
- async_func_attrs = ("__module__", "__name__", "__qualname__") - normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) - - @wraps(normal_func, assigned=normal_func_attrs) - @wraps(async_func, assigned=async_func_attrs, updated=()) - def wrapper(*args, **kwargs): # type: ignore - b = is_async(args) - - if need_eval_context: - args = args[1:] - - if b: - return async_func(*args, **kwargs) - - return normal_func(*args, **kwargs) - - if need_eval_context: - wrapper = pass_eval_context(wrapper) - - wrapper.jinja_async_variant = True # type: ignore[attr-defined] - return wrapper - - return decorator - - -_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} - - -async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": - # Avoid a costly call to isawaitable - if type(value) in _common_primitives: - return t.cast("V", value) - - if inspect.isawaitable(value): - return await t.cast("t.Awaitable[V]", value) - - return value - - -class _IteratorToAsyncIterator(t.Generic[V]): - def __init__(self, iterator: "t.Iterator[V]"): - self._iterator = iterator - - def __aiter__(self) -> "te.Self": - return self - - async def __anext__(self) -> V: - try: - return next(self._iterator) - except StopIteration as e: - raise StopAsyncIteration(e.value) from e - - -def auto_aiter( - iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> "t.AsyncIterator[V]": - if hasattr(iterable, "__aiter__"): - return iterable.__aiter__() - else: - return _IteratorToAsyncIterator(iter(iterable)) - - -async def auto_to_list( - value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> t.List["V"]: - return [x async for x in auto_aiter(value)] diff --git a/venv/lib/python3.10/site-packages/jinja2/bccache.py b/venv/lib/python3.10/site-packages/jinja2/bccache.py deleted file mode 100644 index ada8b09..0000000 --- a/venv/lib/python3.10/site-packages/jinja2/bccache.py +++ /dev/null @@ -1,408 +0,0 @@ -"""The optional bytecode cache system. This is useful if you have very -complex template situations and the compilation of all those templates -slows down your application too much. - -Situations where this is useful are often forking web applications that -are initialized on the first request. -""" - -import errno -import fnmatch -import marshal -import os -import pickle -import stat -import sys -import tempfile -import typing as t -from hashlib import sha1 -from io import BytesIO -from types import CodeType - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .environment import Environment - - class _MemcachedClient(te.Protocol): - def get(self, key: str) -> bytes: ... - - def set( - self, key: str, value: bytes, timeout: t.Optional[int] = None - ) -> None: ... - - -bc_version = 5 -# Magic bytes to identify Jinja bytecode cache files. Contains the -# Python major and minor version to avoid loading incompatible bytecode -# if a project upgrades its Python version. -bc_magic = ( - b"j2" - + pickle.dumps(bc_version, 2) - + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) -) - - -class Bucket: - """Buckets are used to store the bytecode for one template. It's created - and initialized by the bytecode cache and passed to the loading functions. - - The buckets get an internal checksum from the cache assigned and use this - to automatically reject outdated cache material. Individual bytecode - cache subclasses don't have to care about cache invalidation. 
- """ - - def __init__(self, environment: "Environment", key: str, checksum: str) -> None: - self.environment = environment - self.key = key - self.checksum = checksum - self.reset() - - def reset(self) -> None: - """Resets the bucket (unloads the bytecode).""" - self.code: t.Optional[CodeType] = None - - def load_bytecode(self, f: t.BinaryIO) -> None: - """Loads bytecode from a file or file like object.""" - # make sure the magic header is correct - magic = f.read(len(bc_magic)) - if magic != bc_magic: - self.reset() - return - # the source code of the file changed, we need to reload - checksum = pickle.load(f) - if self.checksum != checksum: - self.reset() - return - # if marshal_load fails then we need to reload - try: - self.code = marshal.load(f) - except (EOFError, ValueError, TypeError): - self.reset() - return - - def write_bytecode(self, f: t.IO[bytes]) -> None: - """Dump the bytecode into the file or file like object passed.""" - if self.code is None: - raise TypeError("can't write empty bucket") - f.write(bc_magic) - pickle.dump(self.checksum, f, 2) - marshal.dump(self.code, f) - - def bytecode_from_string(self, string: bytes) -> None: - """Load bytecode from bytes.""" - self.load_bytecode(BytesIO(string)) - - def bytecode_to_string(self) -> bytes: - """Return the bytecode as bytes.""" - out = BytesIO() - self.write_bytecode(out) - return out.getvalue() - - -class BytecodeCache: - """To implement your own bytecode cache you have to subclass this class - and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of - these methods are passed a :class:`~jinja2.bccache.Bucket`. - - A very basic bytecode cache that saves the bytecode on the file system:: - - from os import path - - class MyCache(BytecodeCache): - - def __init__(self, directory): - self.directory = directory - - def load_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - if path.exists(filename): - with open(filename, 'rb') as f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - with open(filename, 'wb') as f: - bucket.write_bytecode(f) - - A more advanced version of a filesystem based bytecode cache is part of - Jinja. - """ - - def load_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to load bytecode into a - bucket. If they are not able to find code in the cache for the - bucket, it must not do anything. - """ - raise NotImplementedError() - - def dump_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to write the bytecode - from a bucket back to the cache. If it unable to do so it must not - fail silently but raise an exception. - """ - raise NotImplementedError() - - def clear(self) -> None: - """Clears the cache. This method is not used by Jinja but should be - implemented to allow applications to clear the bytecode cache used - by a particular environment. 
- """ - - def get_cache_key( - self, name: str, filename: t.Optional[t.Union[str]] = None - ) -> str: - """Returns the unique hash key for this template name.""" - hash = sha1(name.encode("utf-8")) - - if filename is not None: - hash.update(f"|{filename}".encode()) - - return hash.hexdigest() - - def get_source_checksum(self, source: str) -> str: - """Returns a checksum for the source.""" - return sha1(source.encode("utf-8")).hexdigest() - - def get_bucket( - self, - environment: "Environment", - name: str, - filename: t.Optional[str], - source: str, - ) -> Bucket: - """Return a cache bucket for the given template. All arguments are - mandatory but filename may be `None`. - """ - key = self.get_cache_key(name, filename) - checksum = self.get_source_checksum(source) - bucket = Bucket(environment, key, checksum) - self.load_bytecode(bucket) - return bucket - - def set_bucket(self, bucket: Bucket) -> None: - """Put the bucket into the cache.""" - self.dump_bytecode(bucket) - - -class FileSystemBytecodeCache(BytecodeCache): - """A bytecode cache that stores bytecode on the filesystem. It accepts - two arguments: The directory where the cache items are stored and a - pattern string that is used to build the filename. - - If no directory is specified a default cache directory is selected. On - Windows the user's temp directory is used, on UNIX systems a directory - is created for the user in the system temp directory. - - The pattern can be used to have multiple separate caches operate on the - same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` - is replaced with the cache key. - - >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') - - This bytecode cache supports clearing of the cache using the clear method. - """ - - def __init__( - self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" - ) -> None: - if directory is None: - directory = self._get_default_cache_dir() - self.directory = directory - self.pattern = pattern - - def _get_default_cache_dir(self) -> str: - def _unsafe_dir() -> "te.NoReturn": - raise RuntimeError( - "Cannot determine safe temp directory. You " - "need to explicitly provide one." - ) - - tmpdir = tempfile.gettempdir() - - # On windows the temporary directory is used specific unless - # explicitly forced otherwise. We can just use that. - if os.name == "nt": - return tmpdir - if not hasattr(os, "getuid"): - _unsafe_dir() - - dirname = f"_jinja2-cache-{os.getuid()}" - actual_dir = os.path.join(tmpdir, dirname) - - try: - os.mkdir(actual_dir, stat.S_IRWXU) - except OSError as e: - if e.errno != errno.EEXIST: - raise - try: - os.chmod(actual_dir, stat.S_IRWXU) - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - except OSError as e: - if e.errno != errno.EEXIST: - raise - - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - - return actual_dir - - def _get_cache_filename(self, bucket: Bucket) -> str: - return os.path.join(self.directory, self.pattern % (bucket.key,)) - - def load_bytecode(self, bucket: Bucket) -> None: - filename = self._get_cache_filename(bucket) - - # Don't test for existence before opening the file, since the - # file could disappear after the test before the open. 
- try: - f = open(filename, "rb") - except (FileNotFoundError, IsADirectoryError, PermissionError): - # PermissionError can occur on Windows when an operation is - # in progress, such as calling clear(). - return - - with f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket: Bucket) -> None: - # Write to a temporary file, then rename to the real name after - # writing. This avoids another process reading the file before - # it is fully written. - name = self._get_cache_filename(bucket) - f = tempfile.NamedTemporaryFile( - mode="wb", - dir=os.path.dirname(name), - prefix=os.path.basename(name), - suffix=".tmp", - delete=False, - ) - - def remove_silent() -> None: - try: - os.remove(f.name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - pass - - try: - with f: - bucket.write_bytecode(f) - except BaseException: - remove_silent() - raise - - try: - os.replace(f.name, name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - remove_silent() - except BaseException: - remove_silent() - raise - - def clear(self) -> None: - # imported lazily here because google app-engine doesn't support - # write access on the file system and the function does not exist - # normally. - from os import remove - - files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) - for filename in files: - try: - remove(os.path.join(self.directory, filename)) - except OSError: - pass - - -class MemcachedBytecodeCache(BytecodeCache): - """This class implements a bytecode cache that uses a memcache cache for - storing the information. It does not enforce a specific memcache library - (tummy's memcache or cmemcache) but will accept any class that provides - the minimal interface required. - - Libraries compatible with this class: - - - `cachelib `_ - - `python-memcached `_ - - (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only text. You can however pass - the underlying cache client to the bytecode cache which is available - as `django.core.cache.cache._client`.) - - The minimal interface for the client passed to the constructor is this: - - .. class:: MinimalClientInterface - - .. method:: set(key, value[, timeout]) - - Stores the bytecode in the cache. `value` is a string and - `timeout` the timeout of the key. If timeout is not provided - a default timeout or no timeout should be assumed, if it's - provided it's an integer with the number of seconds the cache - item should exist. - - .. method:: get(key) - - Returns the value for the cache key. If the item does not - exist in the cache the return value must be `None`. - - The other arguments to the constructor are the prefix for all keys that - is added before the actual cache key and the timeout for the bytecode in - the cache system. We recommend a high (or no) timeout. - - This bytecode cache does not support clearing of used items in the cache. - The clear method is a no-operation function. - - .. versionadded:: 2.7 - Added support for ignoring memcache errors through the - `ignore_memcache_errors` parameter. 
- """ - - def __init__( - self, - client: "_MemcachedClient", - prefix: str = "jinja2/bytecode/", - timeout: t.Optional[int] = None, - ignore_memcache_errors: bool = True, - ): - self.client = client - self.prefix = prefix - self.timeout = timeout - self.ignore_memcache_errors = ignore_memcache_errors - - def load_bytecode(self, bucket: Bucket) -> None: - try: - code = self.client.get(self.prefix + bucket.key) - except Exception: - if not self.ignore_memcache_errors: - raise - else: - bucket.bytecode_from_string(code) - - def dump_bytecode(self, bucket: Bucket) -> None: - key = self.prefix + bucket.key - value = bucket.bytecode_to_string() - - try: - if self.timeout is not None: - self.client.set(key, value, self.timeout) - else: - self.client.set(key, value) - except Exception: - if not self.ignore_memcache_errors: - raise diff --git a/venv/lib/python3.10/site-packages/jinja2/compiler.py b/venv/lib/python3.10/site-packages/jinja2/compiler.py deleted file mode 100644 index a4ff6a1..0000000 --- a/venv/lib/python3.10/site-packages/jinja2/compiler.py +++ /dev/null @@ -1,1998 +0,0 @@ -"""Compiles nodes from the parser into Python code.""" - -import typing as t -from contextlib import contextmanager -from functools import update_wrapper -from io import StringIO -from itertools import chain -from keyword import iskeyword as is_python_keyword - -from markupsafe import escape -from markupsafe import Markup - -from . import nodes -from .exceptions import TemplateAssertionError -from .idtracking import Symbols -from .idtracking import VAR_LOAD_ALIAS -from .idtracking import VAR_LOAD_PARAMETER -from .idtracking import VAR_LOAD_RESOLVE -from .idtracking import VAR_LOAD_UNDEFINED -from .nodes import EvalContext -from .optimizer import Optimizer -from .utils import _PassArg -from .utils import concat -from .visitor import NodeVisitor - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .environment import Environment - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -operators = { - "eq": "==", - "ne": "!=", - "gt": ">", - "gteq": ">=", - "lt": "<", - "lteq": "<=", - "in": "in", - "notin": "not in", -} - - -def optimizeconst(f: F) -> F: - def new_func( - self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any - ) -> t.Any: - # Only optimize if the frame is not volatile - if self.optimizer is not None and not frame.eval_ctx.volatile: - new_node = self.optimizer.visit(node, frame.eval_ctx) - - if new_node != node: - return self.visit(new_node, frame) - - return f(self, node, frame, **kwargs) - - return update_wrapper(new_func, f) # type: ignore[return-value] - - -def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed and op in self.environment.intercepted_binops # type: ignore - ): - self.write(f"environment.call_binop(context, {op!r}, ") - self.visit(node.left, frame) - self.write(", ") - self.visit(node.right, frame) - else: - self.write("(") - self.visit(node.left, frame) - self.write(f" {op} ") - self.visit(node.right, frame) - - self.write(")") - - return visitor - - -def _make_unop( - op: str, -) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed and op in self.environment.intercepted_unops # type: ignore - ): - 
self.write(f"environment.call_unop(context, {op!r}, ") - self.visit(node.node, frame) - else: - self.write("(" + op) - self.visit(node.node, frame) - - self.write(")") - - return visitor - - -def generate( - node: nodes.Template, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, -) -> t.Optional[str]: - """Generate the python source for a node tree.""" - if not isinstance(node, nodes.Template): - raise TypeError("Can't compile non template nodes") - - generator = environment.code_generator_class( - environment, name, filename, stream, defer_init, optimized - ) - generator.visit(node) - - if stream is None: - return generator.stream.getvalue() # type: ignore - - return None - - -def has_safe_repr(value: t.Any) -> bool: - """Does the node have a safe representation?""" - if value is None or value is NotImplemented or value is Ellipsis: - return True - - if type(value) in {bool, int, float, complex, range, str, Markup}: - return True - - if type(value) in {tuple, list, set, frozenset}: - return all(has_safe_repr(v) for v in value) - - if type(value) is dict: # noqa E721 - return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) - - return False - - -def find_undeclared( - nodes: t.Iterable[nodes.Node], names: t.Iterable[str] -) -> t.Set[str]: - """Check if the names passed are accessed undeclared. The return value - is a set of all the undeclared names from the sequence of names found. - """ - visitor = UndeclaredNameVisitor(names) - try: - for node in nodes: - visitor.visit(node) - except VisitorExit: - pass - return visitor.undeclared - - -class MacroRef: - def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: - self.node = node - self.accesses_caller = False - self.accesses_kwargs = False - self.accesses_varargs = False - - -class Frame: - """Holds compile time information for us.""" - - def __init__( - self, - eval_ctx: EvalContext, - parent: t.Optional["Frame"] = None, - level: t.Optional[int] = None, - ) -> None: - self.eval_ctx = eval_ctx - - # the parent of this frame - self.parent = parent - - if parent is None: - self.symbols = Symbols(level=level) - - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = False - - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer: t.Optional[str] = None - - # the name of the block we're in, otherwise None. - self.block: t.Optional[str] = None - - else: - self.symbols = Symbols(parent.symbols, level=level) - self.require_output_check = parent.require_output_check - self.buffer = parent.buffer - self.block = parent.block - - # a toplevel frame is the root + soft frames such as if conditions. - self.toplevel = False - - # the root frame is basically just the outermost frame, so no if - # conditions. This information is used to optimize inheritance - # situations. - self.rootlevel = False - - # variables set inside of loops and blocks should not affect outer frames, - # but they still needs to be kept track of as part of the active context. 
- self.loop_frame = False - self.block_frame = False - - # track whether the frame is being used in an if-statement or conditional - # expression as it determines which errors should be raised during runtime - # or compile time. - self.soft_frame = False - - def copy(self) -> "te.Self": - """Create a copy of the current one.""" - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.symbols = self.symbols.copy() - return rv - - def inner(self, isolated: bool = False) -> "Frame": - """Return an inner frame.""" - if isolated: - return Frame(self.eval_ctx, level=self.symbols.level + 1) - return Frame(self.eval_ctx, self) - - def soft(self) -> "te.Self": - """Return a soft frame. A soft frame may not be modified as - standalone thing as it shares the resources with the frame it - was created of, but it's not a rootlevel frame any longer. - - This is only used to implement if-statements and conditional - expressions. - """ - rv = self.copy() - rv.rootlevel = False - rv.soft_frame = True - return rv - - __copy__ = copy - - -class VisitorExit(RuntimeError): - """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" - - -class DependencyFinderVisitor(NodeVisitor): - """A visitor that collects filter and test calls.""" - - def __init__(self) -> None: - self.filters: t.Set[str] = set() - self.tests: t.Set[str] = set() - - def visit_Filter(self, node: nodes.Filter) -> None: - self.generic_visit(node) - self.filters.add(node.name) - - def visit_Test(self, node: nodes.Test) -> None: - self.generic_visit(node) - self.tests.add(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting at blocks.""" - - -class UndeclaredNameVisitor(NodeVisitor): - """A visitor that checks if a name is accessed without being - declared. This is different from the frame visitor as it will - not stop at closure frames. - """ - - def __init__(self, names: t.Iterable[str]) -> None: - self.names = set(names) - self.undeclared: t.Set[str] = set() - - def visit_Name(self, node: nodes.Name) -> None: - if node.ctx == "load" and node.name in self.names: - self.undeclared.add(node.name) - if self.undeclared == self.names: - raise VisitorExit() - else: - self.names.discard(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting a blocks.""" - - -class CompilerExit(Exception): - """Raised if the compiler encountered a situation where it just - doesn't make sense to further process the code. Any block that - raises such an exception is not further processed. - """ - - -class CodeGenerator(NodeVisitor): - def __init__( - self, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, - ) -> None: - if stream is None: - stream = StringIO() - self.environment = environment - self.name = name - self.filename = filename - self.stream = stream - self.created_block_context = False - self.defer_init = defer_init - self.optimizer: t.Optional[Optimizer] = None - - if optimized: - self.optimizer = Optimizer(environment) - - # aliases for imports - self.import_aliases: t.Dict[str, str] = {} - - # a registry for all blocks. Because blocks are moved out - # into the global python scope they are registered here - self.blocks: t.Dict[str, nodes.Block] = {} - - # the number of extends statements so far - self.extends_so_far = 0 - - # some templates have a rootlevel extends. 
In this case we - # can safely assume that we're a child template and do some - # more optimizations. - self.has_known_extends = False - - # the current line number - self.code_lineno = 1 - - # registry of all filters and tests (global, not block local) - self.tests: t.Dict[str, str] = {} - self.filters: t.Dict[str, str] = {} - - # the debug information - self.debug_info: t.List[t.Tuple[int, int]] = [] - self._write_debug_info: t.Optional[int] = None - - # the number of new lines before the next write() - self._new_lines = 0 - - # the line number of the last written statement - self._last_line = 0 - - # true if nothing was written so far. - self._first_write = True - - # used by the `temporary_identifier` method to get new - # unique, temporary identifier - self._last_identifier = 0 - - # the current indentation - self._indentation = 0 - - # Tracks toplevel assignments - self._assign_stack: t.List[t.Set[str]] = [] - - # Tracks parameter definition blocks - self._param_def_block: t.List[t.Set[str]] = [] - - # Tracks the current context. - self._context_reference_stack = ["context"] - - @property - def optimized(self) -> bool: - return self.optimizer is not None - - # -- Various compilation helpers - - def fail(self, msg: str, lineno: int) -> "te.NoReturn": - """Fail with a :exc:`TemplateAssertionError`.""" - raise TemplateAssertionError(msg, lineno, self.name, self.filename) - - def temporary_identifier(self) -> str: - """Get a new unique identifier.""" - self._last_identifier += 1 - return f"t_{self._last_identifier}" - - def buffer(self, frame: Frame) -> None: - """Enable buffering for the frame from that point onwards.""" - frame.buffer = self.temporary_identifier() - self.writeline(f"{frame.buffer} = []") - - def return_buffer_contents( - self, frame: Frame, force_unescaped: bool = False - ) -> None: - """Return the buffer contents of the frame.""" - if not force_unescaped: - if frame.eval_ctx.volatile: - self.writeline("if context.eval_ctx.autoescape:") - self.indent() - self.writeline(f"return Markup(concat({frame.buffer}))") - self.outdent() - self.writeline("else:") - self.indent() - self.writeline(f"return concat({frame.buffer})") - self.outdent() - return - elif frame.eval_ctx.autoescape: - self.writeline(f"return Markup(concat({frame.buffer}))") - return - self.writeline(f"return concat({frame.buffer})") - - def indent(self) -> None: - """Indent by one.""" - self._indentation += 1 - - def outdent(self, step: int = 1) -> None: - """Outdent by step.""" - self._indentation -= step - - def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: - """Yield or write into the frame buffer.""" - if frame.buffer is None: - self.writeline("yield ", node) - else: - self.writeline(f"{frame.buffer}.append(", node) - - def end_write(self, frame: Frame) -> None: - """End the writing process started by `start_write`.""" - if frame.buffer is not None: - self.write(")") - - def simple_write( - self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None - ) -> None: - """Simple shortcut for start_write + write + end_write.""" - self.start_write(frame, node) - self.write(s) - self.end_write(frame) - - def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: - """Visit a list of nodes as block in a frame. If the current frame - is no buffer a dummy ``if 0: yield None`` is written automatically. 
- """ - try: - self.writeline("pass") - for node in nodes: - self.visit(node, frame) - except CompilerExit: - pass - - def write(self, x: str) -> None: - """Write a string into the output stream.""" - if self._new_lines: - if not self._first_write: - self.stream.write("\n" * self._new_lines) - self.code_lineno += self._new_lines - if self._write_debug_info is not None: - self.debug_info.append((self._write_debug_info, self.code_lineno)) - self._write_debug_info = None - self._first_write = False - self.stream.write(" " * self._indentation) - self._new_lines = 0 - self.stream.write(x) - - def writeline( - self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 - ) -> None: - """Combination of newline and write.""" - self.newline(node, extra) - self.write(x) - - def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: - """Add one or more newlines before the next write.""" - self._new_lines = max(self._new_lines, 1 + extra) - if node is not None and node.lineno != self._last_line: - self._write_debug_info = node.lineno - self._last_line = node.lineno - - def signature( - self, - node: t.Union[nodes.Call, nodes.Filter, nodes.Test], - frame: Frame, - extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> None: - """Writes a function call to the stream for the current node. - A leading comma is added automatically. The extra keyword - arguments may not include python keywords otherwise a syntax - error could occur. The extra keyword arguments should be given - as python dict. - """ - # if any of the given keyword arguments is a python keyword - # we have to make sure that no invalid call is created. - kwarg_workaround = any( - is_python_keyword(t.cast(str, k)) - for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) - ) - - for arg in node.args: - self.write(", ") - self.visit(arg, frame) - - if not kwarg_workaround: - for kwarg in node.kwargs: - self.write(", ") - self.visit(kwarg, frame) - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f", {key}={value}") - if node.dyn_args: - self.write(", *") - self.visit(node.dyn_args, frame) - - if kwarg_workaround: - if node.dyn_kwargs is not None: - self.write(", **dict({") - else: - self.write(", **{") - for kwarg in node.kwargs: - self.write(f"{kwarg.key!r}: ") - self.visit(kwarg.value, frame) - self.write(", ") - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f"{key!r}: {value}, ") - if node.dyn_kwargs is not None: - self.write("}, **") - self.visit(node.dyn_kwargs, frame) - self.write(")") - else: - self.write("}") - - elif node.dyn_kwargs is not None: - self.write(", **") - self.visit(node.dyn_kwargs, frame) - - def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: - """Find all filter and test names used in the template and - assign them to variables in the compiled namespace. Checking - that the names are registered with the environment is done when - compiling the Filter and Test nodes. If the node is in an If or - CondExpr node, the check is done at runtime instead. - - .. versionchanged:: 3.0 - Filters and tests in If and CondExpr nodes are checked at - runtime instead of compile time. 
- """ - visitor = DependencyFinderVisitor() - - for node in nodes: - visitor.visit(node) - - for id_map, names, dependency in ( - (self.filters, visitor.filters, "filters"), - ( - self.tests, - visitor.tests, - "tests", - ), - ): - for name in sorted(names): - if name not in id_map: - id_map[name] = self.temporary_identifier() - - # add check during runtime that dependencies used inside of executed - # blocks are defined, as this step may be skipped during compile time - self.writeline("try:") - self.indent() - self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") - self.outdent() - self.writeline("except KeyError:") - self.indent() - self.writeline("@internalcode") - self.writeline(f"def {id_map[name]}(*unused):") - self.indent() - self.writeline( - f'raise TemplateRuntimeError("No {dependency[:-1]}' - f' named {name!r} found.")' - ) - self.outdent() - self.outdent() - - def enter_frame(self, frame: Frame) -> None: - undefs = [] - for target, (action, param) in frame.symbols.loads.items(): - if action == VAR_LOAD_PARAMETER: - pass - elif action == VAR_LOAD_RESOLVE: - self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") - elif action == VAR_LOAD_ALIAS: - self.writeline(f"{target} = {param}") - elif action == VAR_LOAD_UNDEFINED: - undefs.append(target) - else: - raise NotImplementedError("unknown load instruction") - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: - if not with_python_scope: - undefs = [] - for target in frame.symbols.loads: - undefs.append(target) - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: - return async_value if self.environment.is_async else sync_value - - def func(self, name: str) -> str: - return f"{self.choose_async()}def {name}" - - def macro_body( - self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame - ) -> t.Tuple[Frame, MacroRef]: - """Dump the function def of a macro or call block.""" - frame = frame.inner() - frame.symbols.analyze_node(node) - macro_ref = MacroRef(node) - - explicit_caller = None - skip_special_params = set() - args = [] - - for idx, arg in enumerate(node.args): - if arg.name == "caller": - explicit_caller = idx - if arg.name in ("kwargs", "varargs"): - skip_special_params.add(arg.name) - args.append(frame.symbols.ref(arg.name)) - - undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) - - if "caller" in undeclared: - # In older Jinja versions there was a bug that allowed caller - # to retain the special behavior even if it was mentioned in - # the argument list. However thankfully this was only really - # working if it was the last argument. So we are explicitly - # checking this now and error out if it is anywhere else in - # the argument list. 
- if explicit_caller is not None: - try: - node.defaults[explicit_caller - len(node.args)] - except IndexError: - self.fail( - "When defining macros or call blocks the " - 'special "caller" argument must be omitted ' - "or be given a default.", - node.lineno, - ) - else: - args.append(frame.symbols.declare_parameter("caller")) - macro_ref.accesses_caller = True - if "kwargs" in undeclared and "kwargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("kwargs")) - macro_ref.accesses_kwargs = True - if "varargs" in undeclared and "varargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("varargs")) - macro_ref.accesses_varargs = True - - # macros are delayed, they never require output checks - frame.require_output_check = False - frame.symbols.analyze_node(node) - self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) - self.indent() - - self.buffer(frame) - self.enter_frame(frame) - - self.push_parameter_definitions(frame) - for idx, arg in enumerate(node.args): - ref = frame.symbols.ref(arg.name) - self.writeline(f"if {ref} is missing:") - self.indent() - try: - default = node.defaults[idx - len(node.args)] - except IndexError: - self.writeline( - f'{ref} = undefined("parameter {arg.name!r} was not provided",' - f" name={arg.name!r})" - ) - else: - self.writeline(f"{ref} = ") - self.visit(default, frame) - self.mark_parameter_stored(ref) - self.outdent() - self.pop_parameter_definitions() - - self.blockvisit(node.body, frame) - self.return_buffer_contents(frame, force_unescaped=True) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - return frame, macro_ref - - def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: - """Dump the macro definition for the def created by macro_body.""" - arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) - name = getattr(macro_ref.node, "name", None) - if len(macro_ref.node.args) == 1: - arg_tuple += "," - self.write( - f"Macro(environment, macro, {name!r}, ({arg_tuple})," - f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," - f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" - ) - - def position(self, node: nodes.Node) -> str: - """Return a human readable position for the node.""" - rv = f"line {node.lineno}" - if self.name is not None: - rv = f"{rv} in {self.name!r}" - return rv - - def dump_local_context(self, frame: Frame) -> str: - items_kv = ", ".join( - f"{name!r}: {target}" - for name, target in frame.symbols.dump_stores().items() - ) - return f"{{{items_kv}}}" - - def write_commons(self) -> None: - """Writes a common preamble that is used by root and block functions. - Primarily this sets up common local helpers and enforces a generator - through a dead branch. - """ - self.writeline("resolve = context.resolve_or_missing") - self.writeline("undefined = environment.undefined") - self.writeline("concat = environment.concat") - # always use the standard Undefined class for the implicit else of - # conditional expressions - self.writeline("cond_expr_undefined = Undefined") - self.writeline("if 0: yield None") - - def push_parameter_definitions(self, frame: Frame) -> None: - """Pushes all parameter targets from the given frame into a local - stack that permits tracking of yet to be assigned parameters. In - particular this enables the optimization from `visit_Name` to skip - undefined expressions for parameters in macros as macros can reference - otherwise unbound parameters. 
- """ - self._param_def_block.append(frame.symbols.dump_param_targets()) - - def pop_parameter_definitions(self) -> None: - """Pops the current parameter definitions set.""" - self._param_def_block.pop() - - def mark_parameter_stored(self, target: str) -> None: - """Marks a parameter in the current parameter definitions as stored. - This will skip the enforced undefined checks. - """ - if self._param_def_block: - self._param_def_block[-1].discard(target) - - def push_context_reference(self, target: str) -> None: - self._context_reference_stack.append(target) - - def pop_context_reference(self) -> None: - self._context_reference_stack.pop() - - def get_context_ref(self) -> str: - return self._context_reference_stack[-1] - - def get_resolve_func(self) -> str: - target = self._context_reference_stack[-1] - if target == "context": - return "resolve" - return f"{target}.resolve" - - def derive_context(self, frame: Frame) -> str: - return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" - - def parameter_is_undeclared(self, target: str) -> bool: - """Checks if a given target is an undeclared parameter.""" - if not self._param_def_block: - return False - return target in self._param_def_block[-1] - - def push_assign_tracking(self) -> None: - """Pushes a new layer for assignment tracking.""" - self._assign_stack.append(set()) - - def pop_assign_tracking(self, frame: Frame) -> None: - """Pops the topmost level for assignment tracking and updates the - context variables if necessary. - """ - vars = self._assign_stack.pop() - if ( - not frame.block_frame - and not frame.loop_frame - and not frame.toplevel - or not vars - ): - return - public_names = [x for x in vars if x[:1] != "_"] - if len(vars) == 1: - name = next(iter(vars)) - ref = frame.symbols.ref(name) - if frame.loop_frame: - self.writeline(f"_loop_vars[{name!r}] = {ref}") - return - if frame.block_frame: - self.writeline(f"_block_vars[{name!r}] = {ref}") - return - self.writeline(f"context.vars[{name!r}] = {ref}") - else: - if frame.loop_frame: - self.writeline("_loop_vars.update({") - elif frame.block_frame: - self.writeline("_block_vars.update({") - else: - self.writeline("context.vars.update({") - for idx, name in enumerate(sorted(vars)): - if idx: - self.write(", ") - ref = frame.symbols.ref(name) - self.write(f"{name!r}: {ref}") - self.write("})") - if not frame.block_frame and not frame.loop_frame and public_names: - if len(public_names) == 1: - self.writeline(f"context.exported_vars.add({public_names[0]!r})") - else: - names_str = ", ".join(map(repr, sorted(public_names))) - self.writeline(f"context.exported_vars.update(({names_str}))") - - # -- Statement Visitors - - def visit_Template( - self, node: nodes.Template, frame: t.Optional[Frame] = None - ) -> None: - assert frame is None, "no root frame allowed" - eval_ctx = EvalContext(self.environment, self.name) - - from .runtime import async_exported - from .runtime import exported - - if self.environment.is_async: - exported_names = sorted(exported + async_exported) - else: - exported_names = sorted(exported) - - self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) - - # if we want a deferred initialization we cannot move the - # environment into a local name - envenv = "" if self.defer_init else ", environment=environment" - - # do we have an extends tag at all? If not, we can save some - # overhead by just not processing any inheritance code. 
- have_extends = node.find(nodes.Extends) is not None - - # find all blocks - for block in node.find_all(nodes.Block): - if block.name in self.blocks: - self.fail(f"block {block.name!r} defined twice", block.lineno) - self.blocks[block.name] = block - - # find all imports and import them - for import_ in node.find_all(nodes.ImportedName): - if import_.importname not in self.import_aliases: - imp = import_.importname - self.import_aliases[imp] = alias = self.temporary_identifier() - if "." in imp: - module, obj = imp.rsplit(".", 1) - self.writeline(f"from {module} import {obj} as {alias}") - else: - self.writeline(f"import {imp} as {alias}") - - # add the load name - self.writeline(f"name = {self.name!r}") - - # generate the root render function. - self.writeline( - f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 - ) - self.indent() - self.write_commons() - - # process the root - frame = Frame(eval_ctx) - if "self" in find_undeclared(node.body, ("self",)): - ref = frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - frame.symbols.analyze_node(node) - frame.toplevel = frame.rootlevel = True - frame.require_output_check = have_extends and not self.has_known_extends - if have_extends: - self.writeline("parent_template = None") - self.enter_frame(frame) - self.pull_dependencies(node.body) - self.blockvisit(node.body, frame) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - # make sure that the parent root is called. - if have_extends: - if not self.has_known_extends: - self.indent() - self.writeline("if parent_template is not None:") - self.indent() - if not self.environment.is_async: - self.writeline("yield from parent_template.root_render_func(context)") - else: - self.writeline("agen = parent_template.root_render_func(context)") - self.writeline("try:") - self.indent() - self.writeline("async for event in agen:") - self.indent() - self.writeline("yield event") - self.outdent() - self.outdent() - self.writeline("finally: await agen.aclose()") - self.outdent(1 + (not self.has_known_extends)) - - # at this point we now have the blocks collected and can visit them too. - for name, block in self.blocks.items(): - self.writeline( - f"{self.func('block_' + name)}(context, missing=missing{envenv}):", - block, - 1, - ) - self.indent() - self.write_commons() - # It's important that we do not make this frame a child of the - # toplevel template. This would cause a variety of - # interesting issues with identifier tracking. 
- block_frame = Frame(eval_ctx) - block_frame.block_frame = True - undeclared = find_undeclared(block.body, ("self", "super")) - if "self" in undeclared: - ref = block_frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - if "super" in undeclared: - ref = block_frame.symbols.declare_parameter("super") - self.writeline(f"{ref} = context.super({name!r}, block_{name})") - block_frame.symbols.analyze_node(block) - block_frame.block = name - self.writeline("_block_vars = {}") - self.enter_frame(block_frame) - self.pull_dependencies(block.body) - self.blockvisit(block.body, block_frame) - self.leave_frame(block_frame, with_python_scope=True) - self.outdent() - - blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) - self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) - debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) - self.writeline(f"debug_info = {debug_kv_str!r}") - - def visit_Block(self, node: nodes.Block, frame: Frame) -> None: - """Call a block and register it for the template.""" - level = 0 - if frame.toplevel: - # if we know that we are a child template, there is no need to - # check if we are one - if self.has_known_extends: - return - if self.extends_so_far > 0: - self.writeline("if parent_template is None:") - self.indent() - level += 1 - - if node.scoped: - context = self.derive_context(frame) - else: - context = self.get_context_ref() - - if node.required: - self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) - self.indent() - self.writeline( - f'raise TemplateRuntimeError("Required block {node.name!r} not found")', - node, - ) - self.outdent() - - if not self.environment.is_async and frame.buffer is None: - self.writeline( - f"yield from context.blocks[{node.name!r}][0]({context})", node - ) - else: - self.writeline(f"gen = context.blocks[{node.name!r}][0]({context})") - self.writeline("try:") - self.indent() - self.writeline( - f"{self.choose_async()}for event in gen:", - node, - ) - self.indent() - self.simple_write("event", frame) - self.outdent() - self.outdent() - self.writeline( - f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" - ) - - self.outdent(level) - - def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: - """Calls the extender.""" - if not frame.toplevel: - self.fail("cannot use extend from a non top-level scope", node.lineno) - - # if the number of extends statements in general is zero so - # far, we don't have to add a check if something extended - # the template before this one. - if self.extends_so_far > 0: - # if we have a known extends we just add a template runtime - # error into the generated code. We could catch that at compile - # time too, but i welcome it not to confuse users by throwing the - # same error at different times just "because we can". - if not self.has_known_extends: - self.writeline("if parent_template is not None:") - self.indent() - self.writeline('raise TemplateRuntimeError("extended multiple times")') - - # if we have a known extends already we don't need that code here - # as we know that the template execution will end here. 
- if self.has_known_extends: - raise CompilerExit() - else: - self.outdent() - - self.writeline("parent_template = environment.get_template(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - self.writeline("for name, parent_block in parent_template.blocks.items():") - self.indent() - self.writeline("context.blocks.setdefault(name, []).append(parent_block)") - self.outdent() - - # if this extends statement was in the root level we can take - # advantage of that information and simplify the generated code - # in the top level from this point onwards - if frame.rootlevel: - self.has_known_extends = True - - # and now we have one more - self.extends_so_far += 1 - - def visit_Include(self, node: nodes.Include, frame: Frame) -> None: - """Handles includes.""" - if node.ignore_missing: - self.writeline("try:") - self.indent() - - func_name = "get_or_select_template" - if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, str): - func_name = "get_template" - elif isinstance(node.template.value, (tuple, list)): - func_name = "select_template" - elif isinstance(node.template, (nodes.Tuple, nodes.List)): - func_name = "select_template" - - self.writeline(f"template = environment.{func_name}(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - if node.ignore_missing: - self.outdent() - self.writeline("except TemplateNotFound:") - self.indent() - self.writeline("pass") - self.outdent() - self.writeline("else:") - self.indent() - - def loop_body() -> None: - self.indent() - self.simple_write("event", frame) - self.outdent() - - if node.with_context: - self.writeline( - f"gen = template.root_render_func(" - "template.new_context(context.get_all(), True," - f" {self.dump_local_context(frame)}))" - ) - self.writeline("try:") - self.indent() - self.writeline(f"{self.choose_async()}for event in gen:") - loop_body() - self.outdent() - self.writeline( - f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" - ) - elif self.environment.is_async: - self.writeline( - "for event in (await template._get_default_module_async())" - "._body_stream:" - ) - loop_body() - else: - self.writeline("yield from template._get_default_module()._body_stream") - - if node.ignore_missing: - self.outdent() - - def _import_common( - self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame - ) -> None: - self.write(f"{self.choose_async('await ')}environment.get_template(") - self.visit(node.template, frame) - self.write(f", {self.name!r}).") - - if node.with_context: - f_name = f"make_module{self.choose_async('_async')}" - self.write( - f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" - ) - else: - self.write(f"_get_default_module{self.choose_async('_async')}(context)") - - def visit_Import(self, node: nodes.Import, frame: Frame) -> None: - """Visit regular imports.""" - self.writeline(f"{frame.symbols.ref(node.target)} = ", node) - if frame.toplevel: - self.write(f"context.vars[{node.target!r}] = ") - - self._import_common(node, frame) - - if frame.toplevel and not node.target.startswith("_"): - self.writeline(f"context.exported_vars.discard({node.target!r})") - - def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: - """Visit named imports.""" - self.newline(node) - self.write("included_template = ") - self._import_common(node, frame) - var_names = [] - discarded_names = [] - for name in node.names: - if isinstance(name, tuple): - name, alias = name - else: - alias = name - 
self.writeline( - f"{frame.symbols.ref(alias)} =" - f" getattr(included_template, {name!r}, missing)" - ) - self.writeline(f"if {frame.symbols.ref(alias)} is missing:") - self.indent() - # The position will contain the template name, and will be formatted - # into a string that will be compiled into an f-string. Curly braces - # in the name must be replaced with escapes so that they will not be - # executed as part of the f-string. - position = self.position(node).replace("{", "{{").replace("}", "}}") - message = ( - "the template {included_template.__name__!r}" - f" (imported on {position})" - f" does not export the requested name {name!r}" - ) - self.writeline( - f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" - ) - self.outdent() - if frame.toplevel: - var_names.append(alias) - if not alias.startswith("_"): - discarded_names.append(alias) - - if var_names: - if len(var_names) == 1: - name = var_names[0] - self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") - else: - names_kv = ", ".join( - f"{name!r}: {frame.symbols.ref(name)}" for name in var_names - ) - self.writeline(f"context.vars.update({{{names_kv}}})") - if discarded_names: - if len(discarded_names) == 1: - self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") - else: - names_str = ", ".join(map(repr, discarded_names)) - self.writeline( - f"context.exported_vars.difference_update(({names_str}))" - ) - - def visit_For(self, node: nodes.For, frame: Frame) -> None: - loop_frame = frame.inner() - loop_frame.loop_frame = True - test_frame = frame.inner() - else_frame = frame.inner() - - # try to figure out if we have an extended loop. An extended loop - # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body if the body is a scoped block. - extended_loop = ( - node.recursive - or "loop" - in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) - or any(block.scoped for block in node.find_all(nodes.Block)) - ) - - loop_ref = None - if extended_loop: - loop_ref = loop_frame.symbols.declare_parameter("loop") - - loop_frame.symbols.analyze_node(node, for_branch="body") - if node.else_: - else_frame.symbols.analyze_node(node, for_branch="else") - - if node.test: - loop_filter_func = self.temporary_identifier() - test_frame.symbols.analyze_node(node, for_branch="test") - self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) - self.indent() - self.enter_frame(test_frame) - self.writeline(self.choose_async("async for ", "for ")) - self.visit(node.target, loop_frame) - self.write(" in ") - self.write(self.choose_async("auto_aiter(fiter)", "fiter")) - self.write(":") - self.indent() - self.writeline("if ", node.test) - self.visit(node.test, test_frame) - self.write(":") - self.indent() - self.writeline("yield ") - self.visit(node.target, loop_frame) - self.outdent(3) - self.leave_frame(test_frame, with_python_scope=True) - - # if we don't have an recursive loop we have to find the shadowed - # variables at that point. Because loops can be nested but the loop - # variable is a special one we have to enforce aliasing for it. 
- if node.recursive: - self.writeline( - f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node - ) - self.indent() - self.buffer(loop_frame) - - # Use the same buffer for the else frame - else_frame.buffer = loop_frame.buffer - - # make sure the loop variable is a special one and raise a template - # assertion error if a loop tries to write to loop - if extended_loop: - self.writeline(f"{loop_ref} = missing") - - for name in node.find_all(nodes.Name): - if name.ctx == "store" and name.name == "loop": - self.fail( - "Can't assign to special loop variable in for-loop target", - name.lineno, - ) - - if node.else_: - iteration_indicator = self.temporary_identifier() - self.writeline(f"{iteration_indicator} = 1") - - self.writeline(self.choose_async("async for ", "for "), node) - self.visit(node.target, loop_frame) - if extended_loop: - self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") - else: - self.write(" in ") - - if node.test: - self.write(f"{loop_filter_func}(") - if node.recursive: - self.write("reciter") - else: - if self.environment.is_async and not extended_loop: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async and not extended_loop: - self.write(")") - if node.test: - self.write(")") - - if node.recursive: - self.write(", undefined, loop_render_func, depth):") - else: - self.write(", undefined):" if extended_loop else ":") - - self.indent() - self.enter_frame(loop_frame) - - self.writeline("_loop_vars = {}") - self.blockvisit(node.body, loop_frame) - if node.else_: - self.writeline(f"{iteration_indicator} = 0") - self.outdent() - self.leave_frame( - loop_frame, with_python_scope=node.recursive and not node.else_ - ) - - if node.else_: - self.writeline(f"if {iteration_indicator}:") - self.indent() - self.enter_frame(else_frame) - self.blockvisit(node.else_, else_frame) - self.leave_frame(else_frame) - self.outdent() - - # if the node was recursive we have to return the buffer contents - # and start the iteration code - if node.recursive: - self.return_buffer_contents(loop_frame) - self.outdent() - self.start_write(frame, node) - self.write(f"{self.choose_async('await ')}loop(") - if self.environment.is_async: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async: - self.write(")") - self.write(", loop)") - self.end_write(frame) - - # at the end of the iteration, clear any assignments made in the - # loop from the top level - if self._assign_stack: - self._assign_stack[-1].difference_update(loop_frame.symbols.stores) - - def visit_If(self, node: nodes.If, frame: Frame) -> None: - if_frame = frame.soft() - self.writeline("if ", node) - self.visit(node.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(node.body, if_frame) - self.outdent() - for elif_ in node.elif_: - self.writeline("elif ", elif_) - self.visit(elif_.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(elif_.body, if_frame) - self.outdent() - if node.else_: - self.writeline("else:") - self.indent() - self.blockvisit(node.else_, if_frame) - self.outdent() - - def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: - macro_frame, macro_ref = self.macro_body(node, frame) - self.newline() - if frame.toplevel: - if not node.name.startswith("_"): - self.write(f"context.exported_vars.add({node.name!r})") - self.writeline(f"context.vars[{node.name!r}] = ") - self.write(f"{frame.symbols.ref(node.name)} = ") - self.macro_def(macro_ref, macro_frame) - - def visit_CallBlock(self, 
node: nodes.CallBlock, frame: Frame) -> None: - call_frame, macro_ref = self.macro_body(node, frame) - self.writeline("caller = ") - self.macro_def(macro_ref, call_frame) - self.start_write(frame, node) - self.visit_Call(node.call, frame, forward_caller=True) - self.end_write(frame) - - def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: - filter_frame = frame.inner() - filter_frame.symbols.analyze_node(node) - self.enter_frame(filter_frame) - self.buffer(filter_frame) - self.blockvisit(node.body, filter_frame) - self.start_write(frame, node) - self.visit_Filter(node.filter, filter_frame) - self.end_write(frame) - self.leave_frame(filter_frame) - - def visit_With(self, node: nodes.With, frame: Frame) -> None: - with_frame = frame.inner() - with_frame.symbols.analyze_node(node) - self.enter_frame(with_frame) - for target, expr in zip(node.targets, node.values): - self.newline() - self.visit(target, with_frame) - self.write(" = ") - self.visit(expr, frame) - self.blockvisit(node.body, with_frame) - self.leave_frame(with_frame) - - def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: - self.newline(node) - self.visit(node.node, frame) - - class _FinalizeInfo(t.NamedTuple): - const: t.Optional[t.Callable[..., str]] - src: t.Optional[str] - - @staticmethod - def _default_finalize(value: t.Any) -> t.Any: - """The default finalize function if the environment isn't - configured with one. Or, if the environment has one, this is - called on that function's output for constants. - """ - return str(value) - - _finalize: t.Optional[_FinalizeInfo] = None - - def _make_finalize(self) -> _FinalizeInfo: - """Build the finalize function to be used on constants and at - runtime. Cached so it's only created once for all output nodes. - - Returns a ``namedtuple`` with the following attributes: - - ``const`` - A function to finalize constant data at compile time. - - ``src`` - Source code to output around nodes to be evaluated at - runtime. - """ - if self._finalize is not None: - return self._finalize - - finalize: t.Optional[t.Callable[..., t.Any]] - finalize = default = self._default_finalize - src = None - - if self.environment.finalize: - src = "environment.finalize(" - env_finalize = self.environment.finalize - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(env_finalize) # type: ignore - ) - finalize = None - - if pass_arg is None: - - def finalize(value: t.Any) -> t.Any: # noqa: F811 - return default(env_finalize(value)) - - else: - src = f"{src}{pass_arg}, " - - if pass_arg == "environment": - - def finalize(value: t.Any) -> t.Any: # noqa: F811 - return default(env_finalize(self.environment, value)) - - self._finalize = self._FinalizeInfo(finalize, src) - return self._finalize - - def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: - """Given a group of constant values converted from ``Output`` - child nodes, produce a string to write to the template module - source. - """ - return repr(concat(group)) - - def _output_child_to_const( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> str: - """Try to optimize a child of an ``Output`` node by trying to - convert it to constant, finalized data at compile time. - - If :exc:`Impossible` is raised, the node is not constant and - will be evaluated at runtime. Any other exception will also be - evaluated at runtime for easier debugging. 
- """ - const = node.as_const(frame.eval_ctx) - - if frame.eval_ctx.autoescape: - const = escape(const) - - # Template data doesn't go through finalize. - if isinstance(node, nodes.TemplateData): - return str(const) - - return finalize.const(const) # type: ignore - - def _output_child_pre( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code before visiting a child of an - ``Output`` node. - """ - if frame.eval_ctx.volatile: - self.write("(escape if context.eval_ctx.autoescape else str)(") - elif frame.eval_ctx.autoescape: - self.write("escape(") - else: - self.write("str(") - - if finalize.src is not None: - self.write(finalize.src) - - def _output_child_post( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code after visiting a child of an - ``Output`` node. - """ - self.write(")") - - if finalize.src is not None: - self.write(")") - - def visit_Output(self, node: nodes.Output, frame: Frame) -> None: - # If an extends is active, don't render outside a block. - if frame.require_output_check: - # A top-level extends is known to exist at compile time. - if self.has_known_extends: - return - - self.writeline("if parent_template is None:") - self.indent() - - finalize = self._make_finalize() - body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] - - # Evaluate constants at compile time if possible. Each item in - # body will be either a list of static data or a node to be - # evaluated at runtime. - for child in node.nodes: - try: - if not ( - # If the finalize function requires runtime context, - # constants can't be evaluated at compile time. - finalize.const - # Unless it's basic template data that won't be - # finalized anyway. - or isinstance(child, nodes.TemplateData) - ): - raise nodes.Impossible() - - const = self._output_child_to_const(child, frame, finalize) - except (nodes.Impossible, Exception): - # The node was not constant and needs to be evaluated at - # runtime. Or another error was raised, which is easier - # to debug at runtime. - body.append(child) - continue - - if body and isinstance(body[-1], list): - body[-1].append(const) - else: - body.append([const]) - - if frame.buffer is not None: - if len(body) == 1: - self.writeline(f"{frame.buffer}.append(") - else: - self.writeline(f"{frame.buffer}.extend((") - - self.indent() - - for item in body: - if isinstance(item, list): - # A group of constant data to join and output. - val = self._output_const_repr(item) - - if frame.buffer is None: - self.writeline("yield " + val) - else: - self.writeline(val + ",") - else: - if frame.buffer is None: - self.writeline("yield ", item) - else: - self.newline(item) - - # A node to be evaluated at runtime. - self._output_child_pre(item, frame, finalize) - self.visit(item, frame) - self._output_child_post(item, frame, finalize) - - if frame.buffer is not None: - self.write(",") - - if frame.buffer is not None: - self.outdent() - self.writeline(")" if len(body) == 1 else "))") - - if frame.require_output_check: - self.outdent() - - def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: - self.push_assign_tracking() - - # ``a.b`` is allowed for assignment, and is parsed as an NSRef. However, - # it is only valid if it references a Namespace object. Emit a check for - # that for each ref here, before assignment code is emitted. This can't - # be done in visit_NSRef as the ref could be in the middle of a tuple. 
- seen_refs: t.Set[str] = set() - - for nsref in node.find_all(nodes.NSRef): - if nsref.name in seen_refs: - # Only emit the check for each reference once, in case the same - # ref is used multiple times in a tuple, `ns.a, ns.b = c, d`. - continue - - seen_refs.add(nsref.name) - ref = frame.symbols.ref(nsref.name) - self.writeline(f"if not isinstance({ref}, Namespace):") - self.indent() - self.writeline( - "raise TemplateRuntimeError" - '("cannot assign attribute on non-namespace object")' - ) - self.outdent() - - self.newline(node) - self.visit(node.target, frame) - self.write(" = ") - self.visit(node.node, frame) - self.pop_assign_tracking(frame) - - def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: - self.push_assign_tracking() - block_frame = frame.inner() - # This is a special case. Since a set block always captures we - # will disable output checks. This way one can use set blocks - # toplevel even in extended templates. - block_frame.require_output_check = False - block_frame.symbols.analyze_node(node) - self.enter_frame(block_frame) - self.buffer(block_frame) - self.blockvisit(node.body, block_frame) - self.newline(node) - self.visit(node.target, frame) - self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") - if node.filter is not None: - self.visit_Filter(node.filter, block_frame) - else: - self.write(f"concat({block_frame.buffer})") - self.write(")") - self.pop_assign_tracking(frame) - self.leave_frame(block_frame) - - # -- Expression Visitors - - def visit_Name(self, node: nodes.Name, frame: Frame) -> None: - if node.ctx == "store" and ( - frame.toplevel or frame.loop_frame or frame.block_frame - ): - if self._assign_stack: - self._assign_stack[-1].add(node.name) - ref = frame.symbols.ref(node.name) - - # If we are looking up a variable we might have to deal with the - # case where it's undefined. We can skip that case if the load - # instruction indicates a parameter which are always defined. - if node.ctx == "load": - load = frame.symbols.find_load(ref) - if not ( - load is not None - and load[0] == VAR_LOAD_PARAMETER - and not self.parameter_is_undeclared(ref) - ): - self.write( - f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" - ) - return - - self.write(ref) - - def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: - # NSRef is a dotted assignment target a.b=c, but uses a[b]=c internally. - # visit_Assign emits code to validate that each ref is to a Namespace - # object only. That can't be emitted here as the ref could be in the - # middle of a tuple assignment. 
- ref = frame.symbols.ref(node.name) - self.writeline(f"{ref}[{node.attr!r}]") - - def visit_Const(self, node: nodes.Const, frame: Frame) -> None: - val = node.as_const(frame.eval_ctx) - if isinstance(val, float): - self.write(str(val)) - else: - self.write(repr(val)) - - def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: - try: - self.write(repr(node.as_const(frame.eval_ctx))) - except nodes.Impossible: - self.write( - f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" - ) - - def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: - self.write("(") - idx = -1 - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write(",)" if idx == 0 else ")") - - def visit_List(self, node: nodes.List, frame: Frame) -> None: - self.write("[") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write("]") - - def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: - self.write("{") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item.key, frame) - self.write(": ") - self.visit(item.value, frame) - self.write("}") - - visit_Add = _make_binop("+") - visit_Sub = _make_binop("-") - visit_Mul = _make_binop("*") - visit_Div = _make_binop("/") - visit_FloorDiv = _make_binop("//") - visit_Pow = _make_binop("**") - visit_Mod = _make_binop("%") - visit_And = _make_binop("and") - visit_Or = _make_binop("or") - visit_Pos = _make_unop("+") - visit_Neg = _make_unop("-") - visit_Not = _make_unop("not ") - - @optimizeconst - def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None: - if frame.eval_ctx.volatile: - func_name = "(markup_join if context.eval_ctx.volatile else str_join)" - elif frame.eval_ctx.autoescape: - func_name = "markup_join" - else: - func_name = "str_join" - self.write(f"{func_name}((") - for arg in node.nodes: - self.visit(arg, frame) - self.write(", ") - self.write("))") - - @optimizeconst - def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: - self.write("(") - self.visit(node.expr, frame) - for op in node.ops: - self.visit(op, frame) - self.write(")") - - def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: - self.write(f" {operators[node.op]} ") - self.visit(node.expr, frame) - - @optimizeconst - def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getattr(") - self.visit(node.node, frame) - self.write(f", {node.attr!r})") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: - # slices bypass the environment getitem method. 
- if isinstance(node.arg, nodes.Slice): - self.visit(node.node, frame) - self.write("[") - self.visit(node.arg, frame) - self.write("]") - else: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getitem(") - self.visit(node.node, frame) - self.write(", ") - self.visit(node.arg, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: - if node.start is not None: - self.visit(node.start, frame) - self.write(":") - if node.stop is not None: - self.visit(node.stop, frame) - if node.step is not None: - self.write(":") - self.visit(node.step, frame) - - @contextmanager - def _filter_test_common( - self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool - ) -> t.Iterator[None]: - if self.environment.is_async: - self.write("(await auto_await(") - - if is_filter: - self.write(f"{self.filters[node.name]}(") - func = self.environment.filters.get(node.name) - else: - self.write(f"{self.tests[node.name]}(") - func = self.environment.tests.get(node.name) - - # When inside an If or CondExpr frame, allow the filter to be - # undefined at compile time and only raise an error if it's - # actually called at runtime. See pull_dependencies. - if func is None and not frame.soft_frame: - type_name = "filter" if is_filter else "test" - self.fail(f"No {type_name} named {node.name!r}.", node.lineno) - - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(func) # type: ignore - ) - - if pass_arg is not None: - self.write(f"{pass_arg}, ") - - # Back to the visitor function to handle visiting the target of - # the filter or test. 
- yield - - self.signature(node, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: - with self._filter_test_common(node, frame, True): - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write( - f"(Markup(concat({frame.buffer}))" - f" if context.eval_ctx.autoescape else concat({frame.buffer}))" - ) - elif frame.eval_ctx.autoescape: - self.write(f"Markup(concat({frame.buffer}))") - else: - self.write(f"concat({frame.buffer})") - - @optimizeconst - def visit_Test(self, node: nodes.Test, frame: Frame) -> None: - with self._filter_test_common(node, frame, False): - self.visit(node.node, frame) - - @optimizeconst - def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: - frame = frame.soft() - - def write_expr2() -> None: - if node.expr2 is not None: - self.visit(node.expr2, frame) - return - - self.write( - f'cond_expr_undefined("the inline if-expression on' - f" {self.position(node)} evaluated to false and no else" - f' section was defined.")' - ) - - self.write("(") - self.visit(node.expr1, frame) - self.write(" if ") - self.visit(node.test, frame) - self.write(" else ") - write_expr2() - self.write(")") - - @optimizeconst - def visit_Call( - self, node: nodes.Call, frame: Frame, forward_caller: bool = False - ) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - if self.environment.sandboxed: - self.write("environment.call(context, ") - else: - self.write("context.call(") - self.visit(node.node, frame) - extra_kwargs = {"caller": "caller"} if forward_caller else None - loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} - block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} - if extra_kwargs: - extra_kwargs.update(loop_kwargs, **block_kwargs) - elif loop_kwargs or block_kwargs: - extra_kwargs = dict(loop_kwargs, **block_kwargs) - self.signature(node, frame, extra_kwargs) - self.write(")") - if self.environment.is_async: - self.write("))") - - def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: - self.write(node.key + "=") - self.visit(node.value, frame) - - # -- Unused nodes for extensions - - def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: - self.write("Markup(") - self.visit(node.expr, frame) - self.write(")") - - def visit_MarkSafeIfAutoescape( - self, node: nodes.MarkSafeIfAutoescape, frame: Frame - ) -> None: - self.write("(Markup if context.eval_ctx.autoescape else identity)(") - self.visit(node.expr, frame) - self.write(")") - - def visit_EnvironmentAttribute( - self, node: nodes.EnvironmentAttribute, frame: Frame - ) -> None: - self.write("environment." 
+ node.name) - - def visit_ExtensionAttribute( - self, node: nodes.ExtensionAttribute, frame: Frame - ) -> None: - self.write(f"environment.extensions[{node.identifier!r}].{node.name}") - - def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: - self.write(self.import_aliases[node.importname]) - - def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: - self.write(node.name) - - def visit_ContextReference( - self, node: nodes.ContextReference, frame: Frame - ) -> None: - self.write("context") - - def visit_DerivedContextReference( - self, node: nodes.DerivedContextReference, frame: Frame - ) -> None: - self.write(self.derive_context(frame)) - - def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: - self.writeline("continue", node) - - def visit_Break(self, node: nodes.Break, frame: Frame) -> None: - self.writeline("break", node) - - def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: - scope_frame = frame.inner() - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - - def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: - ctx = self.temporary_identifier() - self.writeline(f"{ctx} = {self.derive_context(frame)}") - self.writeline(f"{ctx}.vars = ") - self.visit(node.context, frame) - self.push_context_reference(ctx) - - scope_frame = frame.inner(isolated=True) - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - self.pop_context_reference() - - def visit_EvalContextModifier( - self, node: nodes.EvalContextModifier, frame: Frame - ) -> None: - for keyword in node.options: - self.writeline(f"context.eval_ctx.{keyword.key} = ") - self.visit(keyword.value, frame) - try: - val = keyword.value.as_const(frame.eval_ctx) - except nodes.Impossible: - frame.eval_ctx.volatile = True - else: - setattr(frame.eval_ctx, keyword.key, val) - - def visit_ScopedEvalContextModifier( - self, node: nodes.ScopedEvalContextModifier, frame: Frame - ) -> None: - old_ctx_name = self.temporary_identifier() - saved_ctx = frame.eval_ctx.save() - self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") - self.visit_EvalContextModifier(node, frame) - for child in node.body: - self.visit(child, frame) - frame.eval_ctx.revert(saved_ctx) - self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/venv/lib/python3.10/site-packages/jinja2/constants.py b/venv/lib/python3.10/site-packages/jinja2/constants.py deleted file mode 100644 index 41a1c23..0000000 --- a/venv/lib/python3.10/site-packages/jinja2/constants.py +++ /dev/null @@ -1,20 +0,0 @@ -#: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = """\ -a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at -auctor augue bibendum blandit class commodo condimentum congue consectetuer -consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus -diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend -elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames -faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac -hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum -justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem -luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie 
-mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non -nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque -penatibus per pharetra phasellus placerat platea porta porttitor posuere -potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus -ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit -sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor -tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices -ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate""" diff --git a/venv/lib/python3.10/site-packages/jinja2/debug.py b/venv/lib/python3.10/site-packages/jinja2/debug.py deleted file mode 100644 index eeeeee7..0000000 --- a/venv/lib/python3.10/site-packages/jinja2/debug.py +++ /dev/null @@ -1,191 +0,0 @@ -import sys -import typing as t -from types import CodeType -from types import TracebackType - -from .exceptions import TemplateSyntaxError -from .utils import internal_code -from .utils import missing - -if t.TYPE_CHECKING: - from .runtime import Context - - -def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: - """Rewrite the current exception to replace any tracebacks from - within compiled template code with tracebacks that look like they - came from the template source. - - This must be called within an ``except`` block. - - :param source: For ``TemplateSyntaxError``, the original source if - known. - :return: The original exception with the rewritten traceback. - """ - _, exc_value, tb = sys.exc_info() - exc_value = t.cast(BaseException, exc_value) - tb = t.cast(TracebackType, tb) - - if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: - exc_value.translated = True - exc_value.source = source - # Remove the old traceback, otherwise the frames from the - # compiler still show up. - exc_value.with_traceback(None) - # Outside of runtime, so the frame isn't executing template - # code, but it still needs to point at the template. - tb = fake_traceback( - exc_value, None, exc_value.filename or "", exc_value.lineno - ) - else: - # Skip the frame for the render function. - tb = tb.tb_next - - stack = [] - - # Build the stack of traceback object, replacing any in template - # code with the source file and line information. - while tb is not None: - # Skip frames decorated with @internalcode. These are internal - # calls that aren't useful in template debugging output. - if tb.tb_frame.f_code in internal_code: - tb = tb.tb_next - continue - - template = tb.tb_frame.f_globals.get("__jinja_template__") - - if template is not None: - lineno = template.get_corresponding_lineno(tb.tb_lineno) - fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) - stack.append(fake_tb) - else: - stack.append(tb) - - tb = tb.tb_next - - tb_next = None - - # Assign tb_next in reverse to avoid circular references. - for tb in reversed(stack): - tb.tb_next = tb_next - tb_next = tb - - return exc_value.with_traceback(tb_next) - - -def fake_traceback( # type: ignore - exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int -) -> TracebackType: - """Produce a new traceback object that looks like it came from the - template source instead of the compiled code. The filename, line - number, and location name will point to the template, and the local - variables will be the current template context. 
- - :param exc_value: The original exception to be re-raised to create - the new traceback. - :param tb: The original traceback to get the local variables and - code info from. - :param filename: The template filename. - :param lineno: The line number in the template source. - """ - if tb is not None: - # Replace the real locals with the context that would be - # available at that point in the template. - locals = get_template_locals(tb.tb_frame.f_locals) - locals.pop("__jinja_exception__", None) - else: - locals = {} - - globals = { - "__name__": filename, - "__file__": filename, - "__jinja_exception__": exc_value, - } - # Raise an exception at the correct line number. - code: CodeType = compile( - "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" - ) - - # Build a new code object that points to the template file and - # replaces the location with a block name. - location = "template" - - if tb is not None: - function = tb.tb_frame.f_code.co_name - - if function == "root": - location = "top-level template code" - elif function.startswith("block_"): - location = f"block {function[6:]!r}" - - if sys.version_info >= (3, 8): - code = code.replace(co_name=location) - else: - code = CodeType( - code.co_argcount, - code.co_kwonlyargcount, - code.co_nlocals, - code.co_stacksize, - code.co_flags, - code.co_code, - code.co_consts, - code.co_names, - code.co_varnames, - code.co_filename, - location, - code.co_firstlineno, - code.co_lnotab, - code.co_freevars, - code.co_cellvars, - ) - - # Execute the new code, which is guaranteed to raise, and return - # the new traceback without this frame. - try: - exec(code, globals, locals) - except BaseException: - return sys.exc_info()[2].tb_next # type: ignore - - -def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: - """Based on the runtime locals, get the context that would be - available at that point in the template. - """ - # Start with the current template context. - ctx: t.Optional[Context] = real_locals.get("context") - - if ctx is not None: - data: t.Dict[str, t.Any] = ctx.get_all().copy() - else: - data = {} - - # Might be in a derived context that only sets local variables - # rather than pushing a context. Local variables follow the scheme - # l_depth_name. Find the highest-depth local that has a value for - # each name. - local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} - - for name, value in real_locals.items(): - if not name.startswith("l_") or value is missing: - # Not a template variable, or no longer relevant. - continue - - try: - _, depth_str, name = name.split("_", 2) - depth = int(depth_str) - except ValueError: - continue - - cur_depth = local_overrides.get(name, (-1,))[0] - - if cur_depth < depth: - local_overrides[name] = (depth, value) - - # Modify the context with any derived context. 
- for name, (_, value) in local_overrides.items(): - if value is missing: - data.pop(name, None) - else: - data[name] = value - - return data diff --git a/venv/lib/python3.10/site-packages/jinja2/defaults.py b/venv/lib/python3.10/site-packages/jinja2/defaults.py deleted file mode 100644 index 638cad3..0000000 --- a/venv/lib/python3.10/site-packages/jinja2/defaults.py +++ /dev/null @@ -1,48 +0,0 @@ -import typing as t - -from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 -from .tests import TESTS as DEFAULT_TESTS # noqa: F401 -from .utils import Cycler -from .utils import generate_lorem_ipsum -from .utils import Joiner -from .utils import Namespace - -if t.TYPE_CHECKING: - import typing_extensions as te - -# defaults for the parser / lexer -BLOCK_START_STRING = "{%" -BLOCK_END_STRING = "%}" -VARIABLE_START_STRING = "{{" -VARIABLE_END_STRING = "}}" -COMMENT_START_STRING = "{#" -COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX: t.Optional[str] = None -LINE_COMMENT_PREFIX: t.Optional[str] = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" -KEEP_TRAILING_NEWLINE = False - -# default filters, tests and namespace - -DEFAULT_NAMESPACE = { - "range": range, - "dict": dict, - "lipsum": generate_lorem_ipsum, - "cycler": Cycler, - "joiner": Joiner, - "namespace": Namespace, -} - -# default policies -DEFAULT_POLICIES: t.Dict[str, t.Any] = { - "compiler.ascii_str": True, - "urlize.rel": "noopener", - "urlize.target": None, - "urlize.extra_schemes": None, - "truncate.leeway": 5, - "json.dumps_function": None, - "json.dumps_kwargs": {"sort_keys": True}, - "ext.i18n.trimmed": False, -} diff --git a/venv/lib/python3.10/site-packages/jinja2/environment.py b/venv/lib/python3.10/site-packages/jinja2/environment.py deleted file mode 100644 index 0fc6e5b..0000000 --- a/venv/lib/python3.10/site-packages/jinja2/environment.py +++ /dev/null @@ -1,1672 +0,0 @@ -"""Classes for managing templates and their runtime and compile time -options. -""" - -import os -import typing -import typing as t -import weakref -from collections import ChainMap -from functools import lru_cache -from functools import partial -from functools import reduce -from types import CodeType - -from markupsafe import Markup - -from . 
import nodes -from .compiler import CodeGenerator -from .compiler import generate -from .defaults import BLOCK_END_STRING -from .defaults import BLOCK_START_STRING -from .defaults import COMMENT_END_STRING -from .defaults import COMMENT_START_STRING -from .defaults import DEFAULT_FILTERS # type: ignore[attr-defined] -from .defaults import DEFAULT_NAMESPACE -from .defaults import DEFAULT_POLICIES -from .defaults import DEFAULT_TESTS # type: ignore[attr-defined] -from .defaults import KEEP_TRAILING_NEWLINE -from .defaults import LINE_COMMENT_PREFIX -from .defaults import LINE_STATEMENT_PREFIX -from .defaults import LSTRIP_BLOCKS -from .defaults import NEWLINE_SEQUENCE -from .defaults import TRIM_BLOCKS -from .defaults import VARIABLE_END_STRING -from .defaults import VARIABLE_START_STRING -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .lexer import get_lexer -from .lexer import Lexer -from .lexer import TokenStream -from .nodes import EvalContext -from .parser import Parser -from .runtime import Context -from .runtime import new_context -from .runtime import Undefined -from .utils import _PassArg -from .utils import concat -from .utils import consume -from .utils import import_string -from .utils import internalcode -from .utils import LRUCache -from .utils import missing - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .bccache import BytecodeCache - from .ext import Extension - from .loaders import BaseLoader - -_env_bound = t.TypeVar("_env_bound", bound="Environment") - - -# for direct template usage we have up to ten living environments -@lru_cache(maxsize=10) -def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: - """Return a new spontaneous environment. A spontaneous environment - is used for templates created directly rather than through an - existing environment. - - :param cls: Environment class to create. - :param args: Positional arguments passed to environment. - """ - env = cls(*args) - env.shared = True - return env - - -def create_cache( - size: int, -) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: - """Return the cache class for the given size.""" - if size == 0: - return None - - if size < 0: - return {} - - return LRUCache(size) # type: ignore - - -def copy_cache( - cache: t.Optional[t.MutableMapping[t.Any, t.Any]], -) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: - """Create an empty copy of the given cache.""" - if cache is None: - return None - - if type(cache) is dict: # noqa E721 - return {} - - return LRUCache(cache.capacity) # type: ignore - - -def load_extensions( - environment: "Environment", - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], -) -> t.Dict[str, "Extension"]: - """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated extensions. - """ - result = {} - - for extension in extensions: - if isinstance(extension, str): - extension = t.cast(t.Type["Extension"], import_string(extension)) - - result[extension.identifier] = extension(environment) - - return result - - -def _environment_config_check(environment: _env_bound) -> _env_bound: - """Perform a sanity check on the environment.""" - assert issubclass( - environment.undefined, Undefined - ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
- assert ( - environment.block_start_string - != environment.variable_start_string - != environment.comment_start_string - ), "block, variable and comment start strings must be different." - assert environment.newline_sequence in { - "\r", - "\r\n", - "\n", - }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." - return environment - - -class Environment: - r"""The core component of Jinja is the `Environment`. It contains - important shared variables like configuration, filters, tests, - globals and others. Instances of this class may be modified if - they are not shared and if no template was loaded so far. - Modifications on environments after the first template was loaded - will lead to surprising effects and undefined behavior. - - Here are the possible initialization parameters: - - `block_start_string` - The string marking the beginning of a block. Defaults to ``'{%'``. - - `block_end_string` - The string marking the end of a block. Defaults to ``'%}'``. - - `variable_start_string` - The string marking the beginning of a print statement. - Defaults to ``'{{'``. - - `variable_end_string` - The string marking the end of a print statement. Defaults to - ``'}}'``. - - `comment_start_string` - The string marking the beginning of a comment. Defaults to ``'{#'``. - - `comment_end_string` - The string marking the end of a comment. Defaults to ``'#}'``. - - `line_statement_prefix` - If given and a string, this will be used as prefix for line based - statements. See also :ref:`line-statements`. - - `line_comment_prefix` - If given and a string, this will be used as prefix for line based - comments. See also :ref:`line-statements`. - - .. versionadded:: 2.2 - - `trim_blocks` - If this is set to ``True`` the first newline after a block is - removed (block, not variable tag!). Defaults to `False`. - - `lstrip_blocks` - If this is set to ``True`` leading spaces and tabs are stripped - from the start of a line to a block. Defaults to `False`. - - `newline_sequence` - The sequence that starts a newline. Must be one of ``'\r'``, - ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a - useful default for Linux and OS X systems as well as web - applications. - - `keep_trailing_newline` - Preserve the trailing newline when rendering templates. - The default is ``False``, which causes a single newline, - if present, to be stripped from the end of the template. - - .. versionadded:: 2.7 - - `extensions` - List of Jinja extensions to use. This can either be import paths - as strings or extension classes. For more information have a - look at :ref:`the extensions documentation `. - - `optimized` - should the optimizer be enabled? Default is ``True``. - - `undefined` - :class:`Undefined` or a subclass of it that is used to represent - undefined values in the template. - - `finalize` - A callable that can be used to process the result of a variable - expression before it is output. For example one can convert - ``None`` implicitly into an empty string here. - - `autoescape` - If set to ``True`` the XML/HTML autoescaping feature is enabled by - default. For more details about autoescaping see - :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also - be a callable that is passed the template name and has to - return ``True`` or ``False`` depending on autoescape should be - enabled by default. - - .. versionchanged:: 2.4 - `autoescape` can now be a function - - `loader` - The template loader for this environment. - - `cache_size` - The size of the cache. 
Per default this is ``400`` which means - that if more than 400 templates are loaded the loader will clean - out the least recently used template. If the cache size is set to - ``0`` templates are recompiled all the time, if the cache size is - ``-1`` the cache will not be cleaned. - - .. versionchanged:: 2.8 - The cache size was increased to 400 from a low 50. - - `auto_reload` - Some loaders load templates from locations where the template - sources may change (ie: file system or database). If - ``auto_reload`` is set to ``True`` (default) every time a template is - requested the loader checks if the source changed and if yes, it - will reload the template. For higher performance it's possible to - disable that. - - `bytecode_cache` - If set to a bytecode cache object, this object will provide a - cache for the internal Jinja bytecode so that templates don't - have to be parsed if they were not changed. - - See :ref:`bytecode-cache` for more information. - - `enable_async` - If set to true this enables async template execution which - allows using async functions and generators. - """ - - #: if this environment is sandboxed. Modifying this variable won't make - #: the environment sandboxed though. For a real sandboxed environment - #: have a look at jinja2.sandbox. This flag alone controls the code - #: generation by the compiler. - sandboxed = False - - #: True if the environment is just an overlay - overlayed = False - - #: the environment this environment is linked to if it is an overlay - linked_to: t.Optional["Environment"] = None - - #: shared environments have this set to `True`. A shared environment - #: must not be modified - shared = False - - #: the class that is used for code generation. See - #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class: t.Type["CodeGenerator"] = CodeGenerator - - concat = "".join - - #: the context class that is used for templates. See - #: :class:`~jinja2.runtime.Context` for more information. - context_class: t.Type[Context] = Context - - template_class: t.Type["Template"] - - def __init__( - self, - block_start_string: str = BLOCK_START_STRING, - block_end_string: str = BLOCK_END_STRING, - variable_start_string: str = VARIABLE_START_STRING, - variable_end_string: str = VARIABLE_END_STRING, - comment_start_string: str = COMMENT_START_STRING, - comment_end_string: str = COMMENT_END_STRING, - line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, - line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, - trim_blocks: bool = TRIM_BLOCKS, - lstrip_blocks: bool = LSTRIP_BLOCKS, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, - keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), - optimized: bool = True, - undefined: t.Type[Undefined] = Undefined, - finalize: t.Optional[t.Callable[..., t.Any]] = None, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, - loader: t.Optional["BaseLoader"] = None, - cache_size: int = 400, - auto_reload: bool = True, - bytecode_cache: t.Optional["BytecodeCache"] = None, - enable_async: bool = False, - ): - # !!Important notice!! - # The constructor accepts quite a few arguments that should be - # passed by keyword rather than position. 
However it's important to - # not change the order of arguments because it's used at least - # internally in those cases: - # - spontaneous environments (i18n extension and Template) - # - unittests - # If parameter changes are required only add parameters at the end - # and don't change the arguments (or the defaults!) of the arguments - # existing already. - - # lexer / parser information - self.block_start_string = block_start_string - self.block_end_string = block_end_string - self.variable_start_string = variable_start_string - self.variable_end_string = variable_end_string - self.comment_start_string = comment_start_string - self.comment_end_string = comment_end_string - self.line_statement_prefix = line_statement_prefix - self.line_comment_prefix = line_comment_prefix - self.trim_blocks = trim_blocks - self.lstrip_blocks = lstrip_blocks - self.newline_sequence = newline_sequence - self.keep_trailing_newline = keep_trailing_newline - - # runtime information - self.undefined: t.Type[Undefined] = undefined - self.optimized = optimized - self.finalize = finalize - self.autoescape = autoescape - - # defaults - self.filters = DEFAULT_FILTERS.copy() - self.tests = DEFAULT_TESTS.copy() - self.globals = DEFAULT_NAMESPACE.copy() - - # set the loader provided - self.loader = loader - self.cache = create_cache(cache_size) - self.bytecode_cache = bytecode_cache - self.auto_reload = auto_reload - - # configurable policies - self.policies = DEFAULT_POLICIES.copy() - - # load extensions - self.extensions = load_extensions(self, extensions) - - self.is_async = enable_async - _environment_config_check(self) - - def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: - """Adds an extension after the environment was created. - - .. versionadded:: 2.5 - """ - self.extensions.update(load_extensions(self, [extension])) - - def extend(self, **attributes: t.Any) -> None: - """Add the items to the instance of the environment if they do not exist - yet. This is used by :ref:`extensions ` to register - callbacks and configuration values without breaking inheritance. - """ - for key, value in attributes.items(): - if not hasattr(self, key): - setattr(self, key, value) - - def overlay( - self, - block_start_string: str = missing, - block_end_string: str = missing, - variable_start_string: str = missing, - variable_end_string: str = missing, - comment_start_string: str = missing, - comment_end_string: str = missing, - line_statement_prefix: t.Optional[str] = missing, - line_comment_prefix: t.Optional[str] = missing, - trim_blocks: bool = missing, - lstrip_blocks: bool = missing, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, - keep_trailing_newline: bool = missing, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, - optimized: bool = missing, - undefined: t.Type[Undefined] = missing, - finalize: t.Optional[t.Callable[..., t.Any]] = missing, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, - loader: t.Optional["BaseLoader"] = missing, - cache_size: int = missing, - auto_reload: bool = missing, - bytecode_cache: t.Optional["BytecodeCache"] = missing, - enable_async: bool = missing, - ) -> "te.Self": - """Create a new overlay environment that shares all the data with the - current environment except for cache and the overridden attributes. - Extensions cannot be removed for an overlayed environment. 
An overlayed - environment automatically gets all the extensions of the environment it - is linked to plus optional extra extensions. - - Creating overlays should happen after the initial environment was set - up completely. Not all attributes are truly linked, some are just - copied over so modifications on the original environment may not shine - through. - - .. versionchanged:: 3.1.5 - ``enable_async`` is applied correctly. - - .. versionchanged:: 3.1.2 - Added the ``newline_sequence``, ``keep_trailing_newline``, - and ``enable_async`` parameters to match ``__init__``. - """ - args = dict(locals()) - del args["self"], args["cache_size"], args["extensions"], args["enable_async"] - - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.overlayed = True - rv.linked_to = self - - for key, value in args.items(): - if value is not missing: - setattr(rv, key, value) - - if cache_size is not missing: - rv.cache = create_cache(cache_size) - else: - rv.cache = copy_cache(self.cache) - - rv.extensions = {} - for key, value in self.extensions.items(): - rv.extensions[key] = value.bind(rv) - if extensions is not missing: - rv.extensions.update(load_extensions(rv, extensions)) - - if enable_async is not missing: - rv.is_async = enable_async - - return _environment_config_check(rv) - - @property - def lexer(self) -> Lexer: - """The lexer for this environment.""" - return get_lexer(self) - - def iter_extensions(self) -> t.Iterator["Extension"]: - """Iterates over the extensions by priority.""" - return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) - - def getitem( - self, obj: t.Any, argument: t.Union[str, t.Any] - ) -> t.Union[t.Any, Undefined]: - """Get an item or attribute of an object but prefer the item.""" - try: - return obj[argument] - except (AttributeError, TypeError, LookupError): - if isinstance(argument, str): - try: - attr = str(argument) - except Exception: - pass - else: - try: - return getattr(obj, attr) - except AttributeError: - pass - return self.undefined(obj=obj, name=argument) - - def getattr(self, obj: t.Any, attribute: str) -> t.Any: - """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a string. - """ - try: - return getattr(obj, attribute) - except AttributeError: - pass - try: - return obj[attribute] - except (TypeError, LookupError, AttributeError): - return self.undefined(obj=obj, name=attribute) - - def _filter_test_common( - self, - name: t.Union[str, Undefined], - value: t.Any, - args: t.Optional[t.Sequence[t.Any]], - kwargs: t.Optional[t.Mapping[str, t.Any]], - context: t.Optional[Context], - eval_ctx: t.Optional[EvalContext], - is_filter: bool, - ) -> t.Any: - if is_filter: - env_map = self.filters - type_name = "filter" - else: - env_map = self.tests - type_name = "test" - - func = env_map.get(name) # type: ignore - - if func is None: - msg = f"No {type_name} named {name!r}." - - if isinstance(name, Undefined): - try: - name._fail_with_undefined_error() - except Exception as e: - msg = f"{msg} ({e}; did you forget to quote the callable name?)" - - raise TemplateRuntimeError(msg) - - args = [value, *(args if args is not None else ())] - kwargs = kwargs if kwargs is not None else {} - pass_arg = _PassArg.from_obj(func) - - if pass_arg is _PassArg.context: - if context is None: - raise TemplateRuntimeError( - f"Attempted to invoke a context {type_name} without context." 
- ) - - args.insert(0, context) - elif pass_arg is _PassArg.eval_context: - if eval_ctx is None: - if context is not None: - eval_ctx = context.eval_ctx - else: - eval_ctx = EvalContext(self) - - args.insert(0, eval_ctx) - elif pass_arg is _PassArg.environment: - args.insert(0, self) - - return func(*args, **kwargs) - - def call_filter( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a filter on a value the same way the compiler does. - - This might return a coroutine if the filter is running from an - environment in async mode and the filter supports async - execution. It's your responsibility to await this if needed. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, True - ) - - def call_test( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a test on a value the same way the compiler does. - - This might return a coroutine if the test is running from an - environment in async mode and the test supports async execution. - It's your responsibility to await this if needed. - - .. versionchanged:: 3.0 - Tests support ``@pass_context``, etc. decorators. Added - the ``context`` and ``eval_ctx`` parameters. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, False - ) - - @internalcode - def parse( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> nodes.Template: - """Parse the sourcecode and return the abstract syntax tree. This - tree of nodes is used by the compiler to convert the template into - executable source- or bytecode. This is useful for debugging or to - extract information from templates. - - If you are :ref:`developing Jinja extensions ` - this gives you a good overview of the node tree generated. - """ - try: - return self._parse(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def _parse( - self, source: str, name: t.Optional[str], filename: t.Optional[str] - ) -> nodes.Template: - """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, filename).parse() - - def lex( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> t.Iterator[t.Tuple[int, str, str]]: - """Lex the given sourcecode and return a generator that yields - tokens as tuples in the form ``(lineno, token_type, value)``. - This can be useful for :ref:`extension development ` - and debugging templates. - - This does not perform preprocessing. If you want the preprocessing - of the extensions to be applied you have to filter source through - the :meth:`preprocess` method. - """ - source = str(source) - try: - return self.lexer.tokeniter(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def preprocess( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> str: - """Preprocesses the source with all extensions. 
This is automatically - called for all parsing and compiling methods but *not* for :meth:`lex` - because there you usually only want the actual source tokenized. - """ - return reduce( - lambda s, e: e.preprocess(s, name, filename), - self.iter_extensions(), - str(source), - ) - - def _tokenize( - self, - source: str, - name: t.Optional[str], - filename: t.Optional[str] = None, - state: t.Optional[str] = None, - ) -> TokenStream: - """Called by the parser to do the preprocessing and filtering - for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. - """ - source = self.preprocess(source, name, filename) - stream = self.lexer.tokenize(source, name, filename, state) - - for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) # type: ignore - - if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) - - return stream - - def _generate( - self, - source: nodes.Template, - name: t.Optional[str], - filename: t.Optional[str], - defer_init: bool = False, - ) -> str: - """Internal hook that can be overridden to hook a different generate - method in. - - .. versionadded:: 2.5 - """ - return generate( # type: ignore - source, - self, - name, - filename, - defer_init=defer_init, - optimized=self.optimized, - ) - - def _compile(self, source: str, filename: str) -> CodeType: - """Internal hook that can be overridden to hook a different compile - method in. - - .. versionadded:: 2.5 - """ - return compile(source, filename, "exec") - - @typing.overload - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[False]" = False, - defer_init: bool = False, - ) -> CodeType: ... - - @typing.overload - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[True]" = ..., - defer_init: bool = False, - ) -> str: ... - - @internalcode - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: bool = False, - defer_init: bool = False, - ) -> t.Union[str, CodeType]: - """Compile a node or template source code. The `name` parameter is - the load name of the template after it was joined using - :meth:`join_path` if necessary, not the filename on the file system. - the `filename` parameter is the estimated filename of the template on - the file system. If the template came from a database or memory this - can be omitted. - - The return value of this method is a python code object. If the `raw` - parameter is `True` the return value will be a string with python - code equivalent to the bytecode returned otherwise. This method is - mainly used internally. - - `defer_init` is use internally to aid the module code generator. This - causes the generated code to be able to import without the global - environment variable to be set. - - .. versionadded:: 2.4 - `defer_init` parameter added. - """ - source_hint = None - try: - if isinstance(source, str): - source_hint = source - source = self._parse(source, name, filename) - source = self._generate(source, name, filename, defer_init=defer_init) - if raw: - return source - if filename is None: - filename = "