Also add venv (not optimal)
76
.venv/bin/activate
Normal file
@@ -0,0 +1,76 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
        hash -r
    fi

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "$1" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="/tmp/.venv"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    if [ "x(.venv) " != x ] ; then
        PS1="(.venv) ${PS1:-}"
    else
        if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
            # special case for Aspen magic directories
            # see http://www.zetadev.com/software/aspen/
            PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
        else
            PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
        fi
    fi
    export PS1
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
    hash -r
fi
37
.venv/bin/activate.csh
Normal file
@@ -0,0 +1,37 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/tmp/.venv"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    if (".venv" != "") then
        set env_name = ".venv"
    else
        if (`basename "VIRTUAL_ENV"` == "__") then
            # special case for Aspen magic directories
            # see http://www.zetadev.com/software/aspen/
            set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
        else
            set env_name = `basename "$VIRTUAL_ENV"`
        endif
    endif
    set prompt = "[$env_name] $prompt"
    unset env_name
endif

alias pydoc python -m pydoc

rehash
75
.venv/bin/activate.fish
Normal file
@@ -0,0 +1,75 @@
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
# you cannot run it directly

function deactivate -d "Exit virtualenv and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        functions -e fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
    end

    set -e VIRTUAL_ENV
    if test "$argv[1]" != "nondestructive"
        # Self destruct!
        functions -e deactivate
    end
end

# unset irrelevant variables
deactivate nondestructive

set -gx VIRTUAL_ENV "/tmp/.venv"

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# unset PYTHONHOME if set
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # save the current fish_prompt function as the function _old_fish_prompt
    functions -c fish_prompt _old_fish_prompt

    # with the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command
        set -l old_status $status

        # Prompt override?
        if test -n "(.venv) "
            printf "%s%s" "(.venv) " (set_color normal)
        else
            # ...Otherwise, prepend env
            set -l _checkbase (basename "$VIRTUAL_ENV")
            if test $_checkbase = "__"
                # special case for Aspen magic directories
                # see http://www.zetadev.com/software/aspen/
                printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
            else
                printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
            end
        end

        # Restore the return status of the previous command.
        echo "exit $old_status" | .
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end
10
.venv/bin/easy_install
Executable file
@@ -0,0 +1,10 @@
#!/tmp/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
10
.venv/bin/easy_install-3.7
Executable file
@@ -0,0 +1,10 @@
#!/tmp/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
10
.venv/bin/jinja2
Executable file
@@ -0,0 +1,10 @@
#!/tmp/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from jinja2cli import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
10
.venv/bin/pip
Executable file
@@ -0,0 +1,10 @@
#!/tmp/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from pip._internal import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
10
.venv/bin/pip3
Executable file
@@ -0,0 +1,10 @@
#!/tmp/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from pip._internal import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
10
.venv/bin/pip3.7
Executable file
@@ -0,0 +1,10 @@
#!/tmp/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from pip._internal import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
BIN
.venv/bin/python
Executable file
Binary file not shown.
BIN
.venv/bin/python3
Executable file
Binary file not shown.
10
.venv/bin/wheel
Executable file
@@ -0,0 +1,10 @@
#!/tmp/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from wheel.cli import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
10
.venv/bin/yamllint
Executable file
@@ -0,0 +1,10 @@
#!/tmp/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from yamllint.cli import run

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run())
@@ -0,0 +1,66 @@
Cerberus is developed and maintained by the Cerberus community. It was created
by Nicola Iarocci.

Core maintainers
~~~~~~~~~~~~~~~~

- Nicola Iarocci (nicolaiarocci)
- Frank Sachsenheim (funkyfuture)

Contributors
~~~~~~~~~~~~

- Antoine Lubineau
- Arsh Singh
- Audric Schiltknecht
- Brandon Aubie
- Brett
- Bruno Oliveira
- Bryan W. Weber
- C.D. Clark III
- Christian Hogan
- Connor Zapfel
- Damián Nohales
- Danielle Pizzolli
- Davis Kirkendall
- Denis Carriere
- Dominik Kellner
- Eelke Hermens
- Evgeny Odegov
- Florian Rathgeber
- Gabriel Wainer
- Harro van der Klauw
- Jaroslav Semančík
- Jonathan Huot
- Kaleb Pomeroy
- Kirill Pavlov
- Kornelijus Survila
- Lujeni
- Luke Bechtel
- Luo Peng
- Martijn Vermaat
- Martin Ortbauer
- Matthew Ellison
- Michael Klich
- Nik Haldimann
- Nikita Melentev
- Nikita Vlaznev
- Paul Weaver
- Peter Demin
- Riccardo
- Roman Redkovich
- Scott Crunkleton
- Sebastian Heid
- Sebastian Rajo
- Sergey Leshchenko
- Tobias Betz
- Trong Hieu HA
- Vipul Gupta
- Waldir Pimenta
- Yauhen Shulitski
- calve
- gilbsgilbs

A full, up-to-date list of contributors is available from git with:

    git shortlog -sne
@@ -0,0 +1 @@
pip
@@ -0,0 +1,15 @@
ISC License

Copyright (c) 2012-2016 Nicola Iarocci.

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
@@ -0,0 +1,165 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Cerberus
|
||||
Version: 1.3.5
|
||||
Summary: Lightweight, extensible schema and data validation tool for Pythondictionaries.
|
||||
Author-email: Nicola Iarocci <nicola@nicolaiarocci.com>
|
||||
Maintainer-email: Frank Sachsenheim <funkyfuture@riseup.net>
|
||||
License: ISC License
|
||||
|
||||
Copyright (c) 2012-2016 Nicola Iarocci.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
Project-URL: Documentation, http://docs.python-cerberus.org
|
||||
Project-URL: Repository, https://github.com/pyeve/cerberus
|
||||
Keywords: validation,schema,dictionaries,documents,normalization
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Natural Language :: English
|
||||
Classifier: License :: OSI Approved :: ISC License (ISCL)
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
License-File: AUTHORS
|
||||
Requires-Dist: importlib-metadata ; python_version < "3.8"
|
||||
|
||||
Cerberus |latest-version|
|
||||
=========================
|
||||
|python-support| |black|
|
||||
|
||||
Cerberus is a lightweight and extensible data validation library for Python.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
>>> v = Validator({'name': {'type': 'string'}})
|
||||
>>> v.validate({'name': 'john doe'})
|
||||
True
|
||||
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
Cerberus provides type checking and other base functionality out of the box and
|
||||
is designed to be non-blocking and easily and widely extensible, allowing for
|
||||
custom validation. It has no dependencies, but has the potential to become
|
||||
yours.
|
||||
|
||||
|
||||
Versioning & Interpreter support
|
||||
--------------------------------
|
||||
|
||||
Starting with Cerberus 1.2, it is maintained according to
|
||||
`semantic versioning`_. So, a major release sheds off the old and defines a
|
||||
space for the new, minor releases ship further new features and improvements
|
||||
(you know the drill, new bugs are inevitable too), and micro releases polish a
|
||||
definite amount of features to glory.
|
||||
|
||||
We intend to test Cerberus against all CPython interpreters at least until half
|
||||
a year after their `end of life`_ and against the most recent PyPy interpreter
|
||||
as a requirement for a release. If you still need to use it with a potential
|
||||
security hole in your setup, it should most probably work with the latest
|
||||
minor version branch from the time when the interpreter was still tested.
|
||||
Subsequent minor versions have good chances as well. In any case, you are
|
||||
advised to run the contributed test suite on your target system.
|
||||
|
||||
|
||||
Funding
|
||||
-------
|
||||
|
||||
Cerberus is an open source, collaboratively funded project. If you run a
|
||||
business and are using Cerberus in a revenue-generating product, it would
|
||||
make business sense to sponsor its development: it ensures the project that
|
||||
your product relies on stays healthy and actively maintained. Individual users
|
||||
are also welcome to make a recurring pledge or a one time donation if Cerberus
|
||||
has helped you in your work or personal projects.
|
||||
|
||||
Every single sign-up makes a significant impact towards making Eve possible. To
|
||||
learn more, check out our `funding page`_.
|
||||
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
Complete documentation is available at http://docs.python-cerberus.org
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
Cerberus is on PyPI_, so all you need to do is:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install cerberus
|
||||
|
||||
|
||||
Testing
|
||||
-------
|
||||
|
||||
Just run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python setup.py test
|
||||
|
||||
Or you can use tox to run the tests under all supported Python versions. Make
|
||||
sure the required python versions are installed and run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install tox # first time only
|
||||
$ tox
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
Please see the `Contribution Guidelines`_.
|
||||
|
||||
|
||||
Copyright
|
||||
---------
|
||||
|
||||
Cerberus is an open source project by `Nicola Iarocci`_. See the license_ file
|
||||
for more information.
|
||||
|
||||
|
||||
.. _Contribution Guidelines: https://github.com/pyeve/cerberus/blob/1.3.x/CONTRIBUTING.rst
|
||||
.. _end of life: https://devguide.python.org/#status-of-python-branches
|
||||
.. _funding page: http://docs.python-cerberus.org/en/latest/funding.html
|
||||
.. _license: https://github.com/pyeve/cerberus/blob/1.3.x/LICENSE
|
||||
.. _Nicola Iarocci: https://nicolaiarocci.com/
|
||||
.. _PyPI: https://pypi.python.org/
|
||||
.. _semantic versioning: https://semver.org/
|
||||
|
||||
.. |black| image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
||||
:alt: Black code style
|
||||
:target: https://black.readthedocs.io/
|
||||
.. |latest-version| image:: https://img.shields.io/pypi/v/cerberus.svg
|
||||
:alt: Latest version on PyPI
|
||||
:target: https://pypi.org/project/cerberus
|
||||
.. |license| image:: https://img.shields.io/pypi/l/cerberus.svg
|
||||
:alt: Software license
|
||||
:target: https://github.com/pyeve/cerberus/blob/1.3.x/LICENSE
|
||||
.. |python-support| image:: https://img.shields.io/pypi/pyversions/cerberus.svg
|
||||
:target: https://pypi.python.org/pypi/cerberus
|
||||
:alt: Python versions
|
||||
@@ -0,0 +1,19 @@
|
||||
Cerberus-1.3.5.dist-info/AUTHORS,sha256=Wa5cbyooET3QnA5rDV6trSRxay3If8IuT0HEeEPHSSo,1145
|
||||
Cerberus-1.3.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
Cerberus-1.3.5.dist-info/LICENSE,sha256=OXJkvLKH9kPVx7jBhG566vGRH4I2svi9759-bxzy__k,751
|
||||
Cerberus-1.3.5.dist-info/METADATA,sha256=PVgGeqhMCCgtZE9w53Ud5zrJhFhFhxTOIsLYacwML3s,5991
|
||||
Cerberus-1.3.5.dist-info/RECORD,,
|
||||
Cerberus-1.3.5.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92
|
||||
Cerberus-1.3.5.dist-info/top_level.txt,sha256=ZwdViFmandWj-jK09wLSDM63moDFYE46vDo7tRb-1HE,9
|
||||
cerberus/__init__.py,sha256=oPjZ53fBjQga3k7dUmD4JTYGLFUWVsJlprRL4yhU-fE,819
|
||||
cerberus/__pycache__/__init__.cpython-37.pyc,,
|
||||
cerberus/__pycache__/errors.cpython-37.pyc,,
|
||||
cerberus/__pycache__/platform.cpython-37.pyc,,
|
||||
cerberus/__pycache__/schema.cpython-37.pyc,,
|
||||
cerberus/__pycache__/utils.cpython-37.pyc,,
|
||||
cerberus/__pycache__/validator.cpython-37.pyc,,
|
||||
cerberus/errors.py,sha256=VbTF6C6eTDYlwpXS0IW4gCMDbRf9FHOftIzuGS-yl0o,21252
|
||||
cerberus/platform.py,sha256=v4RCKKfWEw2mmG-F8f37QWGo2bBpVjd30nJEsouQo1s,1159
|
||||
cerberus/schema.py,sha256=pg7Oig1mKJtpCXDC7vb2iZ3zfI3ftihtcu_U6TrYVhg,18335
|
||||
cerberus/utils.py,sha256=IbWRFyNtBLz38pJsT2ogcaBzjHHyr59j2nqDyMH5czM,3875
|
||||
cerberus/validator.py,sha256=xuHR5Xo2WVFF2XQAx-VGLx3-CD1W2UDh6R6-DiiFTTI,64766
|
||||
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.41.1)
Root-Is-Purelib: true
Tag: py3-none-any
@@ -0,0 +1 @@
cerberus
@@ -0,0 +1 @@
pip
@@ -0,0 +1,28 @@
Copyright 2007 Pallets

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its
   contributors may be used to endorse or promote products derived from
   this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,105 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Jinja2
|
||||
Version: 3.1.3
|
||||
Summary: A very fast and expressive template engine.
|
||||
Home-page: https://palletsprojects.com/p/jinja/
|
||||
Maintainer: Pallets
|
||||
Maintainer-email: contact@palletsprojects.com
|
||||
License: BSD-3-Clause
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Documentation, https://jinja.palletsprojects.com/
|
||||
Project-URL: Changes, https://jinja.palletsprojects.com/changes/
|
||||
Project-URL: Source Code, https://github.com/pallets/jinja/
|
||||
Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Text Processing :: Markup :: HTML
|
||||
Requires-Python: >=3.7
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE.rst
|
||||
Requires-Dist: MarkupSafe >=2.0
|
||||
Provides-Extra: i18n
|
||||
Requires-Dist: Babel >=2.7 ; extra == 'i18n'
|
||||
|
||||
Jinja
|
||||
=====
|
||||
|
||||
Jinja is a fast, expressive, extensible templating engine. Special
|
||||
placeholders in the template allow writing code similar to Python
|
||||
syntax. Then the template is passed data to render the final document.
|
||||
|
||||
It includes:
|
||||
|
||||
- Template inheritance and inclusion.
|
||||
- Define and import macros within templates.
|
||||
- HTML templates can use autoescaping to prevent XSS from untrusted
|
||||
user input.
|
||||
- A sandboxed environment can safely render untrusted templates.
|
||||
- AsyncIO support for generating templates and calling async
|
||||
functions.
|
||||
- I18N support with Babel.
|
||||
- Templates are compiled to optimized Python code just-in-time and
|
||||
cached, or can be compiled ahead-of-time.
|
||||
- Exceptions point to the correct line in templates to make debugging
|
||||
easier.
|
||||
- Extensible filters, tests, functions, and even syntax.
|
||||
|
||||
Jinja's philosophy is that while application logic belongs in Python if
|
||||
possible, it shouldn't make the template designer's job difficult by
|
||||
restricting functionality too much.
|
||||
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install and update using `pip`_:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
$ pip install -U Jinja2
|
||||
|
||||
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
||||
|
||||
|
||||
In A Nutshell
|
||||
-------------
|
||||
|
||||
.. code-block:: jinja
|
||||
|
||||
{% extends "base.html" %}
|
||||
{% block title %}Members{% endblock %}
|
||||
{% block content %}
|
||||
<ul>
|
||||
{% for user in users %}
|
||||
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endblock %}
|
||||
|
||||
|
||||
Donate
|
||||
------
|
||||
|
||||
The Pallets organization develops and supports Jinja and other popular
|
||||
packages. In order to grow the community of contributors and users, and
|
||||
allow the maintainers to devote more time to the projects, `please
|
||||
donate today`_.
|
||||
|
||||
.. _please donate today: https://palletsprojects.com/donate
|
||||
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
- Documentation: https://jinja.palletsprojects.com/
|
||||
- Changes: https://jinja.palletsprojects.com/changes/
|
||||
- PyPI Releases: https://pypi.org/project/Jinja2/
|
||||
- Source Code: https://github.com/pallets/jinja/
|
||||
- Issue Tracker: https://github.com/pallets/jinja/issues/
|
||||
- Chat: https://discord.gg/pallets
|
||||
@@ -0,0 +1,58 @@
|
||||
Jinja2-3.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
Jinja2-3.1.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
|
||||
Jinja2-3.1.3.dist-info/METADATA,sha256=0cLNbRCI91jytc7Bzv3XAQfZzFDF2gxkJuH46eF5vew,3301
|
||||
Jinja2-3.1.3.dist-info/RECORD,,
|
||||
Jinja2-3.1.3.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
|
||||
Jinja2-3.1.3.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59
|
||||
Jinja2-3.1.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
|
||||
jinja2/__init__.py,sha256=NTBwMwsECrdHmxeXF7seusHLzrh6Ldn1A9qhS5cDuf0,1927
|
||||
jinja2/__pycache__/__init__.cpython-37.pyc,,
|
||||
jinja2/__pycache__/_identifier.cpython-37.pyc,,
|
||||
jinja2/__pycache__/async_utils.cpython-37.pyc,,
|
||||
jinja2/__pycache__/bccache.cpython-37.pyc,,
|
||||
jinja2/__pycache__/compiler.cpython-37.pyc,,
|
||||
jinja2/__pycache__/constants.cpython-37.pyc,,
|
||||
jinja2/__pycache__/debug.cpython-37.pyc,,
|
||||
jinja2/__pycache__/defaults.cpython-37.pyc,,
|
||||
jinja2/__pycache__/environment.cpython-37.pyc,,
|
||||
jinja2/__pycache__/exceptions.cpython-37.pyc,,
|
||||
jinja2/__pycache__/ext.cpython-37.pyc,,
|
||||
jinja2/__pycache__/filters.cpython-37.pyc,,
|
||||
jinja2/__pycache__/idtracking.cpython-37.pyc,,
|
||||
jinja2/__pycache__/lexer.cpython-37.pyc,,
|
||||
jinja2/__pycache__/loaders.cpython-37.pyc,,
|
||||
jinja2/__pycache__/meta.cpython-37.pyc,,
|
||||
jinja2/__pycache__/nativetypes.cpython-37.pyc,,
|
||||
jinja2/__pycache__/nodes.cpython-37.pyc,,
|
||||
jinja2/__pycache__/optimizer.cpython-37.pyc,,
|
||||
jinja2/__pycache__/parser.cpython-37.pyc,,
|
||||
jinja2/__pycache__/runtime.cpython-37.pyc,,
|
||||
jinja2/__pycache__/sandbox.cpython-37.pyc,,
|
||||
jinja2/__pycache__/tests.cpython-37.pyc,,
|
||||
jinja2/__pycache__/utils.cpython-37.pyc,,
|
||||
jinja2/__pycache__/visitor.cpython-37.pyc,,
|
||||
jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958
|
||||
jinja2/async_utils.py,sha256=dFcmh6lMNfbh7eLKrBio8JqAKLHdZbpCuurFN4OERtY,2447
|
||||
jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061
|
||||
jinja2/compiler.py,sha256=PJzYdRLStlEOqmnQs1YxlizPrJoj3jTZuUleREn6AIQ,72199
|
||||
jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433
|
||||
jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299
|
||||
jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267
|
||||
jinja2/environment.py,sha256=0qldX3VQKZcm6lgn7zHz94oRFow7YPYERiqkquomNjU,61253
|
||||
jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071
|
||||
jinja2/ext.py,sha256=5fnMpllaXkfm2P_93RIvi-OnK7Tk8mCW8Du-GcD12Hc,31844
|
||||
jinja2/filters.py,sha256=vYjKb2zaPShvYtn_LpSmqfS8SScbrA_KOanNibsMDIE,53862
|
||||
jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704
|
||||
jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726
|
||||
jinja2/loaders.py,sha256=ayAwxfrA1SAffQta0nwSDm3TDT4KYiIGN_D9Z45B310,23085
|
||||
jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396
|
||||
jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210
|
||||
jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550
|
||||
jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650
|
||||
jinja2/parser.py,sha256=Y199wPL-G67gJoi5G_5sHuu9uEP1PJkjjLEW_xTH8-k,39736
|
||||
jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
jinja2/runtime.py,sha256=_6LkKIWFJjQdqlrgA3K39zBFQ-7Orm3wGDm96RwxQoE,33406
|
||||
jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584
|
||||
jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905
|
||||
jinja2/utils.py,sha256=IMwRIcN1SsTw2-jdQtlH2KzNABsXZBW_-tnFXafQBvY,23933
|
||||
jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568
|
||||
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: true
Tag: py3-none-any
@@ -0,0 +1,2 @@
[babel.extractors]
jinja2 = jinja2.ext:babel_extract[i18n]
@@ -0,0 +1 @@
jinja2
@@ -0,0 +1,93 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: MarkupSafe
|
||||
Version: 2.1.5
|
||||
Summary: Safely add untrusted strings to HTML/XML markup.
|
||||
Home-page: https://palletsprojects.com/p/markupsafe/
|
||||
Maintainer: Pallets
|
||||
Maintainer-email: contact@palletsprojects.com
|
||||
License: BSD-3-Clause
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
|
||||
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
|
||||
Project-URL: Source Code, https://github.com/pallets/markupsafe/
|
||||
Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Description: MarkupSafe
|
||||
==========
|
||||
|
||||
MarkupSafe implements a text object that escapes characters so it is
|
||||
safe to use in HTML and XML. Characters that have special meanings are
|
||||
replaced so that they display as the actual characters. This mitigates
|
||||
injection attacks, meaning untrusted user input can safely be displayed
|
||||
on a page.
|
||||
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install and update using `pip`_:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
pip install -U MarkupSafe
|
||||
|
||||
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
||||
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> from markupsafe import Markup, escape
|
||||
|
||||
>>> # escape replaces special characters and wraps in Markup
|
||||
>>> escape("<script>alert(document.cookie);</script>")
|
||||
Markup('<script>alert(document.cookie);</script>')
|
||||
|
||||
>>> # wrap in Markup to mark text "safe" and prevent escaping
|
||||
>>> Markup("<strong>Hello</strong>")
|
||||
Markup('<strong>hello</strong>')
|
||||
|
||||
>>> escape(Markup("<strong>Hello</strong>"))
|
||||
Markup('<strong>hello</strong>')
|
||||
|
||||
>>> # Markup is a str subclass
|
||||
>>> # methods and operators escape their arguments
|
||||
>>> template = Markup("Hello <em>{name}</em>")
|
||||
>>> template.format(name='"World"')
|
||||
Markup('Hello <em>"World"</em>')
|
||||
|
||||
|
||||
Donate
|
||||
------
|
||||
|
||||
The Pallets organization develops and supports MarkupSafe and other
|
||||
popular packages. In order to grow the community of contributors and
|
||||
users, and allow the maintainers to devote more time to the projects,
|
||||
`please donate today`_.
|
||||
|
||||
.. _please donate today: https://palletsprojects.com/donate
|
||||
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
- Documentation: https://markupsafe.palletsprojects.com/
|
||||
- Changes: https://markupsafe.palletsprojects.com/changes/
|
||||
- PyPI Releases: https://pypi.org/project/MarkupSafe/
|
||||
- Source Code: https://github.com/pallets/markupsafe/
|
||||
- Issue Tracker: https://github.com/pallets/markupsafe/issues/
|
||||
- Chat: https://discord.gg/pallets
|
||||
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Text Processing :: Markup :: HTML
|
||||
Requires-Python: >= 3.7
|
||||
Description-Content-Type: text/x-rst
|
||||
@@ -0,0 +1,35 @@
CHANGES.rst
LICENSE.rst
MANIFEST.in
README.rst
setup.cfg
setup.py
tox.ini
docs/Makefile
docs/changes.rst
docs/conf.py
docs/escaping.rst
docs/formatting.rst
docs/html.rst
docs/index.rst
docs/license.rst
docs/make.bat
requirements/build.txt
requirements/dev.txt
requirements/docs.txt
requirements/tests.txt
requirements/typing.txt
src/MarkupSafe.egg-info/PKG-INFO
src/MarkupSafe.egg-info/SOURCES.txt
src/MarkupSafe.egg-info/dependency_links.txt
src/MarkupSafe.egg-info/top_level.txt
src/markupsafe/__init__.py
src/markupsafe/_native.py
src/markupsafe/_speedups.c
src/markupsafe/_speedups.pyi
src/markupsafe/py.typed
tests/conftest.py
tests/test_escape.py
tests/test_exception_custom_html.py
tests/test_leak.py
tests/test_markupsafe.py
@@ -0,0 +1 @@
@@ -0,0 +1,12 @@
../markupsafe/__init__.py
../markupsafe/__pycache__/__init__.cpython-37.pyc
../markupsafe/__pycache__/_native.cpython-37.pyc
../markupsafe/_native.py
../markupsafe/_speedups.c
../markupsafe/_speedups.cpython-37m-x86_64-linux-gnu.so
../markupsafe/_speedups.pyi
../markupsafe/py.typed
PKG-INFO
SOURCES.txt
dependency_links.txt
top_level.txt
@@ -0,0 +1 @@
markupsafe
@@ -0,0 +1 @@
pip
@@ -0,0 +1,20 @@
Copyright (c) 2017-2021 Ingy döt Net
Copyright (c) 2006-2016 Kirill Simonov

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -0,0 +1,46 @@
Metadata-Version: 2.1
Name: PyYAML
Version: 6.0.1
Summary: YAML parser and emitter for Python
Home-page: https://pyyaml.org/
Download-URL: https://pypi.org/project/PyYAML/
Author: Kirill Simonov
Author-email: xi@resolvent.net
License: MIT
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
Project-URL: CI, https://github.com/yaml/pyyaml/actions
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
Project-URL: Source Code, https://github.com/yaml/pyyaml
Platform: Any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Cython
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Markup
Requires-Python: >=3.6
License-File: LICENSE

YAML is a data serialization format designed for human readability
and interaction with scripting languages. PyYAML is a YAML parser
and emitter for Python.

PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
support, capable extension API, and sensible error messages. PyYAML
supports standard YAML tags and provides Python-specific tags that
allow to represent an arbitrary Python object.

PyYAML is applicable for a broad range of tasks from complex
configuration files to object serialization and persistence.
@@ -0,0 +1,42 @@
|
||||
PyYAML-6.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
PyYAML-6.0.1.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
|
||||
PyYAML-6.0.1.dist-info/METADATA,sha256=UNNF8-SzzwOKXVo-kV5lXUGH2_wDWMBmGxqISpp5HQk,2058
|
||||
PyYAML-6.0.1.dist-info/RECORD,,
|
||||
PyYAML-6.0.1.dist-info/WHEEL,sha256=f2kJt0KSgFuwuHtWUSz6w6PHLbc-ZwVrVTUjwVNK4Mc,104
|
||||
PyYAML-6.0.1.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
|
||||
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
|
||||
_yaml/__pycache__/__init__.cpython-37.pyc,,
|
||||
yaml/__init__.py,sha256=bhl05qSeO-1ZxlSRjGrvl2m9nrXb1n9-GQatTN0Mrqc,12311
|
||||
yaml/__pycache__/__init__.cpython-37.pyc,,
|
||||
yaml/__pycache__/composer.cpython-37.pyc,,
|
||||
yaml/__pycache__/constructor.cpython-37.pyc,,
|
||||
yaml/__pycache__/cyaml.cpython-37.pyc,,
|
||||
yaml/__pycache__/dumper.cpython-37.pyc,,
|
||||
yaml/__pycache__/emitter.cpython-37.pyc,,
|
||||
yaml/__pycache__/error.cpython-37.pyc,,
|
||||
yaml/__pycache__/events.cpython-37.pyc,,
|
||||
yaml/__pycache__/loader.cpython-37.pyc,,
|
||||
yaml/__pycache__/nodes.cpython-37.pyc,,
|
||||
yaml/__pycache__/parser.cpython-37.pyc,,
|
||||
yaml/__pycache__/reader.cpython-37.pyc,,
|
||||
yaml/__pycache__/representer.cpython-37.pyc,,
|
||||
yaml/__pycache__/resolver.cpython-37.pyc,,
|
||||
yaml/__pycache__/scanner.cpython-37.pyc,,
|
||||
yaml/__pycache__/serializer.cpython-37.pyc,,
|
||||
yaml/__pycache__/tokens.cpython-37.pyc,,
|
||||
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
|
||||
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
|
||||
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
|
||||
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
|
||||
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
|
||||
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
|
||||
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
|
||||
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
|
||||
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
|
||||
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
|
||||
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
|
||||
yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
|
||||
yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
|
||||
yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
|
||||
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
|
||||
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
|
||||
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: false
Tag: cp37-cp37m-linux_x86_64
@@ -0,0 +1,2 @@
_yaml
yaml
Binary file not shown.
Binary file not shown.
33
.venv/lib/python3.7/site-packages/_yaml/__init__.py
Normal file
@@ -0,0 +1,33 @@
# This is a stub package designed to roughly emulate the _yaml
# extension module, which previously existed as a standalone module
# and has been moved into the `yaml` package namespace.
# It does not perfectly mimic its old counterpart, but should get
# close enough for anyone who's relying on it even when they shouldn't.
import yaml

# in some circumstances, the yaml module we imoprted may be from a different version, so we need
# to tread carefully when poking at it here (it may not have the attributes we expect)
if not getattr(yaml, '__with_libyaml__', False):
    from sys import version_info

    exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
    raise exc("No module named '_yaml'")
else:
    from yaml._yaml import *
    import warnings
    warnings.warn(
        'The _yaml extension module is now located at yaml._yaml'
        ' and its location is subject to change. To use the'
        ' LibYAML-based parser and emitter, import from `yaml`:'
        ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
        DeprecationWarning
    )
    del warnings
    # Don't `del yaml` here because yaml is actually an existing
    # namespace member of _yaml.

__name__ = '_yaml'
# If the module is top-level (i.e. not a part of any specific package)
# then the attribute should be set to ''.
# https://docs.python.org/3.8/library/types.html
__package__ = ''
Binary file not shown.
32
.venv/lib/python3.7/site-packages/cerberus/__init__.py
Normal file
@@ -0,0 +1,32 @@
"""
Extensible validation for Python dictionaries.

:copyright: 2012-2023 by Nicola Iarocci.
:license: ISC, see LICENSE for more details.

Full documentation is available at https://python-cerberus.org/

"""

from __future__ import absolute_import

from cerberus.platform import importlib_metadata
from cerberus.schema import rules_set_registry, schema_registry, SchemaError
from cerberus.utils import TypeDefinition
from cerberus.validator import DocumentError, Validator


try:
    __version__ = importlib_metadata.version("Cerberus")
except importlib_metadata.PackageNotFoundError:
    __version__ = "unknown"

__all__ = [
    DocumentError.__name__,
    SchemaError.__name__,
    TypeDefinition.__name__,
    Validator.__name__,
    "schema_registry",
    "rules_set_registry",
    "__version__",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
655
.venv/lib/python3.7/site-packages/cerberus/errors.py
Normal file
@@ -0,0 +1,655 @@
|
||||
# -*-: coding utf-8 -*-
|
||||
""" This module contains the error-related constants and classes. """
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
from collections import defaultdict, namedtuple
|
||||
from copy import copy, deepcopy
|
||||
from functools import wraps
|
||||
from pprint import pformat
|
||||
|
||||
from cerberus.platform import MutableMapping
|
||||
from cerberus.utils import compare_paths_lt, quote_string
|
||||
|
||||
|
||||
ErrorDefinition = namedtuple('ErrorDefinition', 'code, rule')
|
||||
"""
|
||||
This class is used to define possible errors. Each distinguishable error is
|
||||
defined by a *unique* error ``code`` as integer and the ``rule`` that can
|
||||
cause it as string.
|
||||
The instances' names do not contain a common prefix as they are supposed to be
|
||||
referenced within the module namespace, e.g. ``errors.CUSTOM``.
|
||||
"""
|
||||
|
||||
|
||||
# custom
|
||||
CUSTOM = ErrorDefinition(0x00, None)
|
||||
|
||||
# existence
|
||||
DOCUMENT_MISSING = ErrorDefinition(0x01, None) # issues/141
|
||||
DOCUMENT_MISSING = "document is missing"
|
||||
REQUIRED_FIELD = ErrorDefinition(0x02, 'required')
|
||||
UNKNOWN_FIELD = ErrorDefinition(0x03, None)
|
||||
DEPENDENCIES_FIELD = ErrorDefinition(0x04, 'dependencies')
|
||||
DEPENDENCIES_FIELD_VALUE = ErrorDefinition(0x05, 'dependencies')
|
||||
EXCLUDES_FIELD = ErrorDefinition(0x06, 'excludes')
|
||||
|
||||
# shape
|
||||
DOCUMENT_FORMAT = ErrorDefinition(0x21, None) # issues/141
|
||||
DOCUMENT_FORMAT = "'{0}' is not a document, must be a dict"
|
||||
EMPTY_NOT_ALLOWED = ErrorDefinition(0x22, 'empty')
|
||||
NOT_NULLABLE = ErrorDefinition(0x23, 'nullable')
|
||||
BAD_TYPE = ErrorDefinition(0x24, 'type')
|
||||
BAD_TYPE_FOR_SCHEMA = ErrorDefinition(0x25, 'schema')
|
||||
ITEMS_LENGTH = ErrorDefinition(0x26, 'items')
|
||||
MIN_LENGTH = ErrorDefinition(0x27, 'minlength')
|
||||
MAX_LENGTH = ErrorDefinition(0x28, 'maxlength')
|
||||
|
||||
|
||||
# color
|
||||
REGEX_MISMATCH = ErrorDefinition(0x41, 'regex')
|
||||
MIN_VALUE = ErrorDefinition(0x42, 'min')
|
||||
MAX_VALUE = ErrorDefinition(0x43, 'max')
|
||||
UNALLOWED_VALUE = ErrorDefinition(0x44, 'allowed')
|
||||
UNALLOWED_VALUES = ErrorDefinition(0x45, 'allowed')
|
||||
FORBIDDEN_VALUE = ErrorDefinition(0x46, 'forbidden')
|
||||
FORBIDDEN_VALUES = ErrorDefinition(0x47, 'forbidden')
|
||||
MISSING_MEMBERS = ErrorDefinition(0x48, 'contains')
|
||||
|
||||
# other
|
||||
NORMALIZATION = ErrorDefinition(0x60, None)
|
||||
COERCION_FAILED = ErrorDefinition(0x61, 'coerce')
|
||||
RENAMING_FAILED = ErrorDefinition(0x62, 'rename_handler')
|
||||
READONLY_FIELD = ErrorDefinition(0x63, 'readonly')
|
||||
SETTING_DEFAULT_FAILED = ErrorDefinition(0x64, 'default_setter')
|
||||
|
||||
# groups
|
||||
ERROR_GROUP = ErrorDefinition(0x80, None)
|
||||
MAPPING_SCHEMA = ErrorDefinition(0x81, 'schema')
|
||||
SEQUENCE_SCHEMA = ErrorDefinition(0x82, 'schema')
|
||||
# TODO remove KEYSCHEMA AND VALUESCHEMA with next major release
|
||||
KEYSRULES = KEYSCHEMA = ErrorDefinition(0x83, 'keysrules')
|
||||
VALUESRULES = VALUESCHEMA = ErrorDefinition(0x84, 'valuesrules')
|
||||
BAD_ITEMS = ErrorDefinition(0x8F, 'items')
|
||||
|
||||
LOGICAL = ErrorDefinition(0x90, None)
|
||||
NONEOF = ErrorDefinition(0x91, 'noneof')
|
||||
ONEOF = ErrorDefinition(0x92, 'oneof')
|
||||
ANYOF = ErrorDefinition(0x93, 'anyof')
|
||||
ALLOF = ErrorDefinition(0x94, 'allof')
|
||||
|
||||
|
||||
""" SchemaError messages """
|
||||
|
||||
SCHEMA_ERROR_DEFINITION_TYPE = "schema definition for field '{0}' must be a dict"
|
||||
SCHEMA_ERROR_MISSING = "validation schema missing"
|
||||
|
||||
|
||||
""" Error representations """
|
||||
|
||||
|
||||
class ValidationError(object):
|
||||
"""A simple class to store and query basic error information."""
|
||||
|
||||
def __init__(self, document_path, schema_path, code, rule, constraint, value, info):
|
||||
self.document_path = document_path
|
||||
""" The path to the field within the document that caused the error.
|
||||
Type: :class:`tuple` """
|
||||
self.schema_path = schema_path
|
||||
""" The path to the rule within the schema that caused the error.
|
||||
Type: :class:`tuple` """
|
||||
self.code = code
|
||||
""" The error's identifier code. Type: :class:`int` """
|
||||
self.rule = rule
|
||||
""" The rule that failed. Type: `string` """
|
||||
self.constraint = constraint
|
||||
""" The constraint that failed. """
|
||||
self.value = value
|
||||
""" The value that failed. """
|
||||
self.info = info
|
||||
""" May hold additional information about the error.
|
||||
Type: :class:`tuple` """
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Assumes the errors relate to the same document and schema."""
|
||||
return hash(self) == hash(other)
|
||||
|
||||
def __hash__(self):
|
||||
"""Expects that all other properties are transitively determined."""
|
||||
return hash(self.document_path) ^ hash(self.schema_path) ^ hash(self.code)
|
||||
|
||||
def __lt__(self, other):
|
||||
if self.document_path != other.document_path:
|
||||
return compare_paths_lt(self.document_path, other.document_path)
|
||||
else:
|
||||
return compare_paths_lt(self.schema_path, other.schema_path)
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"{class_name} @ {memptr} ( "
|
||||
"document_path={document_path},"
|
||||
"schema_path={schema_path},"
|
||||
"code={code},"
|
||||
"constraint={constraint},"
|
||||
"value={value},"
|
||||
"info={info} )".format(
|
||||
class_name=self.__class__.__name__,
|
||||
memptr=hex(id(self)), # noqa: E501
|
||||
document_path=self.document_path,
|
||||
schema_path=self.schema_path,
|
||||
code=hex(self.code),
|
||||
constraint=quote_string(self.constraint),
|
||||
value=quote_string(self.value),
|
||||
info=self.info,
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def child_errors(self):
|
||||
"""
|
||||
A list that contains the individual errors of a bulk validation error.
|
||||
"""
|
||||
return self.info[0] if self.is_group_error else None
|
||||
|
||||
@property
|
||||
def definitions_errors(self):
|
||||
"""
|
||||
Dictionary with errors of an \*of-rule mapped to the index of the definition it
|
||||
occurred in. Returns :obj:`None` if not applicable.
|
||||
"""
|
||||
if not self.is_logic_error:
|
||||
return None
|
||||
|
||||
result = defaultdict(list)
|
||||
for error in self.child_errors:
|
||||
i = error.schema_path[len(self.schema_path)]
|
||||
result[i].append(error)
|
||||
return result
|
||||
|
||||
@property
|
||||
def field(self):
|
||||
"""Field of the contextual mapping, possibly :obj:`None`."""
|
||||
if self.document_path:
|
||||
return self.document_path[-1]
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def is_group_error(self):
|
||||
"""``True`` for errors of bulk validations."""
|
||||
return bool(self.code & ERROR_GROUP.code)
|
||||
|
||||
@property
|
||||
def is_logic_error(self):
|
||||
"""
|
||||
``True`` for validation errors against different schemas with \*of-rules.
|
||||
"""
|
||||
return bool(self.code & LOGICAL.code - ERROR_GROUP.code)
|
||||
|
||||
@property
|
||||
def is_normalization_error(self):
|
||||
"""``True`` for normalization errors."""
|
||||
return bool(self.code & NORMALIZATION.code)
|
||||
|
||||
|
||||
class ErrorList(list):
|
||||
"""
|
||||
A list for :class:`~cerberus.errors.ValidationError` instances that can be queried
|
||||
with the ``in`` keyword for a particular :class:`~cerberus.errors.ErrorDefinition`.
|
||||
"""
|
||||
|
||||
def __contains__(self, error_definition):
|
||||
if not isinstance(error_definition, ErrorDefinition):
|
||||
raise TypeError
|
||||
|
||||
wanted_code = error_definition.code
|
||||
return any(x.code == wanted_code for x in self)
|
||||
|
||||
|
||||
class ErrorTreeNode(MutableMapping):
|
||||
__slots__ = ('descendants', 'errors', 'parent_node', 'path', 'tree_root')
|
||||
|
||||
def __init__(self, path, parent_node):
|
||||
self.parent_node = parent_node
|
||||
self.tree_root = self.parent_node.tree_root
|
||||
self.path = path[: self.parent_node.depth + 1]
|
||||
self.errors = ErrorList()
|
||||
self.descendants = {}
|
||||
|
||||
def __contains__(self, item):
|
||||
if isinstance(item, ErrorDefinition):
|
||||
return item in self.errors
|
||||
else:
|
||||
return item in self.descendants
|
||||
|
||||
def __delitem__(self, key):
|
||||
del self.descendants[key]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.errors)
|
||||
|
||||
def __getitem__(self, item):
|
||||
if isinstance(item, ErrorDefinition):
|
||||
for error in self.errors:
|
||||
if item.code == error.code:
|
||||
return error
|
||||
return None
|
||||
else:
|
||||
return self.descendants.get(item)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.errors)
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.descendants[key] = value
|
||||
|
||||
def __str__(self):
|
||||
return str(self.errors) + ',' + str(self.descendants)
|
||||
|
||||
@property
|
||||
def depth(self):
|
||||
return len(self.path)
|
||||
|
||||
@property
|
||||
def tree_type(self):
|
||||
return self.tree_root.tree_type
|
||||
|
||||
def add(self, error):
|
||||
error_path = self._path_of_(error)
|
||||
|
||||
key = error_path[self.depth]
|
||||
if key not in self.descendants:
|
||||
self[key] = ErrorTreeNode(error_path, self)
|
||||
|
||||
node = self[key]
|
||||
|
||||
if len(error_path) == self.depth + 1:
|
||||
node.errors.append(error)
|
||||
node.errors.sort()
|
||||
if error.is_group_error:
|
||||
for child_error in error.child_errors:
|
||||
self.tree_root.add(child_error)
|
||||
else:
|
||||
node.add(error)
|
||||
|
||||
def _path_of_(self, error):
|
||||
return getattr(error, self.tree_type + '_path')
|
||||
|
||||
|
||||
class ErrorTree(ErrorTreeNode):
|
||||
"""
|
||||
Base class for :class:`~cerberus.errors.DocumentErrorTree` and
|
||||
:class:`~cerberus.errors.SchemaErrorTree`.
|
||||
"""
|
||||
|
||||
def __init__(self, errors=()):
|
||||
self.parent_node = None
|
||||
self.tree_root = self
|
||||
self.path = ()
|
||||
self.errors = ErrorList()
|
||||
self.descendants = {}
|
||||
for error in errors:
|
||||
self.add(error)
|
||||
|
||||
def add(self, error):
|
||||
"""
|
||||
Add an error to the tree.
|
||||
|
||||
:param error: :class:`~cerberus.errors.ValidationError`
|
||||
"""
|
||||
if not self._path_of_(error):
|
||||
self.errors.append(error)
|
||||
self.errors.sort()
|
||||
else:
|
||||
super(ErrorTree, self).add(error)
|
||||
|
||||
def fetch_errors_from(self, path):
|
||||
"""
|
||||
Returns all errors for a particular path.
|
||||
|
||||
:param path: :class:`tuple` of :term:`hashable` s.
|
||||
:rtype: :class:`~cerberus.errors.ErrorList`
|
||||
"""
|
||||
node = self.fetch_node_from(path)
|
||||
if node is not None:
|
||||
return node.errors
|
||||
else:
|
||||
return ErrorList()
|
||||
|
||||
def fetch_node_from(self, path):
|
||||
"""
|
||||
Returns a node for a path.
|
||||
|
||||
:param path: Tuple of :term:`hashable` s.
|
||||
:rtype: :class:`~cerberus.errors.ErrorTreeNode` or :obj:`None`
|
||||
"""
|
||||
context = self
|
||||
for key in path:
|
||||
context = context[key]
|
||||
if context is None:
|
||||
break
|
||||
return context
|
||||
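# Illustrative usage sketch, not part of the upstream cerberus source: the
# Validator exposes instances of the two subclasses defined below as
# `document_error_tree` and `schema_error_tree`, so errors can be looked up by
# the path of the offending field. The schema and document are made up.
def _example_error_tree_lookup():  # documentation only, never called on import
    from cerberus import Validator

    schema = {'a': {'type': 'dict', 'schema': {'b': {'type': 'integer'}}}}
    validator = Validator(schema)
    validator.validate({'a': {'b': 'not an integer'}})
    node = validator.document_error_tree['a']['b']  # ErrorTreeNode or None
    assert node is not None and node.errors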
|
||||
|
||||
class DocumentErrorTree(ErrorTree):
|
||||
"""
|
||||
Implements a dict-like class to query errors by indexes following the structure of a
|
||||
validated document.
|
||||
"""
|
||||
|
||||
tree_type = 'document'
|
||||
|
||||
|
||||
class SchemaErrorTree(ErrorTree):
|
||||
"""
|
||||
Implements a dict-like class to query errors by indexes following the structure of
|
||||
the used schema.
|
||||
"""
|
||||
|
||||
tree_type = 'schema'
|
||||
|
||||
|
||||
class BaseErrorHandler(object):
|
||||
"""Base class for all error handlers.
|
||||
Subclasses are identified as error-handlers with an instance-test."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Optionally initialize a new instance."""
|
||||
pass
|
||||
|
||||
def __call__(self, errors):
|
||||
"""
|
||||
Returns errors in a handler-specific format.
|
||||
|
||||
:param errors: An object containing the errors.
|
||||
:type errors: :term:`iterable` of
|
||||
:class:`~cerberus.errors.ValidationError` instances or a
|
||||
:class:`~cerberus.Validator` instance
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __iter__(self):
|
||||
"""Be a superhero and implement an iterator over errors."""
|
||||
raise NotImplementedError
|
||||
|
||||
def add(self, error):
|
||||
"""
|
||||
Add an error to the errors' container object of a handler.
|
||||
|
||||
:param error: The error to add.
|
||||
:type error: :class:`~cerberus.errors.ValidationError`
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def emit(self, error):
|
||||
"""
|
||||
Optionally emits an error in the handler's format to a stream. Or light a LED,
|
||||
or even shut down a power plant.
|
||||
|
||||
:param error: The error to emit.
|
||||
:type error: :class:`~cerberus.errors.ValidationError`
|
||||
"""
|
||||
pass
|
||||
|
||||
def end(self, validator):
|
||||
"""
|
||||
Gets called when a validation ends.
|
||||
|
||||
:param validator: The calling validator.
|
||||
:type validator: :class:`~cerberus.Validator`
|
||||
"""
|
||||
pass
|
||||
|
||||
def extend(self, errors):
|
||||
"""
|
||||
Adds all errors to the handler's container object.
|
||||
|
||||
:param errors: The errors to add.
|
||||
:type errors: :term:`iterable` of
|
||||
:class:`~cerberus.errors.ValidationError` instances
|
||||
"""
|
||||
for error in errors:
|
||||
self.add(error)
|
||||
|
||||
def start(self, validator):
|
||||
"""
|
||||
Gets called when a validation starts.
|
||||
|
||||
:param validator: The calling validator.
|
||||
:type validator: :class:`~cerberus.Validator`
|
||||
"""
|
||||
pass
|
||||
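# Illustrative sketch, not part of the upstream cerberus source: a minimal
# concrete handler built on the interface above. Only __call__() and add() are
# needed for basic use; the remaining hooks are optional. The class name and
# output format are made up for the example.
class _ExampleFlatErrorHandler(BaseErrorHandler):
    """Collects errors as '<document path>: <error code>' strings."""

    def __init__(self, *args, **kwargs):
        self.output = []

    def __call__(self, errors):
        self.output = []
        self.extend(errors)
        return self.output

    def add(self, error):
        path = '.'.join(str(part) for part in error.document_path)
        self.output.append('%s: %#x' % (path, error.code))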
|
||||
|
||||
class ToyErrorHandler(BaseErrorHandler):
|
||||
def __call__(self, *args, **kwargs):
|
||||
raise RuntimeError('This is not supposed to happen.')
|
||||
|
||||
def clear(self):
|
||||
pass
|
||||
|
||||
|
||||
def encode_unicode(f):
|
||||
"""Cerberus error messages expect regular binary strings.
|
||||
If unicode is used in a ValidationError, the message can't be printed.
|
||||
|
||||
This decorator ensures that, if legacy Python is used, unicode
|
||||
strings are encoded before being passed to a function.
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrapped(obj, error):
|
||||
def _encode(value):
|
||||
"""Helper encoding unicode strings into binary utf-8"""
|
||||
if isinstance(value, unicode): # noqa: F821
|
||||
return value.encode('utf-8')
|
||||
return value
|
||||
|
||||
error = copy(error)
|
||||
error.document_path = _encode(error.document_path)
|
||||
error.schema_path = _encode(error.schema_path)
|
||||
error.constraint = _encode(error.constraint)
|
||||
error.value = _encode(error.value)
|
||||
error.info = _encode(error.info)
|
||||
return f(obj, error)
|
||||
|
||||
return wrapped if sys.version_info < (3,) else f
|
||||
|
||||
|
||||
class BasicErrorHandler(BaseErrorHandler):
|
||||
"""
|
||||
Models cerberus' legacy. Returns a :class:`dict`. When mangled through :class:`str`
|
||||
a pretty-formatted representation of that tree is returned.
|
||||
"""
|
||||
|
||||
messages = {
|
||||
0x00: "{0}",
|
||||
0x01: "document is missing",
|
||||
0x02: "required field",
|
||||
0x03: "unknown field",
|
||||
0x04: "field '{0}' is required",
|
||||
0x05: "depends on these values: {constraint}",
|
||||
0x06: "{0} must not be present with '{field}'",
|
||||
0x21: "'{0}' is not a document, must be a dict",
|
||||
0x22: "empty values not allowed",
|
||||
0x23: "null value not allowed",
|
||||
0x24: "must be of {constraint} type",
|
||||
0x25: "must be of dict type",
|
||||
0x26: "length of list should be {0}, it is {1}",
|
||||
0x27: "min length is {constraint}",
|
||||
0x28: "max length is {constraint}",
|
||||
0x41: "value does not match regex '{constraint}'",
|
||||
0x42: "min value is {constraint}",
|
||||
0x43: "max value is {constraint}",
|
||||
0x44: "unallowed value {value}",
|
||||
0x45: "unallowed values {0}",
|
||||
0x46: "unallowed value {value}",
|
||||
0x47: "unallowed values {0}",
|
||||
0x48: "missing members {0}",
|
||||
0x61: "field '{field}' cannot be coerced: {0}",
|
||||
0x62: "field '{field}' cannot be renamed: {0}",
|
||||
0x63: "field is read-only",
|
||||
0x64: "default value for '{field}' cannot be set: {0}",
|
||||
0x81: "mapping doesn't validate subschema: {0}",
|
||||
0x82: "one or more sequence-items don't validate: {0}",
|
||||
0x83: "one or more keys of a mapping don't validate: {0}",
|
||||
0x84: "one or more values in a mapping don't validate: {0}",
|
||||
0x85: "one or more sequence-items don't validate: {0}",
|
||||
0x91: "one or more definitions validate",
|
||||
0x92: "none or more than one rule validate",
|
||||
0x93: "no definitions validate",
|
||||
0x94: "one or more definitions don't validate",
|
||||
}
|
||||
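# Illustrative note, not part of the upstream cerberus source: positional
# placeholders such as {0} are filled from ValidationError.info, while the
# named placeholders come from the error's constraint, field and value
# (see _format_message below). For example, for code 0x42:
#
#     "min value is {constraint}".format(constraint=10, field='age', value=3)
#     # -> 'min value is 10'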
|
||||
def __init__(self, tree=None):
|
||||
self.tree = {} if tree is None else tree
|
||||
|
||||
def __call__(self, errors):
|
||||
self.clear()
|
||||
self.extend(errors)
|
||||
return self.pretty_tree
|
||||
|
||||
def __str__(self):
|
||||
return pformat(self.pretty_tree)
|
||||
|
||||
@property
|
||||
def pretty_tree(self):
|
||||
pretty = deepcopy(self.tree)
|
||||
for field in pretty:
|
||||
self._purge_empty_dicts(pretty[field])
|
||||
return pretty
|
||||
|
||||
@encode_unicode
|
||||
def add(self, error):
|
||||
# Make sure the original error is not altered with
|
||||
# error paths specific to the handler.
|
||||
error = deepcopy(error)
|
||||
|
||||
self._rewrite_error_path(error)
|
||||
|
||||
if error.is_logic_error:
|
||||
self._insert_logic_error(error)
|
||||
elif error.is_group_error:
|
||||
self._insert_group_error(error)
|
||||
elif error.code in self.messages:
|
||||
self._insert_error(
|
||||
error.document_path, self._format_message(error.field, error)
|
||||
)
|
||||
|
||||
def clear(self):
|
||||
self.tree = {}
|
||||
|
||||
def start(self, validator):
|
||||
self.clear()
|
||||
|
||||
def _format_message(self, field, error):
|
||||
return self.messages[error.code].format(
|
||||
*error.info, constraint=error.constraint, field=field, value=error.value
|
||||
)
|
||||
|
||||
def _insert_error(self, path, node):
|
||||
"""
|
||||
Adds an error or sub-tree to :attr:tree.
|
||||
|
||||
:param path: Path to the error.
|
||||
:type path: Tuple of strings and integers.
|
||||
:param node: An error message or a sub-tree.
|
||||
:type node: String or dictionary.
|
||||
"""
|
||||
field = path[0]
|
||||
if len(path) == 1:
|
||||
if field in self.tree:
|
||||
subtree = self.tree[field].pop()
|
||||
self.tree[field] += [node, subtree]
|
||||
else:
|
||||
self.tree[field] = [node, {}]
|
||||
elif len(path) >= 1:
|
||||
if field not in self.tree:
|
||||
self.tree[field] = [{}]
|
||||
subtree = self.tree[field][-1]
|
||||
|
||||
if subtree:
|
||||
new = self.__class__(tree=copy(subtree))
|
||||
else:
|
||||
new = self.__class__()
|
||||
new._insert_error(path[1:], node)
|
||||
subtree.update(new.tree)
|
||||
|
||||
def _insert_group_error(self, error):
|
||||
for child_error in error.child_errors:
|
||||
if child_error.is_logic_error:
|
||||
self._insert_logic_error(child_error)
|
||||
elif child_error.is_group_error:
|
||||
self._insert_group_error(child_error)
|
||||
else:
|
||||
self._insert_error(
|
||||
child_error.document_path,
|
||||
self._format_message(child_error.field, child_error),
|
||||
)
|
||||
|
||||
def _insert_logic_error(self, error):
|
||||
field = error.field
|
||||
self._insert_error(error.document_path, self._format_message(field, error))
|
||||
|
||||
for definition_errors in error.definitions_errors.values():
|
||||
for child_error in definition_errors:
|
||||
if child_error.is_logic_error:
|
||||
self._insert_logic_error(child_error)
|
||||
elif child_error.is_group_error:
|
||||
self._insert_group_error(child_error)
|
||||
else:
|
||||
self._insert_error(
|
||||
child_error.document_path,
|
||||
self._format_message(field, child_error),
|
||||
)
|
||||
|
||||
def _purge_empty_dicts(self, error_list):
|
||||
subtree = error_list[-1]
|
||||
if not error_list[-1]:
|
||||
error_list.pop()
|
||||
else:
|
||||
for key in subtree:
|
||||
self._purge_empty_dicts(subtree[key])
|
||||
|
||||
def _rewrite_error_path(self, error, offset=0):
|
||||
"""
|
||||
Recursively rewrites the error path to correctly represent logic errors
|
||||
"""
|
||||
if error.is_logic_error:
|
||||
self._rewrite_logic_error_path(error, offset)
|
||||
elif error.is_group_error:
|
||||
self._rewrite_group_error_path(error, offset)
|
||||
|
||||
def _rewrite_group_error_path(self, error, offset=0):
|
||||
child_start = len(error.document_path) - offset
|
||||
|
||||
for child_error in error.child_errors:
|
||||
relative_path = child_error.document_path[child_start:]
|
||||
child_error.document_path = error.document_path + relative_path
|
||||
|
||||
self._rewrite_error_path(child_error, offset)
|
||||
|
||||
def _rewrite_logic_error_path(self, error, offset=0):
|
||||
child_start = len(error.document_path) - offset
|
||||
|
||||
for i, definition_errors in error.definitions_errors.items():
|
||||
if not definition_errors:
|
||||
continue
|
||||
|
||||
nodename = '%s definition %s' % (error.rule, i)
|
||||
path = error.document_path + (nodename,)
|
||||
|
||||
for child_error in definition_errors:
|
||||
rel_path = child_error.document_path[child_start:]
|
||||
child_error.document_path = path + rel_path
|
||||
|
||||
self._rewrite_error_path(child_error, offset + 1)
|
||||
|
||||
|
||||
class SchemaErrorHandler(BasicErrorHandler):
|
||||
messages = BasicErrorHandler.messages.copy()
|
||||
messages[0x03] = "unknown rule"
|
||||
61
.venv/lib/python3.7/site-packages/cerberus/platform.py
Normal file
@@ -0,0 +1,61 @@
|
||||
""" Platform-dependent objects """
|
||||
|
||||
import sys
|
||||
|
||||
if sys.flags.optimize == 2:
|
||||
raise RuntimeError("Cerberus can't be run with Python's optimization level 2.")
|
||||
|
||||
|
||||
if sys.version_info < (3,):
|
||||
_int_types = (int, long) # noqa: F821
|
||||
_str_type = basestring # noqa: F821
|
||||
else:
|
||||
_int_types = (int,)
|
||||
_str_type = str
|
||||
|
||||
|
||||
if sys.version_info < (3, 3):
|
||||
from collections import (
|
||||
Callable,
|
||||
Container,
|
||||
Hashable,
|
||||
Iterable,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
Sequence,
|
||||
Set,
|
||||
Sized,
|
||||
)
|
||||
else:
|
||||
from collections.abc import (
|
||||
Callable,
|
||||
Container,
|
||||
Hashable,
|
||||
Iterable,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
Sequence,
|
||||
Set,
|
||||
Sized,
|
||||
)
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
import importlib_metadata
|
||||
else:
|
||||
import importlib.metadata as importlib_metadata
|
||||
|
||||
|
||||
__all__ = (
|
||||
"_int_types",
|
||||
"_str_type",
|
||||
"importlib_metadata",
|
||||
Callable.__name__,
|
||||
Container.__name__,
|
||||
Hashable.__name__,
|
||||
Iterable.__name__,
|
||||
Mapping.__name__,
|
||||
MutableMapping.__name__,
|
||||
Sequence.__name__,
|
||||
Set.__name__,
|
||||
Sized.__name__,
|
||||
)
|
||||
554
.venv/lib/python3.7/site-packages/cerberus/schema.py
Normal file
@@ -0,0 +1,554 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from warnings import warn
|
||||
|
||||
from cerberus import errors
|
||||
from cerberus.platform import (
|
||||
_str_type,
|
||||
Callable,
|
||||
Hashable,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
Sequence,
|
||||
)
|
||||
from cerberus.utils import (
|
||||
get_Validator_class,
|
||||
validator_factory,
|
||||
mapping_hash,
|
||||
TypeDefinition,
|
||||
)
|
||||
|
||||
|
||||
class _Abort(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class SchemaError(Exception):
|
||||
"""
|
||||
Raised when the validation schema is missing, has the wrong format or contains
|
||||
errors."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class DefinitionSchema(MutableMapping):
|
||||
"""A dict-subclass for caching of validated schemas."""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
if 'SchemaValidator' not in globals():
|
||||
global SchemaValidator
|
||||
SchemaValidator = validator_factory('SchemaValidator', SchemaValidatorMixin)
|
||||
types_mapping = SchemaValidator.types_mapping.copy()
|
||||
types_mapping.update(
|
||||
{
|
||||
'callable': TypeDefinition('callable', (Callable,), ()),
|
||||
'hashable': TypeDefinition('hashable', (Hashable,), ()),
|
||||
}
|
||||
)
|
||||
SchemaValidator.types_mapping = types_mapping
|
||||
|
||||
return super(DefinitionSchema, cls).__new__(cls)
|
||||
|
||||
def __init__(self, validator, schema):
|
||||
"""
|
||||
:param validator: An instance of Validator-(sub-)class that uses this
|
||||
schema.
|
||||
:param schema: A definition-schema as ``dict``. Defaults to an empty
|
||||
one.
|
||||
"""
|
||||
if not isinstance(validator, get_Validator_class()):
|
||||
raise RuntimeError('validator argument must be a Validator-instance.')
|
||||
self.validator = validator
|
||||
|
||||
if isinstance(schema, _str_type):
|
||||
schema = validator.schema_registry.get(schema, schema)
|
||||
|
||||
if not isinstance(schema, Mapping):
|
||||
try:
|
||||
schema = dict(schema)
|
||||
except Exception:
|
||||
raise SchemaError(errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema))
|
||||
|
||||
self.validation_schema = SchemaValidationSchema(validator)
|
||||
self.schema_validator = SchemaValidator(
|
||||
None,
|
||||
allow_unknown=self.validation_schema,
|
||||
error_handler=errors.SchemaErrorHandler,
|
||||
target_schema=schema,
|
||||
target_validator=validator,
|
||||
)
|
||||
|
||||
schema = self.expand(schema)
|
||||
self.validate(schema)
|
||||
self.schema = schema
|
||||
|
||||
def __delitem__(self, key):
|
||||
_new_schema = self.schema.copy()
|
||||
try:
|
||||
del _new_schema[key]
|
||||
except ValueError:
|
||||
raise SchemaError("Schema has no field '%s' defined" % key)
|
||||
except Exception as e:
|
||||
raise e
|
||||
else:
|
||||
del self.schema[key]
|
||||
|
||||
def __getitem__(self, item):
|
||||
return self.schema[item]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.schema)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.schema)
|
||||
|
||||
def __repr__(self):
|
||||
return str(self)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
value = self.expand({0: value})[0]
|
||||
self.validate({key: value})
|
||||
self.schema[key] = value
|
||||
|
||||
def __str__(self):
|
||||
if hasattr(self, "schema"):
|
||||
return str(self.schema)
|
||||
else:
|
||||
return "No schema data is set yet."
|
||||
|
||||
def copy(self):
|
||||
return self.__class__(self.validator, self.schema.copy())
|
||||
|
||||
@classmethod
|
||||
def expand(cls, schema):
|
||||
try:
|
||||
schema = cls._expand_logical_shortcuts(schema)
|
||||
schema = cls._expand_subschemas(schema)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# TODO remove this with the next major release
|
||||
schema = cls._rename_deprecated_rulenames(schema)
|
||||
|
||||
return schema
|
||||
|
||||
@classmethod
|
||||
def _expand_logical_shortcuts(cls, schema):
|
||||
"""
|
||||
Expand agglutinated rules in a definition-schema.
|
||||
|
||||
:param schema: The schema-definition to expand.
|
||||
:return: The expanded schema-definition.
|
||||
"""
|
||||
|
||||
def is_of_rule(x):
|
||||
return isinstance(x, _str_type) and x.startswith(
|
||||
('allof_', 'anyof_', 'noneof_', 'oneof_')
|
||||
)
|
||||
|
||||
for field, rules in schema.items():
|
||||
for of_rule in [x for x in rules if is_of_rule(x)]:
|
||||
operator, rule = of_rule.split('_', 1)
|
||||
rules.update({operator: []})
|
||||
for value in rules[of_rule]:
|
||||
rules[operator].append({rule: value})
|
||||
del rules[of_rule]
|
||||
return schema
|
||||
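# Illustrative note, not part of the upstream cerberus source: what the
# expansion above produces. An agglutinated shortcut such as
#
#     {'prop': {'anyof_type': ['string', 'integer']}}
#
# is rewritten to the long form
#
#     {'prop': {'anyof': [{'type': 'string'}, {'type': 'integer'}]}}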
|
||||
@classmethod
|
||||
def _expand_subschemas(cls, schema):
|
||||
def has_schema_rule():
|
||||
return isinstance(schema[field], Mapping) and 'schema' in schema[field]
|
||||
|
||||
def has_mapping_schema():
|
||||
"""
|
||||
Tries to determine heuristically if the schema-constraints are aimed to
|
||||
mappings.
|
||||
"""
|
||||
try:
|
||||
return all(
|
||||
isinstance(x, Mapping) for x in schema[field]['schema'].values()
|
||||
)
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
for field in schema:
|
||||
if not has_schema_rule():
|
||||
pass
|
||||
elif has_mapping_schema():
|
||||
schema[field]['schema'] = cls.expand(schema[field]['schema'])
|
||||
else: # assumes schema-constraints for a sequence
|
||||
schema[field]['schema'] = cls.expand({0: schema[field]['schema']})[0]
|
||||
|
||||
# TODO remove the last two values in the tuple with the next major release
|
||||
for rule in ('keysrules', 'valuesrules', 'keyschema', 'valueschema'):
|
||||
if rule in schema[field]:
|
||||
schema[field][rule] = cls.expand({0: schema[field][rule]})[0]
|
||||
|
||||
for rule in ('allof', 'anyof', 'items', 'noneof', 'oneof'):
|
||||
if rule in schema[field]:
|
||||
if not isinstance(schema[field][rule], Sequence):
|
||||
continue
|
||||
new_rules_definition = []
|
||||
for item in schema[field][rule]:
|
||||
new_rules_definition.append(cls.expand({0: item})[0])
|
||||
schema[field][rule] = new_rules_definition
|
||||
return schema
|
||||
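# Illustrative note, not part of the upstream cerberus source: the
# has_mapping_schema() heuristic above means that
#
#     {'tags': {'type': 'list', 'schema': {'type': 'string'}}}
#
# is treated as per-item rules for a sequence, while
#
#     {'user': {'type': 'dict', 'schema': {'name': {'type': 'string'}}}}
#
# is treated as a schema for a mapping and expanded field by field.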
|
||||
def get(self, item, default=None):
|
||||
return self.schema.get(item, default)
|
||||
|
||||
def items(self):
|
||||
return self.schema.items()
|
||||
|
||||
def update(self, schema):
|
||||
try:
|
||||
schema = self.expand(schema)
|
||||
_new_schema = self.schema.copy()
|
||||
_new_schema.update(schema)
|
||||
self.validate(_new_schema)
|
||||
except ValueError:
|
||||
raise SchemaError(errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema))
|
||||
except Exception as e:
|
||||
raise e
|
||||
else:
|
||||
self.schema = _new_schema
|
||||
|
||||
# TODO remove with next major release
|
||||
@staticmethod
|
||||
def _rename_deprecated_rulenames(schema):
|
||||
for field, rules in schema.items():
|
||||
if isinstance(rules, str): # registry reference
|
||||
continue
|
||||
|
||||
for old, new in (
|
||||
('keyschema', 'keysrules'),
|
||||
('validator', 'check_with'),
|
||||
('valueschema', 'valuesrules'),
|
||||
):
|
||||
if old not in rules:
|
||||
continue
|
||||
|
||||
if new in rules:
|
||||
raise RuntimeError(
|
||||
"The rule '{new}' is also present with its old "
|
||||
"name '{old}' in the same set of rules."
|
||||
)
|
||||
|
||||
warn(
|
||||
"The rule '{old}' was renamed to '{new}'. The old name will "
|
||||
"not be available in the next major release of "
|
||||
"Cerberus.".format(old=old, new=new),
|
||||
DeprecationWarning,
|
||||
)
|
||||
schema[field][new] = schema[field][old]
|
||||
schema[field].pop(old)
|
||||
|
||||
return schema
|
||||
|
||||
def regenerate_validation_schema(self):
|
||||
self.validation_schema = SchemaValidationSchema(self.validator)
|
||||
|
||||
def validate(self, schema=None):
|
||||
"""
|
||||
Validates a schema that defines rules against supported rules.
|
||||
|
||||
:param schema: The schema to be validated as a legal cerberus schema
|
||||
according to the rules of the assigned Validator object.
|
||||
Raises a :class:`~cerberus.base.SchemaError` when an invalid
|
||||
schema is encountered.
|
||||
"""
|
||||
if schema is None:
|
||||
schema = self.schema
|
||||
_hash = (mapping_hash(schema), mapping_hash(self.validator.types_mapping))
|
||||
if _hash not in self.validator._valid_schemas:
|
||||
self._validate(schema)
|
||||
self.validator._valid_schemas.add(_hash)
|
||||
|
||||
def _validate(self, schema):
|
||||
if isinstance(schema, _str_type):
|
||||
schema = self.validator.schema_registry.get(schema, schema)
|
||||
|
||||
test_schema = {}
|
||||
for field, rules in schema.items():
|
||||
if isinstance(rules, _str_type):
|
||||
test_schema[field] = rules_set_registry.get(rules, rules)
|
||||
else:
|
||||
test_rules = {}
|
||||
for rule, constraint in rules.items():
|
||||
test_rules[rule.replace(" ", "_")] = constraint
|
||||
test_schema[field] = test_rules
|
||||
|
||||
if not self.schema_validator(test_schema, normalize=False):
|
||||
raise SchemaError(self.schema_validator.errors)
|
||||
|
||||
|
||||
class UnvalidatedSchema(DefinitionSchema):
|
||||
def __init__(self, schema={}):
|
||||
if not isinstance(schema, Mapping):
|
||||
schema = dict(schema)
|
||||
self.schema = schema
|
||||
|
||||
def validate(self, schema):
|
||||
pass
|
||||
|
||||
def copy(self):
|
||||
# Override ancestor's copy, because
|
||||
# UnvalidatedSchema does not have .validator:
|
||||
return self.__class__(self.schema.copy())
|
||||
|
||||
|
||||
class SchemaValidationSchema(UnvalidatedSchema):
|
||||
def __init__(self, validator):
|
||||
self.schema = {
|
||||
'allow_unknown': False,
|
||||
'schema': validator.rules,
|
||||
'type': 'dict',
|
||||
}
|
||||
|
||||
|
||||
class SchemaValidatorMixin(object):
|
||||
"""
|
||||
This validator mixin provides mechanics to validate schemas passed to a Cerberus
|
||||
validator.
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs.setdefault('known_rules_set_refs', set())
|
||||
kwargs.setdefault('known_schema_refs', set())
|
||||
super(SchemaValidatorMixin, self).__init__(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def known_rules_set_refs(self):
|
||||
"""The encountered references to rules set registry items."""
|
||||
return self._config['known_rules_set_refs']
|
||||
|
||||
@property
|
||||
def known_schema_refs(self):
|
||||
"""The encountered references to schema registry items."""
|
||||
return self._config['known_schema_refs']
|
||||
|
||||
@property
|
||||
def target_schema(self):
|
||||
"""The schema that is being validated."""
|
||||
return self._config['target_schema']
|
||||
|
||||
@property
|
||||
def target_validator(self):
|
||||
"""The validator whose schema is being validated."""
|
||||
return self._config['target_validator']
|
||||
|
||||
def _check_with_bulk_schema(self, field, value):
|
||||
# resolve schema registry reference
|
||||
if isinstance(value, _str_type):
|
||||
if value in self.known_rules_set_refs:
|
||||
return
|
||||
else:
|
||||
self.known_rules_set_refs.add(value)
|
||||
definition = self.target_validator.rules_set_registry.get(value)
|
||||
if definition is None:
|
||||
self._error(field, 'Rules set definition %s not found.' % value)
|
||||
return
|
||||
else:
|
||||
value = definition
|
||||
|
||||
_hash = (
|
||||
mapping_hash({'turing': value}),
|
||||
mapping_hash(self.target_validator.types_mapping),
|
||||
)
|
||||
if _hash in self.target_validator._valid_schemas:
|
||||
return
|
||||
|
||||
validator = self._get_child_validator(
|
||||
document_crumb=field,
|
||||
allow_unknown=False,
|
||||
schema=self.target_validator.rules,
|
||||
)
|
||||
validator(value, normalize=False)
|
||||
if validator._errors:
|
||||
self._error(validator._errors)
|
||||
else:
|
||||
self.target_validator._valid_schemas.add(_hash)
|
||||
|
||||
def _check_with_dependencies(self, field, value):
|
||||
if isinstance(value, _str_type):
|
||||
pass
|
||||
elif isinstance(value, Mapping):
|
||||
validator = self._get_child_validator(
|
||||
document_crumb=field,
|
||||
schema={'valuesrules': {'type': 'list'}},
|
||||
allow_unknown=True,
|
||||
)
|
||||
if not validator(value, normalize=False):
|
||||
self._error(validator._errors)
|
||||
elif isinstance(value, Sequence):
|
||||
if not all(isinstance(x, Hashable) for x in value):
|
||||
path = self.document_path + (field,)
|
||||
self._error(path, 'All dependencies must be a hashable type.')
|
||||
|
||||
def _check_with_items(self, field, value):
|
||||
for i, schema in enumerate(value):
|
||||
self._check_with_bulk_schema((field, i), schema)
|
||||
|
||||
def _check_with_schema(self, field, value):
|
||||
try:
|
||||
value = self._handle_schema_reference_for_validator(field, value)
|
||||
except _Abort:
|
||||
return
|
||||
|
||||
_hash = (mapping_hash(value), mapping_hash(self.target_validator.types_mapping))
|
||||
if _hash in self.target_validator._valid_schemas:
|
||||
return
|
||||
|
||||
validator = self._get_child_validator(
|
||||
document_crumb=field, schema=None, allow_unknown=self.root_allow_unknown
|
||||
)
|
||||
validator(self._expand_rules_set_refs(value), normalize=False)
|
||||
if validator._errors:
|
||||
self._error(validator._errors)
|
||||
else:
|
||||
self.target_validator._valid_schemas.add(_hash)
|
||||
|
||||
def _check_with_type(self, field, value):
|
||||
value = set((value,)) if isinstance(value, _str_type) else set(value)
|
||||
invalid_constraints = value - set(self.target_validator.types)
|
||||
if invalid_constraints:
|
||||
self._error(
|
||||
field, 'Unsupported types: {}'.format(', '.join(invalid_constraints))
|
||||
)
|
||||
|
||||
def _expand_rules_set_refs(self, schema):
|
||||
result = {}
|
||||
for k, v in schema.items():
|
||||
if isinstance(v, _str_type):
|
||||
result[k] = self.target_validator.rules_set_registry.get(v)
|
||||
else:
|
||||
result[k] = v
|
||||
return result
|
||||
|
||||
def _handle_schema_reference_for_validator(self, field, value):
|
||||
if not isinstance(value, _str_type):
|
||||
return value
|
||||
if value in self.known_schema_refs:
|
||||
raise _Abort
|
||||
|
||||
self.known_schema_refs.add(value)
|
||||
definition = self.target_validator.schema_registry.get(value)
|
||||
if definition is None:
|
||||
path = self.document_path + (field,)
|
||||
self._error(path, 'Schema definition {} not found.'.format(value))
|
||||
raise _Abort
|
||||
return definition
|
||||
|
||||
def _validate_logical(self, rule, field, value):
|
||||
"""{'allowed': ('allof', 'anyof', 'noneof', 'oneof')}"""
|
||||
if not isinstance(value, Sequence):
|
||||
self._error(field, errors.BAD_TYPE)
|
||||
return
|
||||
|
||||
validator = self._get_child_validator(
|
||||
document_crumb=rule,
|
||||
allow_unknown=False,
|
||||
schema=self.target_validator.validation_rules,
|
||||
)
|
||||
|
||||
for constraints in value:
|
||||
_hash = (
|
||||
mapping_hash({'turing': constraints}),
|
||||
mapping_hash(self.target_validator.types_mapping),
|
||||
)
|
||||
if _hash in self.target_validator._valid_schemas:
|
||||
continue
|
||||
|
||||
validator(constraints, normalize=False)
|
||||
if validator._errors:
|
||||
self._error(validator._errors)
|
||||
else:
|
||||
self.target_validator._valid_schemas.add(_hash)
|
||||
|
||||
|
||||
####
|
||||
|
||||
|
||||
class Registry(object):
|
||||
"""
|
||||
A registry to store and retrieve schemas and parts of them by a name that can be used
|
||||
in validation schemas.
|
||||
|
||||
:param definitions: Optional, initial definitions.
|
||||
:type definitions: any :term:`mapping`
|
||||
"""
|
||||
|
||||
def __init__(self, definitions={}):
|
||||
self._storage = {}
|
||||
self.extend(definitions)
|
||||
|
||||
def add(self, name, definition):
|
||||
"""
|
||||
Register a definition to the registry. Existing definitions are replaced
|
||||
silently.
|
||||
|
||||
:param name: The name which can be used as reference in a validation
|
||||
schema.
|
||||
:type name: :class:`str`
|
||||
:param definition: The definition.
|
||||
:type definition: any :term:`mapping`
|
||||
"""
|
||||
self._storage[name] = self._expand_definition(definition)
|
||||
|
||||
def all(self):
|
||||
"""
|
||||
Returns a :class:`dict` with all registered definitions mapped to their name.
|
||||
"""
|
||||
return self._storage
|
||||
|
||||
def clear(self):
|
||||
"""Purge all definitions in the registry."""
|
||||
self._storage.clear()
|
||||
|
||||
def extend(self, definitions):
|
||||
"""
|
||||
Add several definitions at once. Existing definitions are
|
||||
replaced silently.
|
||||
|
||||
:param definitions: The names and definitions.
|
||||
:type definitions: a :term:`mapping` or an :term:`iterable` with
|
||||
two-value :class:`tuple` s
|
||||
"""
|
||||
for name, definition in dict(definitions).items():
|
||||
self.add(name, definition)
|
||||
|
||||
def get(self, name, default=None):
|
||||
"""
|
||||
Retrieve a definition from the registry.
|
||||
|
||||
:param name: The reference that points to the definition.
|
||||
:type name: :class:`str`
|
||||
:param default: Return value if the reference isn't registered.
|
||||
"""
|
||||
return self._storage.get(name, default)
|
||||
|
||||
def remove(self, *names):
|
||||
"""
|
||||
Unregister definitions from the registry.
|
||||
|
||||
:param names: The names of the definitions that are to be
|
||||
unregistered.
|
||||
"""
|
||||
for name in names:
|
||||
self._storage.pop(name, None)
|
||||
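# Illustrative usage sketch, not part of the upstream cerberus source: a schema
# fragment is registered once and then referenced by name from another schema,
# using the module-level `schema_registry` created at the bottom of this file.
# The registry name, field names and document are made up for the example.
def _example_schema_registry():  # documentation only, never called on import
    from cerberus import Validator, schema_registry

    schema_registry.add('user', {'name': {'type': 'string'}})
    validator = Validator({'owner': {'type': 'dict', 'schema': 'user'}})
    assert validator.validate({'owner': {'name': 'me'}})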
|
||||
|
||||
class SchemaRegistry(Registry):
|
||||
@classmethod
|
||||
def _expand_definition(cls, definition):
|
||||
return DefinitionSchema.expand(definition)
|
||||
|
||||
|
||||
class RulesSetRegistry(Registry):
|
||||
@classmethod
|
||||
def _expand_definition(cls, definition):
|
||||
return DefinitionSchema.expand({0: definition})[0]
|
||||
|
||||
|
||||
schema_registry, rules_set_registry = SchemaRegistry(), RulesSetRegistry()
|
||||
132
.venv/lib/python3.7/site-packages/cerberus/utils.py
Normal file
@@ -0,0 +1,132 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
from cerberus.platform import _int_types, _str_type, Mapping, Sequence, Set
|
||||
|
||||
|
||||
TypeDefinition = namedtuple('TypeDefinition', 'name,included_types,excluded_types')
|
||||
"""
|
||||
This class is used to define types that can be used as value in the
|
||||
:attr:`~cerberus.Validator.types_mapping` property.
|
||||
The ``name`` should be descriptive and match the key it is going to be assigned
|
||||
to.
|
||||
A value that is validated against such definition must be an instance of any of
|
||||
the types contained in ``included_types`` and must not match any of the types
|
||||
contained in ``excluded_types``.
|
||||
"""
|
||||
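# Illustrative usage sketch, not part of the upstream cerberus source: a custom
# type is registered on a Validator subclass by extending `types_mapping` with
# a TypeDefinition, as described in the docstring above. The subclass name and
# the 'decimal' type name are made up for the example.
def _example_custom_type():  # documentation only, never called on import
    from decimal import Decimal
    from cerberus import Validator

    class DecimalValidator(Validator):
        types_mapping = Validator.types_mapping.copy()
        types_mapping['decimal'] = TypeDefinition('decimal', (Decimal,), ())

    validator = DecimalValidator({'price': {'type': 'decimal'}})
    assert validator.validate({'price': Decimal('1.99')})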
|
||||
|
||||
def compare_paths_lt(x, y):
|
||||
min_length = min(len(x), len(y))
|
||||
|
||||
if x[:min_length] == y[:min_length]:
|
||||
return len(x) == min_length
|
||||
|
||||
for i in range(min_length):
|
||||
a, b = x[i], y[i]
|
||||
|
||||
for _type in (_int_types, _str_type, tuple):
|
||||
if isinstance(a, _type):
|
||||
if isinstance(b, _type):
|
||||
break
|
||||
else:
|
||||
return True
|
||||
|
||||
if a == b:
|
||||
continue
|
||||
elif a < b:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
raise RuntimeError
|
||||
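# Illustrative note, not part of the upstream cerberus source: the ordering
# implemented above sorts a path before any of its extensions, compares
# elements of the same kind by value, and sorts an element of an "earlier"
# kind (integer before string before tuple) first when the kinds differ:
#
#     compare_paths_lt(('a',), ('a', 0))    # True -- prefix comes first
#     compare_paths_lt(('a', 0), ('a', 1))  # True -- 0 < 1
#     compare_paths_lt((0,), ('a',))        # True -- int sorts before str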
|
||||
|
||||
def drop_item_from_tuple(t, i):
|
||||
return t[:i] + t[i + 1 :]
|
||||
|
||||
|
||||
def get_Validator_class():
|
||||
global Validator
|
||||
if 'Validator' not in globals():
|
||||
from cerberus.validator import Validator
|
||||
return Validator
|
||||
|
||||
|
||||
def mapping_hash(schema):
|
||||
return hash(mapping_to_frozenset(schema))
|
||||
|
||||
|
||||
def mapping_to_frozenset(mapping):
|
||||
"""
|
||||
Be aware that this treats any sequence type with equal members as equal. As it
|
||||
is used to identify equality of schemas, this can be considered okay as definitions
|
||||
are semantically equal regardless of the container type.
|
||||
"""
|
||||
|
||||
aggregation = {}
|
||||
|
||||
for key, value in mapping.items():
|
||||
if isinstance(value, Mapping):
|
||||
aggregation[key] = mapping_to_frozenset(value)
|
||||
elif isinstance(value, Sequence):
|
||||
value = list(value)
|
||||
for i, item in enumerate(value):
|
||||
if isinstance(item, Mapping):
|
||||
value[i] = mapping_to_frozenset(item)
|
||||
aggregation[key] = tuple(value)
|
||||
elif isinstance(value, Set):
|
||||
aggregation[key] = frozenset(value)
|
||||
else:
|
||||
aggregation[key] = value
|
||||
|
||||
return frozenset(aggregation.items())
|
||||
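# Illustrative note, not part of the upstream cerberus source: because every
# Sequence is converted to a tuple above, two schemas that differ only in the
# container type of a constraint produce the same hash, which is what the
# schema cache in schema.py relies on:
#
#     mapping_hash({'allowed': [1, 2]}) == mapping_hash({'allowed': (1, 2)})  # True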
|
||||
|
||||
def quote_string(value):
|
||||
if isinstance(value, _str_type):
|
||||
return '"%s"' % value
|
||||
else:
|
||||
return value
|
||||
|
||||
|
||||
class readonly_classproperty(property):
|
||||
def __get__(self, instance, owner):
|
||||
return super(readonly_classproperty, self).__get__(owner)
|
||||
|
||||
def __set__(self, instance, value):
|
||||
raise RuntimeError('This is a readonly class property.')
|
||||
|
||||
def __delete__(self, instance):
|
||||
raise RuntimeError('This is a readonly class property.')
|
||||
|
||||
|
||||
def validator_factory(name, bases=None, namespace={}):
|
||||
"""
|
||||
Dynamically create a :class:`~cerberus.Validator` subclass.
|
||||
Docstrings of mixin-classes will be added to the resulting class' one if ``__doc__``
|
||||
is not in :obj:`namespace`.
|
||||
|
||||
:param name: The name of the new class.
|
||||
:type name: :class:`str`
|
||||
:param bases: Class(es) with additional and overriding attributes.
|
||||
:type bases: :class:`tuple` of or a single :term:`class`
|
||||
:param namespace: Attributes for the new class.
|
||||
:type namespace: :class:`dict`
|
||||
:return: The created class.
|
||||
"""
|
||||
Validator = get_Validator_class()
|
||||
|
||||
if bases is None:
|
||||
bases = (Validator,)
|
||||
elif isinstance(bases, tuple):
|
||||
bases += (Validator,)
|
||||
else:
|
||||
bases = (bases, Validator)
|
||||
|
||||
docstrings = [x.__doc__ for x in bases if x.__doc__]
|
||||
if len(docstrings) > 1 and '__doc__' not in namespace:
|
||||
namespace.update({'__doc__': '\n'.join(docstrings)})
|
||||
|
||||
return type(name, bases, namespace)
|
||||
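# Illustrative usage sketch, not part of the upstream cerberus source: a
# Validator subclass is created dynamically from a mixin, the same mechanism
# schema.py uses to build its SchemaValidator. The mixin and class names are
# made up for the example.
def _example_validator_factory():  # documentation only, never called on import
    class _NoisyMixin(object):
        """Example mixin; contributes only this docstring."""

    DynamicValidator = validator_factory('DynamicValidator', _NoisyMixin)
    assert issubclass(DynamicValidator, get_Validator_class())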
1674
.venv/lib/python3.7/site-packages/cerberus/validator.py
Normal file
File diff suppressed because it is too large
5
.venv/lib/python3.7/site-packages/easy_install.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Run the EasyInstall command"""
|
||||
|
||||
if __name__ == '__main__':
|
||||
from setuptools.command.easy_install import main
|
||||
main()
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
@@ -0,0 +1,134 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: importlib-metadata
|
||||
Version: 6.7.0
|
||||
Summary: Read metadata from Python packages
|
||||
Home-page: https://github.com/python/importlib_metadata
|
||||
Author: Jason R. Coombs
|
||||
Author-email: jaraco@jaraco.com
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Requires-Python: >=3.7
|
||||
License-File: LICENSE
|
||||
Requires-Dist: zipp (>=0.5)
|
||||
Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8"
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
|
||||
Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs'
|
||||
Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
|
||||
Requires-Dist: furo ; extra == 'docs'
|
||||
Requires-Dist: sphinx-lint ; extra == 'docs'
|
||||
Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
|
||||
Provides-Extra: perf
|
||||
Requires-Dist: ipython ; extra == 'perf'
|
||||
Provides-Extra: testing
|
||||
Requires-Dist: pytest (>=6) ; extra == 'testing'
|
||||
Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
|
||||
Requires-Dist: pytest-cov ; extra == 'testing'
|
||||
Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing'
|
||||
Requires-Dist: pytest-ruff ; extra == 'testing'
|
||||
Requires-Dist: packaging ; extra == 'testing'
|
||||
Requires-Dist: pyfakefs ; extra == 'testing'
|
||||
Requires-Dist: flufl.flake8 ; extra == 'testing'
|
||||
Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing'
|
||||
Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
|
||||
Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
|
||||
Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing'
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg
|
||||
:target: https://pypi.org/project/importlib_metadata
|
||||
|
||||
.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg
|
||||
|
||||
.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg
|
||||
:target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22
|
||||
:alt: tests
|
||||
|
||||
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
||||
:target: https://github.com/psf/black
|
||||
:alt: Code style: Black
|
||||
|
||||
.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest
|
||||
:target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest
|
||||
|
||||
.. image:: https://img.shields.io/badge/skeleton-2023-informational
|
||||
:target: https://blog.jaraco.com/skeleton
|
||||
|
||||
.. image:: https://tidelift.com/badges/package/pypi/importlib-metadata
|
||||
:target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme
|
||||
|
||||
Library to access the metadata for a Python package.
|
||||
|
||||
This package supplies third-party access to the functionality of
|
||||
`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_
|
||||
including improvements added to subsequent Python versions.
|
||||
|
||||
|
||||
Compatibility
|
||||
=============
|
||||
|
||||
New features are introduced in this third-party library and later merged
|
||||
into CPython. The following table indicates which versions of this library
|
||||
were contributed to different versions in the standard library:
|
||||
|
||||
.. list-table::
|
||||
:header-rows: 1
|
||||
|
||||
* - importlib_metadata
|
||||
- stdlib
|
||||
* - 6.5
|
||||
- 3.12
|
||||
* - 4.13
|
||||
- 3.11
|
||||
* - 4.6
|
||||
- 3.10
|
||||
* - 1.4
|
||||
- 3.8
|
||||
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
See the `online documentation <https://importlib-metadata.readthedocs.io/>`_
|
||||
for usage details.
|
||||
|
||||
`Finder authors
|
||||
<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
|
||||
also add support for custom package installers. See the above documentation
|
||||
for details.
|
||||
|
||||
|
||||
Caveats
|
||||
=======
|
||||
|
||||
This project primarily supports third-party packages installed by PyPA
|
||||
tools (or other conforming packages). It does not support:
|
||||
|
||||
- Packages in the stdlib.
|
||||
- Packages installed without metadata.
|
||||
|
||||
Project details
|
||||
===============
|
||||
|
||||
* Project home: https://github.com/python/importlib_metadata
|
||||
* Report bugs at: https://github.com/python/importlib_metadata/issues
|
||||
* Code hosting: https://github.com/python/importlib_metadata
|
||||
* Documentation: https://importlib-metadata.readthedocs.io/
|
||||
|
||||
For Enterprise
|
||||
==============
|
||||
|
||||
Available as part of the Tidelift Subscription.
|
||||
|
||||
This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
|
||||
|
||||
`Learn more <https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=referral&utm_campaign=github>`_.
|
||||
|
||||
Security Contact
|
||||
================
|
||||
|
||||
To report a security vulnerability, please use the
|
||||
`Tidelift security contact <https://tidelift.com/security>`_.
|
||||
Tidelift will coordinate the fix and disclosure.
|
||||
@@ -0,0 +1,25 @@
|
||||
importlib_metadata-6.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
importlib_metadata-6.7.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
|
||||
importlib_metadata-6.7.0.dist-info/METADATA,sha256=JDrzuuLRE3CxIRXLeXdZGGFDrVlEXUvt-chm0-s-TtI,4878
|
||||
importlib_metadata-6.7.0.dist-info/RECORD,,
|
||||
importlib_metadata-6.7.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
|
||||
importlib_metadata-6.7.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
|
||||
importlib_metadata/__init__.py,sha256=MQx_tU_lZg-7U91wdrlrsDt0MGPXkpraLevB8LO1NNc,30724
|
||||
importlib_metadata/__pycache__/__init__.cpython-37.pyc,,
|
||||
importlib_metadata/__pycache__/_adapters.cpython-37.pyc,,
|
||||
importlib_metadata/__pycache__/_collections.cpython-37.pyc,,
|
||||
importlib_metadata/__pycache__/_compat.cpython-37.pyc,,
|
||||
importlib_metadata/__pycache__/_functools.cpython-37.pyc,,
|
||||
importlib_metadata/__pycache__/_itertools.cpython-37.pyc,,
|
||||
importlib_metadata/__pycache__/_meta.cpython-37.pyc,,
|
||||
importlib_metadata/__pycache__/_py39compat.cpython-37.pyc,,
|
||||
importlib_metadata/__pycache__/_text.cpython-37.pyc,,
|
||||
importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454
|
||||
importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
|
||||
importlib_metadata/_compat.py,sha256=xaiD8pwYYPCWkVgR30411iT4OmLbSbSAigzhp0nTROw,1735
|
||||
importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895
|
||||
importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068
|
||||
importlib_metadata/_meta.py,sha256=I2AuaUMr5a6cTdZleV9WpyqUCSooqqV-zSzr1qn7FMw,1615
|
||||
importlib_metadata/_py39compat.py,sha256=2Tk5twb_VgLCY-1NEAQjdZp_S9OFMC-pUzP2isuaPsQ,1098
|
||||
importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
|
||||
importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
@@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.40.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
importlib_metadata
|
||||
1015
.venv/lib/python3.7/site-packages/importlib_metadata/__init__.py
Normal file
File diff suppressed because it is too large
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
90
.venv/lib/python3.7/site-packages/importlib_metadata/_adapters.py
Normal file
@@ -0,0 +1,90 @@
import functools
import warnings
import re
import textwrap
import email.message

from ._text import FoldedCase
from ._compat import pypy_partial


# Do not remove prior to 2024-01-01 or Python 3.14
_warn = functools.partial(
    warnings.warn,
    "Implicit None on return values is deprecated and will raise KeyErrors.",
    DeprecationWarning,
    stacklevel=pypy_partial(2),
)


class Message(email.message.Message):
    multiple_use_keys = set(
        map(
            FoldedCase,
            [
                'Classifier',
                'Obsoletes-Dist',
                'Platform',
                'Project-URL',
                'Provides-Dist',
                'Provides-Extra',
                'Requires-Dist',
                'Requires-External',
                'Supported-Platform',
                'Dynamic',
            ],
        )
    )
    """
    Keys that may be indicated multiple times per PEP 566.
    """

    def __new__(cls, orig: email.message.Message):
        res = super().__new__(cls)
        vars(res).update(vars(orig))
        return res

    def __init__(self, *args, **kwargs):
        self._headers = self._repair_headers()

    # suppress spurious error from mypy
    def __iter__(self):
        return super().__iter__()

    def __getitem__(self, item):
        """
        Warn users that a ``KeyError`` can be expected when a
        missing key is supplied. Ref python/importlib_metadata#371.
        """
        res = super().__getitem__(item)
        if res is None:
            _warn()
        return res

    def _repair_headers(self):
        def redent(value):
            "Correct for RFC822 indentation"
            if not value or '\n' not in value:
                return value
            return textwrap.dedent(' ' * 8 + value)

        headers = [(key, redent(value)) for key, value in vars(self)['_headers']]
        if self._payload:
            headers.append(('Description', self.get_payload()))
        return headers

    @property
    def json(self):
        """
        Convert PackageMetadata to a JSON-compatible format
        per PEP 0566.
        """

        def transform(key):
            value = self.get_all(key) if key in self.multiple_use_keys else self[key]
            if key == 'Keywords':
                value = re.split(r'\s+', value)
            tk = key.lower().replace('-', '_')
            return tk, value

        return dict(map(transform, map(FoldedCase, self)))
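As a rough illustration of what the adapter above does: it wraps the email.message.Message produced by parsing a METADATA file, repairs folded headers, and exposes a PEP 566-style json view. The metadata text below is invented, and _adapters is an internal module, so this is a sketch rather than supported usage:

import email

from importlib_metadata._adapters import Message

raw = email.message_from_string(
    "Metadata-Version: 2.1\n"
    "Name: example-dist\n"  # hypothetical project name
    "Classifier: Programming Language :: Python\n"
    "Classifier: Topic :: Utilities\n"
)
meta = Message(raw)
# Multiple-use keys are collected into lists; other keys stay scalar.
print(meta.json["classifier"])  # ['Programming Language :: Python', 'Topic :: Utilities']
print(meta.json["name"])        # 'example-dist'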
30
.venv/lib/python3.7/site-packages/importlib_metadata/_collections.py
Normal file
@@ -0,0 +1,30 @@
import collections


# from jaraco.collections 3.3
class FreezableDefaultDict(collections.defaultdict):
    """
    Often it is desirable to prevent the mutation of
    a default dict after its initial construction, such
    as to prevent mutation during iteration.

    >>> dd = FreezableDefaultDict(list)
    >>> dd[0].append('1')
    >>> dd.freeze()
    >>> dd[1]
    []
    >>> len(dd)
    1
    """

    def __missing__(self, key):
        return getattr(self, '_frozen', super().__missing__)(key)

    def freeze(self):
        self._frozen = lambda key: self.default_factory()


class Pair(collections.namedtuple('Pair', 'name value')):
    @classmethod
    def parse(cls, text):
        return cls(*map(str.strip, text.split("=", 1)))
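FreezableDefaultDict and Pair are small internal helpers; Pair.parse splits `name = value` style text on the first '=' and strips both sides. A quick illustration, with a made-up entry-point string:

from importlib_metadata._collections import FreezableDefaultDict, Pair

p = Pair.parse("console_scripts = example.cli:main")  # hypothetical entry point
print(p.name, p.value)  # console_scripts example.cli:main

grouped = FreezableDefaultDict(list)
grouped[p.name].append(p.value)
grouped.freeze()            # after freeze(), missing keys are not stored
print(grouped["missing"])   # [] (a fresh default, the dict itself is untouched)
print(len(grouped))         # 1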
74
.venv/lib/python3.7/site-packages/importlib_metadata/_compat.py
Normal file
@@ -0,0 +1,74 @@
import os
import sys
import platform

from typing import Union


__all__ = ['install', 'NullFinder', 'Protocol']


try:
    from typing import Protocol
except ImportError:  # pragma: no cover
    # Python 3.7 compatibility
    from typing_extensions import Protocol  # type: ignore


def install(cls):
    """
    Class decorator for installation on sys.meta_path.

    Adds the backport DistributionFinder to sys.meta_path and
    attempts to disable the finder functionality of the stdlib
    DistributionFinder.
    """
    sys.meta_path.append(cls())
    disable_stdlib_finder()
    return cls


def disable_stdlib_finder():
    """
    Give the backport primacy for discovering path-based distributions
    by monkey-patching the stdlib O_O.

    See #91 for more background for rationale on this sketchy
    behavior.
    """

    def matches(finder):
        return getattr(
            finder, '__module__', None
        ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions')

    for finder in filter(matches, sys.meta_path):  # pragma: nocover
        del finder.find_distributions


class NullFinder:
    """
    A "Finder" (aka "MetaClassFinder") that never finds any modules,
    but may find distributions.
    """

    @staticmethod
    def find_spec(*args, **kwargs):
        return None


def pypy_partial(val):
    """
    Adjust for variable stacklevel on partial under PyPy.

    Workaround for #327.
    """
    is_pypy = platform.python_implementation() == 'PyPy'
    return val + is_pypy


if sys.version_info >= (3, 9):
    StrPath = Union[str, os.PathLike[str]]
else:
    # PathLike is only subscriptable at runtime in 3.9+
    StrPath = Union[str, "os.PathLike[str]"]  # pragma: no cover
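The decorator pattern above is easiest to see with a toy finder. DemoFinder below is invented for illustration, and because install() really does monkey-patch the stdlib path finder, treat this as a sketch of the mechanism rather than something to run in production code:

import sys

from importlib_metadata._compat import NullFinder, install


@install  # appends an instance to sys.meta_path and calls disable_stdlib_finder()
class DemoFinder(NullFinder):
    # find_spec is inherited and always returns None, so module imports are
    # untouched; only find_distributions participates in metadata discovery.
    def find_distributions(self, context=None):
        return iter(())  # a real finder would yield Distribution objects


print(any(isinstance(f, DemoFinder) for f in sys.meta_path))  # True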
104
.venv/lib/python3.7/site-packages/importlib_metadata/_functools.py
Normal file
@@ -0,0 +1,104 @@
import types
import functools


# from jaraco.functools 3.3
def method_cache(method, cache_wrapper=None):
    """
    Wrap lru_cache to support storing the cache data in the object instances.

    Abstracts the common paradigm where the method explicitly saves an
    underscore-prefixed protected property on first call and returns that
    subsequently.

    >>> class MyClass:
    ...     calls = 0
    ...
    ...     @method_cache
    ...     def method(self, value):
    ...         self.calls += 1
    ...         return value

    >>> a = MyClass()
    >>> a.method(3)
    3
    >>> for x in range(75):
    ...     res = a.method(x)
    >>> a.calls
    75

    Note that the apparent behavior will be exactly like that of lru_cache
    except that the cache is stored on each instance, so values in one
    instance will not flush values from another, and when an instance is
    deleted, so are the cached values for that instance.

    >>> b = MyClass()
    >>> for x in range(35):
    ...     res = b.method(x)
    >>> b.calls
    35
    >>> a.method(0)
    0
    >>> a.calls
    75

    Note that if method had been decorated with ``functools.lru_cache()``,
    a.calls would have been 76 (due to the cached value of 0 having been
    flushed by the 'b' instance).

    Clear the cache with ``.cache_clear()``

    >>> a.method.cache_clear()

    Same for a method that hasn't yet been called.

    >>> c = MyClass()
    >>> c.method.cache_clear()

    Another cache wrapper may be supplied:

    >>> cache = functools.lru_cache(maxsize=2)
    >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
    >>> a = MyClass()
    >>> a.method2()
    3

    Caution - do not subsequently wrap the method with another decorator, such
    as ``@property``, which changes the semantics of the function.

    See also
    http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
    for another implementation and additional justification.
    """
    cache_wrapper = cache_wrapper or functools.lru_cache()

    def wrapper(self, *args, **kwargs):
        # it's the first call, replace the method with a cached, bound method
        bound_method = types.MethodType(method, self)
        cached_method = cache_wrapper(bound_method)
        setattr(self, method.__name__, cached_method)
        return cached_method(*args, **kwargs)

    # Support cache clear even before cache has been created.
    wrapper.cache_clear = lambda: None

    return wrapper


# From jaraco.functools 3.3
def pass_none(func):
    """
    Wrap func so it's not called if its first param is None

    >>> print_text = pass_none(print)
    >>> print_text('text')
    text
    >>> print_text(None)
    """

    @functools.wraps(func)
    def wrapper(param, *args, **kwargs):
        if param is not None:
            return func(param, *args, **kwargs)

    return wrapper
73
.venv/lib/python3.7/site-packages/importlib_metadata/_itertools.py
Normal file
@@ -0,0 +1,73 @@
from itertools import filterfalse


def unique_everseen(iterable, key=None):
    "List unique elements, preserving order. Remember all elements ever seen."
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    seen = set()
    seen_add = seen.add
    if key is None:
        for element in filterfalse(seen.__contains__, iterable):
            seen_add(element)
            yield element
    else:
        for element in iterable:
            k = key(element)
            if k not in seen:
                seen_add(k)
                yield element


# copied from more_itertools 8.8
def always_iterable(obj, base_type=(str, bytes)):
    """If *obj* is iterable, return an iterator over its items::

        >>> obj = (1, 2, 3)
        >>> list(always_iterable(obj))
        [1, 2, 3]

    If *obj* is not iterable, return a one-item iterable containing *obj*::

        >>> obj = 1
        >>> list(always_iterable(obj))
        [1]

    If *obj* is ``None``, return an empty iterable:

        >>> obj = None
        >>> list(always_iterable(None))
        []

    By default, binary and text strings are not considered iterable::

        >>> obj = 'foo'
        >>> list(always_iterable(obj))
        ['foo']

    If *base_type* is set, objects for which ``isinstance(obj, base_type)``
    returns ``True`` won't be considered iterable.

        >>> obj = {'a': 1}
        >>> list(always_iterable(obj))  # Iterate over the dict's keys
        ['a']
        >>> list(always_iterable(obj, base_type=dict))  # Treat dicts as a unit
        [{'a': 1}]

    Set *base_type* to ``None`` to avoid any special handling and treat objects
    Python considers iterable as iterable:

        >>> obj = 'foo'
        >>> list(always_iterable(obj, base_type=None))
        ['f', 'o', 'o']
    """
    if obj is None:
        return iter(())

    if (base_type is not None) and isinstance(obj, base_type):
        return iter((obj,))

    try:
        return iter(obj)
    except TypeError:
        return iter((obj,))
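unique_everseen deduplicates while preserving order, optionally under a key function, and always_iterable normalizes scalars, None, and iterables to an iterator. A small combined example (inputs are arbitrary):

from importlib_metadata._itertools import always_iterable, unique_everseen

print(list(unique_everseen("AAAABBBCCDAABBB")))        # ['A', 'B', 'C', 'D']
print(list(unique_everseen("ABBCcAD", key=str.lower)))  # ['A', 'B', 'C', 'D']

# always_iterable lets callers accept "one or many" values uniformly.
for item in always_iterable("single-value"):  # strings count as a single unit
    print(item)                               # single-value
print(list(always_iterable(None)))            # []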
63
.venv/lib/python3.7/site-packages/importlib_metadata/_meta.py
Normal file
@@ -0,0 +1,63 @@
from ._compat import Protocol
from typing import Any, Dict, Iterator, List, Optional, TypeVar, Union, overload


_T = TypeVar("_T")


class PackageMetadata(Protocol):
    def __len__(self) -> int:
        ...  # pragma: no cover

    def __contains__(self, item: str) -> bool:
        ...  # pragma: no cover

    def __getitem__(self, key: str) -> str:
        ...  # pragma: no cover

    def __iter__(self) -> Iterator[str]:
        ...  # pragma: no cover

    @overload
    def get(self, name: str, failobj: None = None) -> Optional[str]:
        ...  # pragma: no cover

    @overload
    def get(self, name: str, failobj: _T) -> Union[str, _T]:
        ...  # pragma: no cover

    # overload per python/importlib_metadata#435
    @overload
    def get_all(self, name: str, failobj: None = None) -> Optional[List[Any]]:
        ...  # pragma: no cover

    @overload
    def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]:
        """
        Return all values associated with a possibly multi-valued key.
        """

    @property
    def json(self) -> Dict[str, Union[str, List[str]]]:
        """
        A JSON-compatible form of the metadata.
        """


class SimplePath(Protocol[_T]):
    """
    A minimal subset of pathlib.Path required by PathDistribution.
    """

    def joinpath(self, other: Union[str, _T]) -> _T:
        ...  # pragma: no cover

    def __truediv__(self, other: Union[str, _T]) -> _T:
        ...  # pragma: no cover

    @property
    def parent(self) -> _T:
        ...  # pragma: no cover

    def read_text(self) -> str:
        ...  # pragma: no cover
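PackageMetadata and SimplePath are typing Protocols: any object with the right shape satisfies them, with no inheritance required. A minimal sketch, using a hypothetical dist-info location:

from pathlib import Path

from importlib_metadata._meta import SimplePath


def read_metadata(dist_info: SimplePath) -> str:
    # Only protocol members are used here: __truediv__ and read_text().
    return (dist_info / "METADATA").read_text()


# pathlib.Path provides joinpath, __truediv__, parent and read_text, so it
# satisfies SimplePath structurally; no subclassing is needed.
example = Path("importlib_metadata-6.7.0.dist-info")  # hypothetical location
print(read_metadata(example) if (example / "METADATA").is_file() else "METADATA not present")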
35
.venv/lib/python3.7/site-packages/importlib_metadata/_py39compat.py
Normal file
@@ -0,0 +1,35 @@
"""
Compatibility layer with Python 3.8/3.9
"""
from typing import TYPE_CHECKING, Any, Optional


if TYPE_CHECKING:  # pragma: no cover
    # Prevent circular imports on runtime.
    from . import Distribution, EntryPoint
else:
    Distribution = EntryPoint = Any


def normalized_name(dist: Distribution) -> Optional[str]:
    """
    Honor name normalization for distributions that don't provide ``_normalized_name``.
    """
    try:
        return dist._normalized_name
    except AttributeError:
        from . import Prepared  # -> delay to prevent circular imports.

        return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name'])


def ep_matches(ep: EntryPoint, **params) -> bool:
    """
    Workaround for ``EntryPoint`` objects without the ``matches`` method.
    """
    try:
        return ep.matches(**params)
    except AttributeError:
        from . import EntryPoint  # -> delay to prevent circular imports.

        # Reconstruct the EntryPoint object to make sure it is compatible.
        return EntryPoint(ep.name, ep.value, ep.group).matches(**params)
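These shims let newer importlib_metadata code call ``_normalized_name`` and ``EntryPoint.matches`` even when an older Distribution or EntryPoint implementation is in play. A hedged sketch; the entry point below is invented and the keyword-style constructor is assumed to match this vendored version:

from importlib_metadata import EntryPoint
from importlib_metadata._py39compat import ep_matches

ep = EntryPoint(name="demo", value="example.pkg:main", group="console_scripts")
# ep_matches prefers ep.matches(...) and falls back to rebuilding the EntryPoint.
print(ep_matches(ep, group="console_scripts"))  # True
print(ep_matches(ep, group="gui_scripts"))      # False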
99
.venv/lib/python3.7/site-packages/importlib_metadata/_text.py
Normal file
@@ -0,0 +1,99 @@
import re

from ._functools import method_cache


# from jaraco.text 3.5
class FoldedCase(str):
    """
    A case insensitive string class; behaves just like str
    except compares equal when the only variation is case.

    >>> s = FoldedCase('hello world')

    >>> s == 'Hello World'
    True

    >>> 'Hello World' == s
    True

    >>> s != 'Hello World'
    False

    >>> s.index('O')
    4

    >>> s.split('O')
    ['hell', ' w', 'rld']

    >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
    ['alpha', 'Beta', 'GAMMA']

    Sequence membership is straightforward.

    >>> "Hello World" in [s]
    True
    >>> s in ["Hello World"]
    True

    You may test for set inclusion, but candidate and elements
    must both be folded.

    >>> FoldedCase("Hello World") in {s}
    True
    >>> s in {FoldedCase("Hello World")}
    True

    String inclusion works as long as the FoldedCase object
    is on the right.

    >>> "hello" in FoldedCase("Hello World")
    True

    But not if the FoldedCase object is on the left:

    >>> FoldedCase('hello') in 'Hello World'
    False

    In that case, use in_:

    >>> FoldedCase('hello').in_('Hello World')
    True

    >>> FoldedCase('hello') > FoldedCase('Hello')
    False
    """

    def __lt__(self, other):
        return self.lower() < other.lower()

    def __gt__(self, other):
        return self.lower() > other.lower()

    def __eq__(self, other):
        return self.lower() == other.lower()

    def __ne__(self, other):
        return self.lower() != other.lower()

    def __hash__(self):
        return hash(self.lower())

    def __contains__(self, other):
        return super().lower().__contains__(other.lower())

    def in_(self, other):
        "Does self appear in other?"
        return self in FoldedCase(other)

    # cache lower since it's likely to be called frequently.
    @method_cache
    def lower(self):
        return super().lower()

    def index(self, sub):
        return self.lower().index(sub.lower())

    def split(self, splitter=' ', maxsplit=0):
        pattern = re.compile(re.escape(splitter), re.I)
        return pattern.split(self, maxsplit)
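FoldedCase is what makes the metadata key handling in _adapters above case-insensitive: hashing and equality are defined on the lower-cased value, so it works as a dictionary or set key. A brief illustration with made-up values:

from importlib_metadata._text import FoldedCase

headers = {FoldedCase("Requires-Dist"): ["zipp>=0.5"]}  # values are illustrative
print(headers[FoldedCase("requires-dist")])    # ['zipp>=0.5'] - lookup ignores case
print(FoldedCase("REQUIRES-DIST") in headers)  # True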
37
.venv/lib/python3.7/site-packages/jinja2/__init__.py
Normal file
@@ -0,0 +1,37 @@
"""Jinja is a template engine written in pure Python. It provides a
non-XML syntax that supports inline expressions and an optional
sandboxed environment.
"""
from .bccache import BytecodeCache as BytecodeCache
from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache
from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache
from .environment import Environment as Environment
from .environment import Template as Template
from .exceptions import TemplateAssertionError as TemplateAssertionError
from .exceptions import TemplateError as TemplateError
from .exceptions import TemplateNotFound as TemplateNotFound
from .exceptions import TemplateRuntimeError as TemplateRuntimeError
from .exceptions import TemplatesNotFound as TemplatesNotFound
from .exceptions import TemplateSyntaxError as TemplateSyntaxError
from .exceptions import UndefinedError as UndefinedError
from .loaders import BaseLoader as BaseLoader
from .loaders import ChoiceLoader as ChoiceLoader
from .loaders import DictLoader as DictLoader
from .loaders import FileSystemLoader as FileSystemLoader
from .loaders import FunctionLoader as FunctionLoader
from .loaders import ModuleLoader as ModuleLoader
from .loaders import PackageLoader as PackageLoader
from .loaders import PrefixLoader as PrefixLoader
from .runtime import ChainableUndefined as ChainableUndefined
from .runtime import DebugUndefined as DebugUndefined
from .runtime import make_logging_undefined as make_logging_undefined
from .runtime import StrictUndefined as StrictUndefined
from .runtime import Undefined as Undefined
from .utils import clear_caches as clear_caches
from .utils import is_undefined as is_undefined
from .utils import pass_context as pass_context
from .utils import pass_environment as pass_environment
from .utils import pass_eval_context as pass_eval_context
from .utils import select_autoescape as select_autoescape

__version__ = "3.1.3"
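jinja2's top-level module simply re-exports the public API. A minimal render using Environment with an in-memory DictLoader; the template name and text are made up for the example:

from jinja2 import DictLoader, Environment, select_autoescape

env = Environment(
    loader=DictLoader({"hello.html": "<p>Hello {{ name }}!</p>"}),  # in-memory template
    autoescape=select_autoescape(),
)
print(env.get_template("hello.html").render(name="<world>"))
# <p>Hello &lt;world&gt;!</p>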
Binary files not shown (19 compiled *.pyc cache files).
Some files were not shown because too many files have changed in this diff.