Diffstat (limited to 'venv/lib/python3.11/site-packages/jinja2')
51 files changed, 0 insertions, 14256 deletions
diff --git a/venv/lib/python3.11/site-packages/jinja2/__init__.py b/venv/lib/python3.11/site-packages/jinja2/__init__.py
deleted file mode 100644
index af5d428..0000000
--- a/venv/lib/python3.11/site-packages/jinja2/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""Jinja is a template engine written in pure Python. It provides a
-non-XML syntax that supports inline expressions and an optional
-sandboxed environment.
-"""
-from .bccache import BytecodeCache as BytecodeCache
-from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache
-from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache
-from .environment import Environment as Environment
-from .environment import Template as Template
-from .exceptions import TemplateAssertionError as TemplateAssertionError
-from .exceptions import TemplateError as TemplateError
-from .exceptions import TemplateNotFound as TemplateNotFound
-from .exceptions import TemplateRuntimeError as TemplateRuntimeError
-from .exceptions import TemplatesNotFound as TemplatesNotFound
-from .exceptions import TemplateSyntaxError as TemplateSyntaxError
-from .exceptions import UndefinedError as UndefinedError
-from .loaders import BaseLoader as BaseLoader
-from .loaders import ChoiceLoader as ChoiceLoader
-from .loaders import DictLoader as DictLoader
-from .loaders import FileSystemLoader as FileSystemLoader
-from .loaders import FunctionLoader as FunctionLoader
-from .loaders import ModuleLoader as ModuleLoader
-from .loaders import PackageLoader as PackageLoader
-from .loaders import PrefixLoader as PrefixLoader
-from .runtime import ChainableUndefined as ChainableUndefined
-from .runtime import DebugUndefined as DebugUndefined
-from .runtime import make_logging_undefined as make_logging_undefined
-from .runtime import StrictUndefined as StrictUndefined
-from .runtime import Undefined as Undefined
-from .utils import clear_caches as clear_caches
-from .utils import is_undefined as is_undefined
-from .utils import pass_context as pass_context
-from .utils import pass_environment as pass_environment
-from .utils import pass_eval_context as pass_eval_context
-from .utils import select_autoescape as select_autoescape
-
-__version__ = "3.1.3"
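For orientation, the deleted __init__.py above only re-exports the public API and pins __version__ = "3.1.3". A minimal, hedged sketch of how those re-exported names are normally used (the directory and template names here are hypothetical):

    from jinja2 import Environment, FileSystemLoader, select_autoescape

    # Hypothetical project layout: templates live in ./templates and
    # autoescaping is decided per file extension by select_autoescape().
    env = Environment(
        loader=FileSystemLoader("templates"),
        autoescape=select_autoescape(),
    )

    template = env.get_template("hello.html")  # hypothetical template name
    print(template.render(name="World"))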
The package's 25 byte-compiled artifacts under venv/lib/python3.11/site-packages/jinja2/__pycache__/ are deleted as well. Each of these entries reads "Binary files differ" with deleted file mode 100644; the blob hashes are kept here for reference:

    __init__.cpython-311.pyc       index 2d28a72..0000000
    _identifier.cpython-311.pyc    index 4032f55..0000000
    async_utils.cpython-311.pyc    index b512073..0000000
    bccache.cpython-311.pyc        index 4c6e708..0000000
    compiler.cpython-311.pyc       index e620ecd..0000000
    constants.cpython-311.pyc      index f7470a8..0000000
    debug.cpython-311.pyc          index f8e40a5..0000000
    defaults.cpython-311.pyc       index e04a6cd..0000000
    environment.cpython-311.pyc    index 5c0a4e3..0000000
    exceptions.cpython-311.pyc     index e298512..0000000
    ext.cpython-311.pyc            index 03a8ffe..0000000
    filters.cpython-311.pyc        index f857e4c..0000000
    idtracking.cpython-311.pyc     index bffa9d6..0000000
    lexer.cpython-311.pyc          index 1f44ed6..0000000
    loaders.cpython-311.pyc        index a8c3716..0000000
    meta.cpython-311.pyc           index 4913275..0000000
    nativetypes.cpython-311.pyc    index cf504d2..0000000
    nodes.cpython-311.pyc          index 05ac572..0000000
    optimizer.cpython-311.pyc      index a3c57f6..0000000
    parser.cpython-311.pyc         index b353188..0000000
    runtime.cpython-311.pyc        index e5616fb..0000000
    sandbox.cpython-311.pyc        index 666954e..0000000
    tests.cpython-311.pyc          index a80ec3e..0000000
    utils.cpython-311.pyc          index 2d97d64..0000000
    visitor.cpython-311.pyc        index d0f0209..0000000
diff --git a/venv/lib/python3.11/site-packages/jinja2/_identifier.py b/venv/lib/python3.11/site-packages/jinja2/_identifier.py
deleted file mode 100644
index 928c150..0000000
--- a/venv/lib/python3.11/site-packages/jinja2/_identifier.py
+++ /dev/null
@@ -1,6 +0,0 @@
(6 lines removed: a single re.compile call defining `pattern`, the Unicode character-class regex used to recognize identifier characters, generated by scripts/generate_identifier_pattern.py. The very long character-class literal is not reproduced in this listing.)

diff --git a/venv/lib/python3.11/site-packages/jinja2/async_utils.py b/venv/lib/python3.11/site-packages/jinja2/async_utils.py
deleted file mode 100644
index 715d701..0000000
--- a/venv/lib/python3.11/site-packages/jinja2/async_utils.py
+++ /dev/null
@@ -1,84 +0,0 @@
(84 lines removed: the async support helpers. `async_variant(normal_func)` decorates an async implementation so that a single callable dispatches between the sync and async versions based on `environment.is_async`, taking the docs and annotations from the sync function but the name from the async one. `auto_await` awaits a value only if it is awaitable, skipping common primitive types; `auto_aiter` iterates sync and async iterables uniformly; `auto_to_list` collects such an iterable into a list.)
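These helpers exist to back Jinja's async mode, in which compiled templates await awaitables transparently. A hedged sketch of that mode from the user side (the coroutine name is hypothetical; enable_async and render_async are the standard switches):

    import asyncio
    from jinja2 import Environment

    async def fetch_greeting():
        # Hypothetical async data source called from inside the template.
        return "Hello from a coroutine"

    async def main():
        # enable_async compiles templates so that calls such as
        # fetch_greeting() are awaited via helpers like auto_await.
        env = Environment(enable_async=True)
        template = env.from_string("{{ fetch_greeting() }}!")
        print(await template.render_async(fetch_greeting=fetch_greeting))

    asyncio.run(main())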
diff --git a/venv/lib/python3.11/site-packages/jinja2/bccache.py b/venv/lib/python3.11/site-packages/jinja2/bccache.py
deleted file mode 100644
index d0ddf56..0000000
--- a/venv/lib/python3.11/site-packages/jinja2/bccache.py
+++ /dev/null
@@ -1,406 +0,0 @@
(406 lines removed: the optional bytecode cache system. `Bucket` holds the marshalled bytecode for one template together with a source checksum and the `bc_magic` header, and rejects cached code whose magic bytes or checksum no longer match. `BytecodeCache` is the abstract base: subclasses override `load_bytecode` and `dump_bytecode`, while `get_cache_key`, `get_source_checksum` and `get_bucket` are provided. `FileSystemBytecodeCache` stores buckets as files named by a pattern, defaulting to "__jinja2_%s.cache" in a per-user cache directory under the system temp dir, and writes through a temporary file plus os.replace so readers never see a half-written cache file. `MemcachedBytecodeCache` accepts any client exposing get(key) and set(key, value[, timeout]), prefixes keys with "jinja2/bytecode/", and ignores memcache errors by default.)
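The removed bccache.py provides the caches summarized above; a short, hedged sketch of how one is typically attached to an environment (the cache directory and template name are hypothetical; bytecode_cache is the documented Environment argument):

    from jinja2 import Environment, FileSystemLoader, FileSystemBytecodeCache

    # Hypothetical cache location; with no argument the cache picks a
    # per-user directory under the system temp dir, as described above.
    bcc = FileSystemBytecodeCache("/tmp/jinja_cache", "%s.cache")

    env = Environment(
        loader=FileSystemLoader("templates"),  # hypothetical template dir
        bytecode_cache=bcc,
    )

    # The first load compiles the template and stores its bucket; later
    # processes pointed at the same cache directory skip recompilation.
    template = env.get_template("report.html")  # hypothetical name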
self.visit(node.left, frame) -            self.write(f" {op} ") -            self.visit(node.right, frame) - -        self.write(")") - -    return visitor - - -def _make_unop( -    op: str, -) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: -    @optimizeconst -    def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: -        if ( -            self.environment.sandboxed -            and op in self.environment.intercepted_unops  # type: ignore -        ): -            self.write(f"environment.call_unop(context, {op!r}, ") -            self.visit(node.node, frame) -        else: -            self.write("(" + op) -            self.visit(node.node, frame) - -        self.write(")") - -    return visitor - - -def generate( -    node: nodes.Template, -    environment: "Environment", -    name: t.Optional[str], -    filename: t.Optional[str], -    stream: t.Optional[t.TextIO] = None, -    defer_init: bool = False, -    optimized: bool = True, -) -> t.Optional[str]: -    """Generate the python source for a node tree.""" -    if not isinstance(node, nodes.Template): -        raise TypeError("Can't compile non template nodes") - -    generator = environment.code_generator_class( -        environment, name, filename, stream, defer_init, optimized -    ) -    generator.visit(node) - -    if stream is None: -        return generator.stream.getvalue()  # type: ignore - -    return None - - -def has_safe_repr(value: t.Any) -> bool: -    """Does the node have a safe representation?""" -    if value is None or value is NotImplemented or value is Ellipsis: -        return True - -    if type(value) in {bool, int, float, complex, range, str, Markup}: -        return True - -    if type(value) in {tuple, list, set, frozenset}: -        return all(has_safe_repr(v) for v in value) - -    if type(value) is dict: -        return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) - -    return False - - -def find_undeclared( -    nodes: t.Iterable[nodes.Node], names: t.Iterable[str] -) -> t.Set[str]: -    """Check if the names passed are accessed undeclared.  The return value -    is a set of all the undeclared names from the sequence of names found. -    """ -    visitor = UndeclaredNameVisitor(names) -    try: -        for node in nodes: -            visitor.visit(node) -    except VisitorExit: -        pass -    return visitor.undeclared - - -class MacroRef: -    def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: -        self.node = node -        self.accesses_caller = False -        self.accesses_kwargs = False -        self.accesses_varargs = False - - -class Frame: -    """Holds compile time information for us.""" - -    def __init__( -        self, -        eval_ctx: EvalContext, -        parent: t.Optional["Frame"] = None, -        level: t.Optional[int] = None, -    ) -> None: -        self.eval_ctx = eval_ctx - -        # the parent of this frame -        self.parent = parent - -        if parent is None: -            self.symbols = Symbols(level=level) - -            # in some dynamic inheritance situations the compiler needs to add -            # write tests around output statements. -            self.require_output_check = False - -            # inside some tags we are using a buffer rather than yield statements. -            # this for example affects {% filter %} or {% macro %}.  If a frame -            # is buffered this variable points to the name of the list used as -            # buffer. 
-            self.buffer: t.Optional[str] = None - -            # the name of the block we're in, otherwise None. -            self.block: t.Optional[str] = None - -        else: -            self.symbols = Symbols(parent.symbols, level=level) -            self.require_output_check = parent.require_output_check -            self.buffer = parent.buffer -            self.block = parent.block - -        # a toplevel frame is the root + soft frames such as if conditions. -        self.toplevel = False - -        # the root frame is basically just the outermost frame, so no if -        # conditions.  This information is used to optimize inheritance -        # situations. -        self.rootlevel = False - -        # variables set inside of loops and blocks should not affect outer frames, -        # but they still needs to be kept track of as part of the active context. -        self.loop_frame = False -        self.block_frame = False - -        # track whether the frame is being used in an if-statement or conditional -        # expression as it determines which errors should be raised during runtime -        # or compile time. -        self.soft_frame = False - -    def copy(self) -> "Frame": -        """Create a copy of the current one.""" -        rv = object.__new__(self.__class__) -        rv.__dict__.update(self.__dict__) -        rv.symbols = self.symbols.copy() -        return rv - -    def inner(self, isolated: bool = False) -> "Frame": -        """Return an inner frame.""" -        if isolated: -            return Frame(self.eval_ctx, level=self.symbols.level + 1) -        return Frame(self.eval_ctx, self) - -    def soft(self) -> "Frame": -        """Return a soft frame.  A soft frame may not be modified as -        standalone thing as it shares the resources with the frame it -        was created of, but it's not a rootlevel frame any longer. - -        This is only used to implement if-statements and conditional -        expressions. -        """ -        rv = self.copy() -        rv.rootlevel = False -        rv.soft_frame = True -        return rv - -    __copy__ = copy - - -class VisitorExit(RuntimeError): -    """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" - - -class DependencyFinderVisitor(NodeVisitor): -    """A visitor that collects filter and test calls.""" - -    def __init__(self) -> None: -        self.filters: t.Set[str] = set() -        self.tests: t.Set[str] = set() - -    def visit_Filter(self, node: nodes.Filter) -> None: -        self.generic_visit(node) -        self.filters.add(node.name) - -    def visit_Test(self, node: nodes.Test) -> None: -        self.generic_visit(node) -        self.tests.add(node.name) - -    def visit_Block(self, node: nodes.Block) -> None: -        """Stop visiting at blocks.""" - - -class UndeclaredNameVisitor(NodeVisitor): -    """A visitor that checks if a name is accessed without being -    declared.  This is different from the frame visitor as it will -    not stop at closure frames. 
-    """ - -    def __init__(self, names: t.Iterable[str]) -> None: -        self.names = set(names) -        self.undeclared: t.Set[str] = set() - -    def visit_Name(self, node: nodes.Name) -> None: -        if node.ctx == "load" and node.name in self.names: -            self.undeclared.add(node.name) -            if self.undeclared == self.names: -                raise VisitorExit() -        else: -            self.names.discard(node.name) - -    def visit_Block(self, node: nodes.Block) -> None: -        """Stop visiting a blocks.""" - - -class CompilerExit(Exception): -    """Raised if the compiler encountered a situation where it just -    doesn't make sense to further process the code.  Any block that -    raises such an exception is not further processed. -    """ - - -class CodeGenerator(NodeVisitor): -    def __init__( -        self, -        environment: "Environment", -        name: t.Optional[str], -        filename: t.Optional[str], -        stream: t.Optional[t.TextIO] = None, -        defer_init: bool = False, -        optimized: bool = True, -    ) -> None: -        if stream is None: -            stream = StringIO() -        self.environment = environment -        self.name = name -        self.filename = filename -        self.stream = stream -        self.created_block_context = False -        self.defer_init = defer_init -        self.optimizer: t.Optional[Optimizer] = None - -        if optimized: -            self.optimizer = Optimizer(environment) - -        # aliases for imports -        self.import_aliases: t.Dict[str, str] = {} - -        # a registry for all blocks.  Because blocks are moved out -        # into the global python scope they are registered here -        self.blocks: t.Dict[str, nodes.Block] = {} - -        # the number of extends statements so far -        self.extends_so_far = 0 - -        # some templates have a rootlevel extends.  In this case we -        # can safely assume that we're a child template and do some -        # more optimizations. -        self.has_known_extends = False - -        # the current line number -        self.code_lineno = 1 - -        # registry of all filters and tests (global, not block local) -        self.tests: t.Dict[str, str] = {} -        self.filters: t.Dict[str, str] = {} - -        # the debug information -        self.debug_info: t.List[t.Tuple[int, int]] = [] -        self._write_debug_info: t.Optional[int] = None - -        # the number of new lines before the next write() -        self._new_lines = 0 - -        # the line number of the last written statement -        self._last_line = 0 - -        # true if nothing was written so far. -        self._first_write = True - -        # used by the `temporary_identifier` method to get new -        # unique, temporary identifier -        self._last_identifier = 0 - -        # the current indentation -        self._indentation = 0 - -        # Tracks toplevel assignments -        self._assign_stack: t.List[t.Set[str]] = [] - -        # Tracks parameter definition blocks -        self._param_def_block: t.List[t.Set[str]] = [] - -        # Tracks the current context. 
-        self._context_reference_stack = ["context"] - -    @property -    def optimized(self) -> bool: -        return self.optimizer is not None - -    # -- Various compilation helpers - -    def fail(self, msg: str, lineno: int) -> "te.NoReturn": -        """Fail with a :exc:`TemplateAssertionError`.""" -        raise TemplateAssertionError(msg, lineno, self.name, self.filename) - -    def temporary_identifier(self) -> str: -        """Get a new unique identifier.""" -        self._last_identifier += 1 -        return f"t_{self._last_identifier}" - -    def buffer(self, frame: Frame) -> None: -        """Enable buffering for the frame from that point onwards.""" -        frame.buffer = self.temporary_identifier() -        self.writeline(f"{frame.buffer} = []") - -    def return_buffer_contents( -        self, frame: Frame, force_unescaped: bool = False -    ) -> None: -        """Return the buffer contents of the frame.""" -        if not force_unescaped: -            if frame.eval_ctx.volatile: -                self.writeline("if context.eval_ctx.autoescape:") -                self.indent() -                self.writeline(f"return Markup(concat({frame.buffer}))") -                self.outdent() -                self.writeline("else:") -                self.indent() -                self.writeline(f"return concat({frame.buffer})") -                self.outdent() -                return -            elif frame.eval_ctx.autoescape: -                self.writeline(f"return Markup(concat({frame.buffer}))") -                return -        self.writeline(f"return concat({frame.buffer})") - -    def indent(self) -> None: -        """Indent by one.""" -        self._indentation += 1 - -    def outdent(self, step: int = 1) -> None: -        """Outdent by step.""" -        self._indentation -= step - -    def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: -        """Yield or write into the frame buffer.""" -        if frame.buffer is None: -            self.writeline("yield ", node) -        else: -            self.writeline(f"{frame.buffer}.append(", node) - -    def end_write(self, frame: Frame) -> None: -        """End the writing process started by `start_write`.""" -        if frame.buffer is not None: -            self.write(")") - -    def simple_write( -        self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None -    ) -> None: -        """Simple shortcut for start_write + write + end_write.""" -        self.start_write(frame, node) -        self.write(s) -        self.end_write(frame) - -    def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: -        """Visit a list of nodes as block in a frame.  If the current frame -        is no buffer a dummy ``if 0: yield None`` is written automatically. 
-        """ -        try: -            self.writeline("pass") -            for node in nodes: -                self.visit(node, frame) -        except CompilerExit: -            pass - -    def write(self, x: str) -> None: -        """Write a string into the output stream.""" -        if self._new_lines: -            if not self._first_write: -                self.stream.write("\n" * self._new_lines) -                self.code_lineno += self._new_lines -                if self._write_debug_info is not None: -                    self.debug_info.append((self._write_debug_info, self.code_lineno)) -                    self._write_debug_info = None -            self._first_write = False -            self.stream.write("    " * self._indentation) -            self._new_lines = 0 -        self.stream.write(x) - -    def writeline( -        self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 -    ) -> None: -        """Combination of newline and write.""" -        self.newline(node, extra) -        self.write(x) - -    def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: -        """Add one or more newlines before the next write.""" -        self._new_lines = max(self._new_lines, 1 + extra) -        if node is not None and node.lineno != self._last_line: -            self._write_debug_info = node.lineno -            self._last_line = node.lineno - -    def signature( -        self, -        node: t.Union[nodes.Call, nodes.Filter, nodes.Test], -        frame: Frame, -        extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, -    ) -> None: -        """Writes a function call to the stream for the current node. -        A leading comma is added automatically.  The extra keyword -        arguments may not include python keywords otherwise a syntax -        error could occur.  The extra keyword arguments should be given -        as python dict. -        """ -        # if any of the given keyword arguments is a python keyword -        # we have to make sure that no invalid call is created. 
-        kwarg_workaround = any( -            is_python_keyword(t.cast(str, k)) -            for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) -        ) - -        for arg in node.args: -            self.write(", ") -            self.visit(arg, frame) - -        if not kwarg_workaround: -            for kwarg in node.kwargs: -                self.write(", ") -                self.visit(kwarg, frame) -            if extra_kwargs is not None: -                for key, value in extra_kwargs.items(): -                    self.write(f", {key}={value}") -        if node.dyn_args: -            self.write(", *") -            self.visit(node.dyn_args, frame) - -        if kwarg_workaround: -            if node.dyn_kwargs is not None: -                self.write(", **dict({") -            else: -                self.write(", **{") -            for kwarg in node.kwargs: -                self.write(f"{kwarg.key!r}: ") -                self.visit(kwarg.value, frame) -                self.write(", ") -            if extra_kwargs is not None: -                for key, value in extra_kwargs.items(): -                    self.write(f"{key!r}: {value}, ") -            if node.dyn_kwargs is not None: -                self.write("}, **") -                self.visit(node.dyn_kwargs, frame) -                self.write(")") -            else: -                self.write("}") - -        elif node.dyn_kwargs is not None: -            self.write(", **") -            self.visit(node.dyn_kwargs, frame) - -    def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: -        """Find all filter and test names used in the template and -        assign them to variables in the compiled namespace. Checking -        that the names are registered with the environment is done when -        compiling the Filter and Test nodes. If the node is in an If or -        CondExpr node, the check is done at runtime instead. - -        .. versionchanged:: 3.0 -            Filters and tests in If and CondExpr nodes are checked at -            runtime instead of compile time. 
-        """ -        visitor = DependencyFinderVisitor() - -        for node in nodes: -            visitor.visit(node) - -        for id_map, names, dependency in (self.filters, visitor.filters, "filters"), ( -            self.tests, -            visitor.tests, -            "tests", -        ): -            for name in sorted(names): -                if name not in id_map: -                    id_map[name] = self.temporary_identifier() - -                # add check during runtime that dependencies used inside of executed -                # blocks are defined, as this step may be skipped during compile time -                self.writeline("try:") -                self.indent() -                self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") -                self.outdent() -                self.writeline("except KeyError:") -                self.indent() -                self.writeline("@internalcode") -                self.writeline(f"def {id_map[name]}(*unused):") -                self.indent() -                self.writeline( -                    f'raise TemplateRuntimeError("No {dependency[:-1]}' -                    f' named {name!r} found.")' -                ) -                self.outdent() -                self.outdent() - -    def enter_frame(self, frame: Frame) -> None: -        undefs = [] -        for target, (action, param) in frame.symbols.loads.items(): -            if action == VAR_LOAD_PARAMETER: -                pass -            elif action == VAR_LOAD_RESOLVE: -                self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") -            elif action == VAR_LOAD_ALIAS: -                self.writeline(f"{target} = {param}") -            elif action == VAR_LOAD_UNDEFINED: -                undefs.append(target) -            else: -                raise NotImplementedError("unknown load instruction") -        if undefs: -            self.writeline(f"{' = '.join(undefs)} = missing") - -    def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: -        if not with_python_scope: -            undefs = [] -            for target in frame.symbols.loads: -                undefs.append(target) -            if undefs: -                self.writeline(f"{' = '.join(undefs)} = missing") - -    def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: -        return async_value if self.environment.is_async else sync_value - -    def func(self, name: str) -> str: -        return f"{self.choose_async()}def {name}" - -    def macro_body( -        self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame -    ) -> t.Tuple[Frame, MacroRef]: -        """Dump the function def of a macro or call block.""" -        frame = frame.inner() -        frame.symbols.analyze_node(node) -        macro_ref = MacroRef(node) - -        explicit_caller = None -        skip_special_params = set() -        args = [] - -        for idx, arg in enumerate(node.args): -            if arg.name == "caller": -                explicit_caller = idx -            if arg.name in ("kwargs", "varargs"): -                skip_special_params.add(arg.name) -            args.append(frame.symbols.ref(arg.name)) - -        undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) - -        if "caller" in undeclared: -            # In older Jinja versions there was a bug that allowed caller -            # to retain the special behavior even if it was mentioned in -            # the argument list.  
However thankfully this was only really -            # working if it was the last argument.  So we are explicitly -            # checking this now and error out if it is anywhere else in -            # the argument list. -            if explicit_caller is not None: -                try: -                    node.defaults[explicit_caller - len(node.args)] -                except IndexError: -                    self.fail( -                        "When defining macros or call blocks the " -                        'special "caller" argument must be omitted ' -                        "or be given a default.", -                        node.lineno, -                    ) -            else: -                args.append(frame.symbols.declare_parameter("caller")) -            macro_ref.accesses_caller = True -        if "kwargs" in undeclared and "kwargs" not in skip_special_params: -            args.append(frame.symbols.declare_parameter("kwargs")) -            macro_ref.accesses_kwargs = True -        if "varargs" in undeclared and "varargs" not in skip_special_params: -            args.append(frame.symbols.declare_parameter("varargs")) -            macro_ref.accesses_varargs = True - -        # macros are delayed, they never require output checks -        frame.require_output_check = False -        frame.symbols.analyze_node(node) -        self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) -        self.indent() - -        self.buffer(frame) -        self.enter_frame(frame) - -        self.push_parameter_definitions(frame) -        for idx, arg in enumerate(node.args): -            ref = frame.symbols.ref(arg.name) -            self.writeline(f"if {ref} is missing:") -            self.indent() -            try: -                default = node.defaults[idx - len(node.args)] -            except IndexError: -                self.writeline( -                    f'{ref} = undefined("parameter {arg.name!r} was not provided",' -                    f" name={arg.name!r})" -                ) -            else: -                self.writeline(f"{ref} = ") -                self.visit(default, frame) -            self.mark_parameter_stored(ref) -            self.outdent() -        self.pop_parameter_definitions() - -        self.blockvisit(node.body, frame) -        self.return_buffer_contents(frame, force_unescaped=True) -        self.leave_frame(frame, with_python_scope=True) -        self.outdent() - -        return frame, macro_ref - -    def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: -        """Dump the macro definition for the def created by macro_body.""" -        arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) -        name = getattr(macro_ref.node, "name", None) -        if len(macro_ref.node.args) == 1: -            arg_tuple += "," -        self.write( -            f"Macro(environment, macro, {name!r}, ({arg_tuple})," -            f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," -            f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" -        ) - -    def position(self, node: nodes.Node) -> str: -        """Return a human readable position for the node.""" -        rv = f"line {node.lineno}" -        if self.name is not None: -            rv = f"{rv} in {self.name!r}" -        return rv - -    def dump_local_context(self, frame: Frame) -> str: -        items_kv = ", ".join( -            f"{name!r}: {target}" -            for name, target in frame.symbols.dump_stores().items() -        ) -        return 
f"{{{items_kv}}}" - -    def write_commons(self) -> None: -        """Writes a common preamble that is used by root and block functions. -        Primarily this sets up common local helpers and enforces a generator -        through a dead branch. -        """ -        self.writeline("resolve = context.resolve_or_missing") -        self.writeline("undefined = environment.undefined") -        self.writeline("concat = environment.concat") -        # always use the standard Undefined class for the implicit else of -        # conditional expressions -        self.writeline("cond_expr_undefined = Undefined") -        self.writeline("if 0: yield None") - -    def push_parameter_definitions(self, frame: Frame) -> None: -        """Pushes all parameter targets from the given frame into a local -        stack that permits tracking of yet to be assigned parameters.  In -        particular this enables the optimization from `visit_Name` to skip -        undefined expressions for parameters in macros as macros can reference -        otherwise unbound parameters. -        """ -        self._param_def_block.append(frame.symbols.dump_param_targets()) - -    def pop_parameter_definitions(self) -> None: -        """Pops the current parameter definitions set.""" -        self._param_def_block.pop() - -    def mark_parameter_stored(self, target: str) -> None: -        """Marks a parameter in the current parameter definitions as stored. -        This will skip the enforced undefined checks. -        """ -        if self._param_def_block: -            self._param_def_block[-1].discard(target) - -    def push_context_reference(self, target: str) -> None: -        self._context_reference_stack.append(target) - -    def pop_context_reference(self) -> None: -        self._context_reference_stack.pop() - -    def get_context_ref(self) -> str: -        return self._context_reference_stack[-1] - -    def get_resolve_func(self) -> str: -        target = self._context_reference_stack[-1] -        if target == "context": -            return "resolve" -        return f"{target}.resolve" - -    def derive_context(self, frame: Frame) -> str: -        return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" - -    def parameter_is_undeclared(self, target: str) -> bool: -        """Checks if a given target is an undeclared parameter.""" -        if not self._param_def_block: -            return False -        return target in self._param_def_block[-1] - -    def push_assign_tracking(self) -> None: -        """Pushes a new layer for assignment tracking.""" -        self._assign_stack.append(set()) - -    def pop_assign_tracking(self, frame: Frame) -> None: -        """Pops the topmost level for assignment tracking and updates the -        context variables if necessary. 
-        """ -        vars = self._assign_stack.pop() -        if ( -            not frame.block_frame -            and not frame.loop_frame -            and not frame.toplevel -            or not vars -        ): -            return -        public_names = [x for x in vars if x[:1] != "_"] -        if len(vars) == 1: -            name = next(iter(vars)) -            ref = frame.symbols.ref(name) -            if frame.loop_frame: -                self.writeline(f"_loop_vars[{name!r}] = {ref}") -                return -            if frame.block_frame: -                self.writeline(f"_block_vars[{name!r}] = {ref}") -                return -            self.writeline(f"context.vars[{name!r}] = {ref}") -        else: -            if frame.loop_frame: -                self.writeline("_loop_vars.update({") -            elif frame.block_frame: -                self.writeline("_block_vars.update({") -            else: -                self.writeline("context.vars.update({") -            for idx, name in enumerate(vars): -                if idx: -                    self.write(", ") -                ref = frame.symbols.ref(name) -                self.write(f"{name!r}: {ref}") -            self.write("})") -        if not frame.block_frame and not frame.loop_frame and public_names: -            if len(public_names) == 1: -                self.writeline(f"context.exported_vars.add({public_names[0]!r})") -            else: -                names_str = ", ".join(map(repr, public_names)) -                self.writeline(f"context.exported_vars.update(({names_str}))") - -    # -- Statement Visitors - -    def visit_Template( -        self, node: nodes.Template, frame: t.Optional[Frame] = None -    ) -> None: -        assert frame is None, "no root frame allowed" -        eval_ctx = EvalContext(self.environment, self.name) - -        from .runtime import exported, async_exported - -        if self.environment.is_async: -            exported_names = sorted(exported + async_exported) -        else: -            exported_names = sorted(exported) - -        self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) - -        # if we want a deferred initialization we cannot move the -        # environment into a local name -        envenv = "" if self.defer_init else ", environment=environment" - -        # do we have an extends tag at all?  If not, we can save some -        # overhead by just not processing any inheritance code. -        have_extends = node.find(nodes.Extends) is not None - -        # find all blocks -        for block in node.find_all(nodes.Block): -            if block.name in self.blocks: -                self.fail(f"block {block.name!r} defined twice", block.lineno) -            self.blocks[block.name] = block - -        # find all imports and import them -        for import_ in node.find_all(nodes.ImportedName): -            if import_.importname not in self.import_aliases: -                imp = import_.importname -                self.import_aliases[imp] = alias = self.temporary_identifier() -                if "." in imp: -                    module, obj = imp.rsplit(".", 1) -                    self.writeline(f"from {module} import {obj} as {alias}") -                else: -                    self.writeline(f"import {imp} as {alias}") - -        # add the load name -        self.writeline(f"name = {self.name!r}") - -        # generate the root render function. 
-        self.writeline( -            f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 -        ) -        self.indent() -        self.write_commons() - -        # process the root -        frame = Frame(eval_ctx) -        if "self" in find_undeclared(node.body, ("self",)): -            ref = frame.symbols.declare_parameter("self") -            self.writeline(f"{ref} = TemplateReference(context)") -        frame.symbols.analyze_node(node) -        frame.toplevel = frame.rootlevel = True -        frame.require_output_check = have_extends and not self.has_known_extends -        if have_extends: -            self.writeline("parent_template = None") -        self.enter_frame(frame) -        self.pull_dependencies(node.body) -        self.blockvisit(node.body, frame) -        self.leave_frame(frame, with_python_scope=True) -        self.outdent() - -        # make sure that the parent root is called. -        if have_extends: -            if not self.has_known_extends: -                self.indent() -                self.writeline("if parent_template is not None:") -            self.indent() -            if not self.environment.is_async: -                self.writeline("yield from parent_template.root_render_func(context)") -            else: -                self.writeline( -                    "async for event in parent_template.root_render_func(context):" -                ) -                self.indent() -                self.writeline("yield event") -                self.outdent() -            self.outdent(1 + (not self.has_known_extends)) - -        # at this point we now have the blocks collected and can visit them too. -        for name, block in self.blocks.items(): -            self.writeline( -                f"{self.func('block_' + name)}(context, missing=missing{envenv}):", -                block, -                1, -            ) -            self.indent() -            self.write_commons() -            # It's important that we do not make this frame a child of the -            # toplevel template.  This would cause a variety of -            # interesting issues with identifier tracking. 
-            block_frame = Frame(eval_ctx) -            block_frame.block_frame = True -            undeclared = find_undeclared(block.body, ("self", "super")) -            if "self" in undeclared: -                ref = block_frame.symbols.declare_parameter("self") -                self.writeline(f"{ref} = TemplateReference(context)") -            if "super" in undeclared: -                ref = block_frame.symbols.declare_parameter("super") -                self.writeline(f"{ref} = context.super({name!r}, block_{name})") -            block_frame.symbols.analyze_node(block) -            block_frame.block = name -            self.writeline("_block_vars = {}") -            self.enter_frame(block_frame) -            self.pull_dependencies(block.body) -            self.blockvisit(block.body, block_frame) -            self.leave_frame(block_frame, with_python_scope=True) -            self.outdent() - -        blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) -        self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) -        debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) -        self.writeline(f"debug_info = {debug_kv_str!r}") - -    def visit_Block(self, node: nodes.Block, frame: Frame) -> None: -        """Call a block and register it for the template.""" -        level = 0 -        if frame.toplevel: -            # if we know that we are a child template, there is no need to -            # check if we are one -            if self.has_known_extends: -                return -            if self.extends_so_far > 0: -                self.writeline("if parent_template is None:") -                self.indent() -                level += 1 - -        if node.scoped: -            context = self.derive_context(frame) -        else: -            context = self.get_context_ref() - -        if node.required: -            self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) -            self.indent() -            self.writeline( -                f'raise TemplateRuntimeError("Required block {node.name!r} not found")', -                node, -            ) -            self.outdent() - -        if not self.environment.is_async and frame.buffer is None: -            self.writeline( -                f"yield from context.blocks[{node.name!r}][0]({context})", node -            ) -        else: -            self.writeline( -                f"{self.choose_async()}for event in" -                f" context.blocks[{node.name!r}][0]({context}):", -                node, -            ) -            self.indent() -            self.simple_write("event", frame) -            self.outdent() - -        self.outdent(level) - -    def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: -        """Calls the extender.""" -        if not frame.toplevel: -            self.fail("cannot use extend from a non top-level scope", node.lineno) - -        # if the number of extends statements in general is zero so -        # far, we don't have to add a check if something extended -        # the template before this one. -        if self.extends_so_far > 0: -            # if we have a known extends we just add a template runtime -            # error into the generated code.  We could catch that at compile -            # time too, but i welcome it not to confuse users by throwing the -            # same error at different times just "because we can". 
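The per-block functions and the parent_template bookkeeping above are what template inheritance compiles down to. A short sketch of the resulting behaviour through the public API (loader contents and template names are illustrative):

from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({
    "base.html": "<title>{% block title %}Base{% endblock %}</title>",
    "child.html": (
        "{% extends 'base.html' %}"
        "{% block title %}{{ super() }} - Child{% endblock %}"
    ),
}))

# the child's block calls super() to pull in the parent block's output
print(env.get_template("child.html").render())
# -> <title>Base - Child</title>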
-            if not self.has_known_extends: -                self.writeline("if parent_template is not None:") -                self.indent() -            self.writeline('raise TemplateRuntimeError("extended multiple times")') - -            # if we have a known extends already we don't need that code here -            # as we know that the template execution will end here. -            if self.has_known_extends: -                raise CompilerExit() -            else: -                self.outdent() - -        self.writeline("parent_template = environment.get_template(", node) -        self.visit(node.template, frame) -        self.write(f", {self.name!r})") -        self.writeline("for name, parent_block in parent_template.blocks.items():") -        self.indent() -        self.writeline("context.blocks.setdefault(name, []).append(parent_block)") -        self.outdent() - -        # if this extends statement was in the root level we can take -        # advantage of that information and simplify the generated code -        # in the top level from this point onwards -        if frame.rootlevel: -            self.has_known_extends = True - -        # and now we have one more -        self.extends_so_far += 1 - -    def visit_Include(self, node: nodes.Include, frame: Frame) -> None: -        """Handles includes.""" -        if node.ignore_missing: -            self.writeline("try:") -            self.indent() - -        func_name = "get_or_select_template" -        if isinstance(node.template, nodes.Const): -            if isinstance(node.template.value, str): -                func_name = "get_template" -            elif isinstance(node.template.value, (tuple, list)): -                func_name = "select_template" -        elif isinstance(node.template, (nodes.Tuple, nodes.List)): -            func_name = "select_template" - -        self.writeline(f"template = environment.{func_name}(", node) -        self.visit(node.template, frame) -        self.write(f", {self.name!r})") -        if node.ignore_missing: -            self.outdent() -            self.writeline("except TemplateNotFound:") -            self.indent() -            self.writeline("pass") -            self.outdent() -            self.writeline("else:") -            self.indent() - -        skip_event_yield = False -        if node.with_context: -            self.writeline( -                f"{self.choose_async()}for event in template.root_render_func(" -                "template.new_context(context.get_all(), True," -                f" {self.dump_local_context(frame)})):" -            ) -        elif self.environment.is_async: -            self.writeline( -                "for event in (await template._get_default_module_async())" -                "._body_stream:" -            ) -        else: -            self.writeline("yield from template._get_default_module()._body_stream") -            skip_event_yield = True - -        if not skip_event_yield: -            self.indent() -            self.simple_write("event", frame) -            self.outdent() - -        if node.ignore_missing: -            self.outdent() - -    def _import_common( -        self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame -    ) -> None: -        self.write(f"{self.choose_async('await ')}environment.get_template(") -        self.visit(node.template, frame) -        self.write(f", {self.name!r}).") - -        if node.with_context: -            f_name = f"make_module{self.choose_async('_async')}" -            self.write( -                
f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" -            ) -        else: -            self.write(f"_get_default_module{self.choose_async('_async')}(context)") - -    def visit_Import(self, node: nodes.Import, frame: Frame) -> None: -        """Visit regular imports.""" -        self.writeline(f"{frame.symbols.ref(node.target)} = ", node) -        if frame.toplevel: -            self.write(f"context.vars[{node.target!r}] = ") - -        self._import_common(node, frame) - -        if frame.toplevel and not node.target.startswith("_"): -            self.writeline(f"context.exported_vars.discard({node.target!r})") - -    def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: -        """Visit named imports.""" -        self.newline(node) -        self.write("included_template = ") -        self._import_common(node, frame) -        var_names = [] -        discarded_names = [] -        for name in node.names: -            if isinstance(name, tuple): -                name, alias = name -            else: -                alias = name -            self.writeline( -                f"{frame.symbols.ref(alias)} =" -                f" getattr(included_template, {name!r}, missing)" -            ) -            self.writeline(f"if {frame.symbols.ref(alias)} is missing:") -            self.indent() -            message = ( -                "the template {included_template.__name__!r}" -                f" (imported on {self.position(node)})" -                f" does not export the requested name {name!r}" -            ) -            self.writeline( -                f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" -            ) -            self.outdent() -            if frame.toplevel: -                var_names.append(alias) -                if not alias.startswith("_"): -                    discarded_names.append(alias) - -        if var_names: -            if len(var_names) == 1: -                name = var_names[0] -                self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") -            else: -                names_kv = ", ".join( -                    f"{name!r}: {frame.symbols.ref(name)}" for name in var_names -                ) -                self.writeline(f"context.vars.update({{{names_kv}}})") -        if discarded_names: -            if len(discarded_names) == 1: -                self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") -            else: -                names_str = ", ".join(map(repr, discarded_names)) -                self.writeline( -                    f"context.exported_vars.difference_update(({names_str}))" -                ) - -    def visit_For(self, node: nodes.For, frame: Frame) -> None: -        loop_frame = frame.inner() -        loop_frame.loop_frame = True -        test_frame = frame.inner() -        else_frame = frame.inner() - -        # try to figure out if we have an extended loop.  An extended loop -        # is necessary if the loop is in recursive mode if the special loop -        # variable is accessed in the body if the body is a scoped block. 
-        extended_loop = ( -            node.recursive -            or "loop" -            in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) -            or any(block.scoped for block in node.find_all(nodes.Block)) -        ) - -        loop_ref = None -        if extended_loop: -            loop_ref = loop_frame.symbols.declare_parameter("loop") - -        loop_frame.symbols.analyze_node(node, for_branch="body") -        if node.else_: -            else_frame.symbols.analyze_node(node, for_branch="else") - -        if node.test: -            loop_filter_func = self.temporary_identifier() -            test_frame.symbols.analyze_node(node, for_branch="test") -            self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) -            self.indent() -            self.enter_frame(test_frame) -            self.writeline(self.choose_async("async for ", "for ")) -            self.visit(node.target, loop_frame) -            self.write(" in ") -            self.write(self.choose_async("auto_aiter(fiter)", "fiter")) -            self.write(":") -            self.indent() -            self.writeline("if ", node.test) -            self.visit(node.test, test_frame) -            self.write(":") -            self.indent() -            self.writeline("yield ") -            self.visit(node.target, loop_frame) -            self.outdent(3) -            self.leave_frame(test_frame, with_python_scope=True) - -        # if we don't have an recursive loop we have to find the shadowed -        # variables at that point.  Because loops can be nested but the loop -        # variable is a special one we have to enforce aliasing for it. -        if node.recursive: -            self.writeline( -                f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node -            ) -            self.indent() -            self.buffer(loop_frame) - -            # Use the same buffer for the else frame -            else_frame.buffer = loop_frame.buffer - -        # make sure the loop variable is a special one and raise a template -        # assertion error if a loop tries to write to loop -        if extended_loop: -            self.writeline(f"{loop_ref} = missing") - -        for name in node.find_all(nodes.Name): -            if name.ctx == "store" and name.name == "loop": -                self.fail( -                    "Can't assign to special loop variable in for-loop target", -                    name.lineno, -                ) - -        if node.else_: -            iteration_indicator = self.temporary_identifier() -            self.writeline(f"{iteration_indicator} = 1") - -        self.writeline(self.choose_async("async for ", "for "), node) -        self.visit(node.target, loop_frame) -        if extended_loop: -            self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") -        else: -            self.write(" in ") - -        if node.test: -            self.write(f"{loop_filter_func}(") -        if node.recursive: -            self.write("reciter") -        else: -            if self.environment.is_async and not extended_loop: -                self.write("auto_aiter(") -            self.visit(node.iter, frame) -            if self.environment.is_async and not extended_loop: -                self.write(")") -        if node.test: -            self.write(")") - -        if node.recursive: -            self.write(", undefined, loop_render_func, depth):") -        else: -            self.write(", undefined):" if extended_loop else ":") - -        
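The extended-loop analysis above decides whether the special loop variable, the inline loop filter and the else branch have to be wired into the generated for statement. At the template level that corresponds to the following sketch, using standard syntax:

from jinja2 import Environment

env = Environment()
tmpl = env.from_string(
    "{% for item in items if item > 1 %}"
    "{{ loop.index }}:{{ item }} "
    "{% else %}no items{% endfor %}"
)

print(tmpl.render(items=[1, 2, 3]))   # -> 1:2 2:3
print(tmpl.render(items=[]))          # -> no items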
self.indent() -        self.enter_frame(loop_frame) - -        self.writeline("_loop_vars = {}") -        self.blockvisit(node.body, loop_frame) -        if node.else_: -            self.writeline(f"{iteration_indicator} = 0") -        self.outdent() -        self.leave_frame( -            loop_frame, with_python_scope=node.recursive and not node.else_ -        ) - -        if node.else_: -            self.writeline(f"if {iteration_indicator}:") -            self.indent() -            self.enter_frame(else_frame) -            self.blockvisit(node.else_, else_frame) -            self.leave_frame(else_frame) -            self.outdent() - -        # if the node was recursive we have to return the buffer contents -        # and start the iteration code -        if node.recursive: -            self.return_buffer_contents(loop_frame) -            self.outdent() -            self.start_write(frame, node) -            self.write(f"{self.choose_async('await ')}loop(") -            if self.environment.is_async: -                self.write("auto_aiter(") -            self.visit(node.iter, frame) -            if self.environment.is_async: -                self.write(")") -            self.write(", loop)") -            self.end_write(frame) - -        # at the end of the iteration, clear any assignments made in the -        # loop from the top level -        if self._assign_stack: -            self._assign_stack[-1].difference_update(loop_frame.symbols.stores) - -    def visit_If(self, node: nodes.If, frame: Frame) -> None: -        if_frame = frame.soft() -        self.writeline("if ", node) -        self.visit(node.test, if_frame) -        self.write(":") -        self.indent() -        self.blockvisit(node.body, if_frame) -        self.outdent() -        for elif_ in node.elif_: -            self.writeline("elif ", elif_) -            self.visit(elif_.test, if_frame) -            self.write(":") -            self.indent() -            self.blockvisit(elif_.body, if_frame) -            self.outdent() -        if node.else_: -            self.writeline("else:") -            self.indent() -            self.blockvisit(node.else_, if_frame) -            self.outdent() - -    def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: -        macro_frame, macro_ref = self.macro_body(node, frame) -        self.newline() -        if frame.toplevel: -            if not node.name.startswith("_"): -                self.write(f"context.exported_vars.add({node.name!r})") -            self.writeline(f"context.vars[{node.name!r}] = ") -        self.write(f"{frame.symbols.ref(node.name)} = ") -        self.macro_def(macro_ref, macro_frame) - -    def visit_CallBlock(self, node: nodes.CallBlock, frame: Frame) -> None: -        call_frame, macro_ref = self.macro_body(node, frame) -        self.writeline("caller = ") -        self.macro_def(macro_ref, call_frame) -        self.start_write(frame, node) -        self.visit_Call(node.call, frame, forward_caller=True) -        self.end_write(frame) - -    def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: -        filter_frame = frame.inner() -        filter_frame.symbols.analyze_node(node) -        self.enter_frame(filter_frame) -        self.buffer(filter_frame) -        self.blockvisit(node.body, filter_frame) -        self.start_write(frame, node) -        self.visit_Filter(node.filter, filter_frame) -        self.end_write(frame) -        self.leave_frame(filter_frame) - -    def visit_With(self, node: nodes.With, frame: Frame) -> None: 
-        with_frame = frame.inner() -        with_frame.symbols.analyze_node(node) -        self.enter_frame(with_frame) -        for target, expr in zip(node.targets, node.values): -            self.newline() -            self.visit(target, with_frame) -            self.write(" = ") -            self.visit(expr, frame) -        self.blockvisit(node.body, with_frame) -        self.leave_frame(with_frame) - -    def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: -        self.newline(node) -        self.visit(node.node, frame) - -    class _FinalizeInfo(t.NamedTuple): -        const: t.Optional[t.Callable[..., str]] -        src: t.Optional[str] - -    @staticmethod -    def _default_finalize(value: t.Any) -> t.Any: -        """The default finalize function if the environment isn't -        configured with one. Or, if the environment has one, this is -        called on that function's output for constants. -        """ -        return str(value) - -    _finalize: t.Optional[_FinalizeInfo] = None - -    def _make_finalize(self) -> _FinalizeInfo: -        """Build the finalize function to be used on constants and at -        runtime. Cached so it's only created once for all output nodes. - -        Returns a ``namedtuple`` with the following attributes: - -        ``const`` -            A function to finalize constant data at compile time. - -        ``src`` -            Source code to output around nodes to be evaluated at -            runtime. -        """ -        if self._finalize is not None: -            return self._finalize - -        finalize: t.Optional[t.Callable[..., t.Any]] -        finalize = default = self._default_finalize -        src = None - -        if self.environment.finalize: -            src = "environment.finalize(" -            env_finalize = self.environment.finalize -            pass_arg = { -                _PassArg.context: "context", -                _PassArg.eval_context: "context.eval_ctx", -                _PassArg.environment: "environment", -            }.get( -                _PassArg.from_obj(env_finalize)  # type: ignore -            ) -            finalize = None - -            if pass_arg is None: - -                def finalize(value: t.Any) -> t.Any:  # noqa: F811 -                    return default(env_finalize(value)) - -            else: -                src = f"{src}{pass_arg}, " - -                if pass_arg == "environment": - -                    def finalize(value: t.Any) -> t.Any:  # noqa: F811 -                        return default(env_finalize(self.environment, value)) - -        self._finalize = self._FinalizeInfo(finalize, src) -        return self._finalize - -    def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: -        """Given a group of constant values converted from ``Output`` -        child nodes, produce a string to write to the template module -        source. -        """ -        return repr(concat(group)) - -    def _output_child_to_const( -        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo -    ) -> str: -        """Try to optimize a child of an ``Output`` node by trying to -        convert it to constant, finalized data at compile time. - -        If :exc:`Impossible` is raised, the node is not constant and -        will be evaluated at runtime. Any other exception will also be -        evaluated at runtime for easier debugging. 
-        """ -        const = node.as_const(frame.eval_ctx) - -        if frame.eval_ctx.autoescape: -            const = escape(const) - -        # Template data doesn't go through finalize. -        if isinstance(node, nodes.TemplateData): -            return str(const) - -        return finalize.const(const)  # type: ignore - -    def _output_child_pre( -        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo -    ) -> None: -        """Output extra source code before visiting a child of an -        ``Output`` node. -        """ -        if frame.eval_ctx.volatile: -            self.write("(escape if context.eval_ctx.autoescape else str)(") -        elif frame.eval_ctx.autoescape: -            self.write("escape(") -        else: -            self.write("str(") - -        if finalize.src is not None: -            self.write(finalize.src) - -    def _output_child_post( -        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo -    ) -> None: -        """Output extra source code after visiting a child of an -        ``Output`` node. -        """ -        self.write(")") - -        if finalize.src is not None: -            self.write(")") - -    def visit_Output(self, node: nodes.Output, frame: Frame) -> None: -        # If an extends is active, don't render outside a block. -        if frame.require_output_check: -            # A top-level extends is known to exist at compile time. -            if self.has_known_extends: -                return - -            self.writeline("if parent_template is None:") -            self.indent() - -        finalize = self._make_finalize() -        body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] - -        # Evaluate constants at compile time if possible. Each item in -        # body will be either a list of static data or a node to be -        # evaluated at runtime. -        for child in node.nodes: -            try: -                if not ( -                    # If the finalize function requires runtime context, -                    # constants can't be evaluated at compile time. -                    finalize.const -                    # Unless it's basic template data that won't be -                    # finalized anyway. -                    or isinstance(child, nodes.TemplateData) -                ): -                    raise nodes.Impossible() - -                const = self._output_child_to_const(child, frame, finalize) -            except (nodes.Impossible, Exception): -                # The node was not constant and needs to be evaluated at -                # runtime. Or another error was raised, which is easier -                # to debug at runtime. -                body.append(child) -                continue - -            if body and isinstance(body[-1], list): -                body[-1].append(const) -            else: -                body.append([const]) - -        if frame.buffer is not None: -            if len(body) == 1: -                self.writeline(f"{frame.buffer}.append(") -            else: -                self.writeline(f"{frame.buffer}.extend((") - -            self.indent() - -        for item in body: -            if isinstance(item, list): -                # A group of constant data to join and output. 
-                val = self._output_const_repr(item) - -                if frame.buffer is None: -                    self.writeline("yield " + val) -                else: -                    self.writeline(val + ",") -            else: -                if frame.buffer is None: -                    self.writeline("yield ", item) -                else: -                    self.newline(item) - -                # A node to be evaluated at runtime. -                self._output_child_pre(item, frame, finalize) -                self.visit(item, frame) -                self._output_child_post(item, frame, finalize) - -                if frame.buffer is not None: -                    self.write(",") - -        if frame.buffer is not None: -            self.outdent() -            self.writeline(")" if len(body) == 1 else "))") - -        if frame.require_output_check: -            self.outdent() - -    def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: -        self.push_assign_tracking() -        self.newline(node) -        self.visit(node.target, frame) -        self.write(" = ") -        self.visit(node.node, frame) -        self.pop_assign_tracking(frame) - -    def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: -        self.push_assign_tracking() -        block_frame = frame.inner() -        # This is a special case.  Since a set block always captures we -        # will disable output checks.  This way one can use set blocks -        # toplevel even in extended templates. -        block_frame.require_output_check = False -        block_frame.symbols.analyze_node(node) -        self.enter_frame(block_frame) -        self.buffer(block_frame) -        self.blockvisit(node.body, block_frame) -        self.newline(node) -        self.visit(node.target, frame) -        self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") -        if node.filter is not None: -            self.visit_Filter(node.filter, block_frame) -        else: -            self.write(f"concat({block_frame.buffer})") -        self.write(")") -        self.pop_assign_tracking(frame) -        self.leave_frame(block_frame) - -    # -- Expression Visitors - -    def visit_Name(self, node: nodes.Name, frame: Frame) -> None: -        if node.ctx == "store" and ( -            frame.toplevel or frame.loop_frame or frame.block_frame -        ): -            if self._assign_stack: -                self._assign_stack[-1].add(node.name) -        ref = frame.symbols.ref(node.name) - -        # If we are looking up a variable we might have to deal with the -        # case where it's undefined.  We can skip that case if the load -        # instruction indicates a parameter which are always defined. 
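visit_Name falls back to an undefined object whenever a looked-up name is missing; which undefined class is used is an Environment option. A short sketch:

from jinja2 import Environment, StrictUndefined, UndefinedError

# the default Undefined renders as an empty string
print(Environment().from_string("[{{ missing }}]").render())   # -> []

# StrictUndefined raises as soon as the missing value is actually used
try:
    Environment(undefined=StrictUndefined).from_string("[{{ missing }}]").render()
except UndefinedError as exc:
    print("raised:", exc)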
-        if node.ctx == "load": -            load = frame.symbols.find_load(ref) -            if not ( -                load is not None -                and load[0] == VAR_LOAD_PARAMETER -                and not self.parameter_is_undeclared(ref) -            ): -                self.write( -                    f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" -                ) -                return - -        self.write(ref) - -    def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: -        # NSRefs can only be used to store values; since they use the normal -        # `foo.bar` notation they will be parsed as a normal attribute access -        # when used anywhere but in a `set` context -        ref = frame.symbols.ref(node.name) -        self.writeline(f"if not isinstance({ref}, Namespace):") -        self.indent() -        self.writeline( -            "raise TemplateRuntimeError" -            '("cannot assign attribute on non-namespace object")' -        ) -        self.outdent() -        self.writeline(f"{ref}[{node.attr!r}]") - -    def visit_Const(self, node: nodes.Const, frame: Frame) -> None: -        val = node.as_const(frame.eval_ctx) -        if isinstance(val, float): -            self.write(str(val)) -        else: -            self.write(repr(val)) - -    def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: -        try: -            self.write(repr(node.as_const(frame.eval_ctx))) -        except nodes.Impossible: -            self.write( -                f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" -            ) - -    def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: -        self.write("(") -        idx = -1 -        for idx, item in enumerate(node.items): -            if idx: -                self.write(", ") -            self.visit(item, frame) -        self.write(",)" if idx == 0 else ")") - -    def visit_List(self, node: nodes.List, frame: Frame) -> None: -        self.write("[") -        for idx, item in enumerate(node.items): -            if idx: -                self.write(", ") -            self.visit(item, frame) -        self.write("]") - -    def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: -        self.write("{") -        for idx, item in enumerate(node.items): -            if idx: -                self.write(", ") -            self.visit(item.key, frame) -            self.write(": ") -            self.visit(item.value, frame) -        self.write("}") - -    visit_Add = _make_binop("+") -    visit_Sub = _make_binop("-") -    visit_Mul = _make_binop("*") -    visit_Div = _make_binop("/") -    visit_FloorDiv = _make_binop("//") -    visit_Pow = _make_binop("**") -    visit_Mod = _make_binop("%") -    visit_And = _make_binop("and") -    visit_Or = _make_binop("or") -    visit_Pos = _make_unop("+") -    visit_Neg = _make_unop("-") -    visit_Not = _make_unop("not ") - -    @optimizeconst -    def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None: -        if frame.eval_ctx.volatile: -            func_name = "(markup_join if context.eval_ctx.volatile else str_join)" -        elif frame.eval_ctx.autoescape: -            func_name = "markup_join" -        else: -            func_name = "str_join" -        self.write(f"{func_name}((") -        for arg in node.nodes: -            self.visit(arg, frame) -            self.write(", ") -        self.write("))") - -    @optimizeconst -    def visit_Compare(self, node: nodes.Compare, frame: Frame) -> 
None: -        self.write("(") -        self.visit(node.expr, frame) -        for op in node.ops: -            self.visit(op, frame) -        self.write(")") - -    def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: -        self.write(f" {operators[node.op]} ") -        self.visit(node.expr, frame) - -    @optimizeconst -    def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: -        if self.environment.is_async: -            self.write("(await auto_await(") - -        self.write("environment.getattr(") -        self.visit(node.node, frame) -        self.write(f", {node.attr!r})") - -        if self.environment.is_async: -            self.write("))") - -    @optimizeconst -    def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: -        # slices bypass the environment getitem method. -        if isinstance(node.arg, nodes.Slice): -            self.visit(node.node, frame) -            self.write("[") -            self.visit(node.arg, frame) -            self.write("]") -        else: -            if self.environment.is_async: -                self.write("(await auto_await(") - -            self.write("environment.getitem(") -            self.visit(node.node, frame) -            self.write(", ") -            self.visit(node.arg, frame) -            self.write(")") - -            if self.environment.is_async: -                self.write("))") - -    def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: -        if node.start is not None: -            self.visit(node.start, frame) -        self.write(":") -        if node.stop is not None: -            self.visit(node.stop, frame) -        if node.step is not None: -            self.write(":") -            self.visit(node.step, frame) - -    @contextmanager -    def _filter_test_common( -        self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool -    ) -> t.Iterator[None]: -        if self.environment.is_async: -            self.write("(await auto_await(") - -        if is_filter: -            self.write(f"{self.filters[node.name]}(") -            func = self.environment.filters.get(node.name) -        else: -            self.write(f"{self.tests[node.name]}(") -            func = self.environment.tests.get(node.name) - -        # When inside an If or CondExpr frame, allow the filter to be -        # undefined at compile time and only raise an error if it's -        # actually called at runtime. See pull_dependencies. -        if func is None and not frame.soft_frame: -            type_name = "filter" if is_filter else "test" -            self.fail(f"No {type_name} named {node.name!r}.", node.lineno) - -        pass_arg = { -            _PassArg.context: "context", -            _PassArg.eval_context: "context.eval_ctx", -            _PassArg.environment: "environment", -        }.get( -            _PassArg.from_obj(func)  # type: ignore -        ) - -        if pass_arg is not None: -            self.write(f"{pass_arg}, ") - -        # Back to the visitor function to handle visiting the target of -        # the filter or test. 
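The _PassArg lookup above is what lets decorators such as pass_context, pass_eval_context and pass_environment inject an extra leading argument when a filter or test is called. For example (the filter name is made up for illustration):

from jinja2 import Environment, pass_environment

@pass_environment
def shout(environment, value):
    # the environment is supplied automatically because of the decorator
    return str(value).upper() + "!"

env = Environment()
env.filters["shout"] = shout
print(env.from_string("{{ 'hello' | shout }}").render())   # -> HELLO!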
-        yield - -        self.signature(node, frame) -        self.write(")") - -        if self.environment.is_async: -            self.write("))") - -    @optimizeconst -    def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: -        with self._filter_test_common(node, frame, True): -            # if the filter node is None we are inside a filter block -            # and want to write to the current buffer -            if node.node is not None: -                self.visit(node.node, frame) -            elif frame.eval_ctx.volatile: -                self.write( -                    f"(Markup(concat({frame.buffer}))" -                    f" if context.eval_ctx.autoescape else concat({frame.buffer}))" -                ) -            elif frame.eval_ctx.autoescape: -                self.write(f"Markup(concat({frame.buffer}))") -            else: -                self.write(f"concat({frame.buffer})") - -    @optimizeconst -    def visit_Test(self, node: nodes.Test, frame: Frame) -> None: -        with self._filter_test_common(node, frame, False): -            self.visit(node.node, frame) - -    @optimizeconst -    def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: -        frame = frame.soft() - -        def write_expr2() -> None: -            if node.expr2 is not None: -                self.visit(node.expr2, frame) -                return - -            self.write( -                f'cond_expr_undefined("the inline if-expression on' -                f" {self.position(node)} evaluated to false and no else" -                f' section was defined.")' -            ) - -        self.write("(") -        self.visit(node.expr1, frame) -        self.write(" if ") -        self.visit(node.test, frame) -        self.write(" else ") -        write_expr2() -        self.write(")") - -    @optimizeconst -    def visit_Call( -        self, node: nodes.Call, frame: Frame, forward_caller: bool = False -    ) -> None: -        if self.environment.is_async: -            self.write("(await auto_await(") -        if self.environment.sandboxed: -            self.write("environment.call(context, ") -        else: -            self.write("context.call(") -        self.visit(node.node, frame) -        extra_kwargs = {"caller": "caller"} if forward_caller else None -        loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} -        block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} -        if extra_kwargs: -            extra_kwargs.update(loop_kwargs, **block_kwargs) -        elif loop_kwargs or block_kwargs: -            extra_kwargs = dict(loop_kwargs, **block_kwargs) -        self.signature(node, frame, extra_kwargs) -        self.write(")") -        if self.environment.is_async: -            self.write("))") - -    def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: -        self.write(node.key + "=") -        self.visit(node.value, frame) - -    # -- Unused nodes for extensions - -    def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: -        self.write("Markup(") -        self.visit(node.expr, frame) -        self.write(")") - -    def visit_MarkSafeIfAutoescape( -        self, node: nodes.MarkSafeIfAutoescape, frame: Frame -    ) -> None: -        self.write("(Markup if context.eval_ctx.autoescape else identity)(") -        self.visit(node.expr, frame) -        self.write(")") - -    def visit_EnvironmentAttribute( -        self, node: nodes.EnvironmentAttribute, frame: Frame -    ) -> None: -  
      self.write("environment." + node.name) - -    def visit_ExtensionAttribute( -        self, node: nodes.ExtensionAttribute, frame: Frame -    ) -> None: -        self.write(f"environment.extensions[{node.identifier!r}].{node.name}") - -    def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: -        self.write(self.import_aliases[node.importname]) - -    def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: -        self.write(node.name) - -    def visit_ContextReference( -        self, node: nodes.ContextReference, frame: Frame -    ) -> None: -        self.write("context") - -    def visit_DerivedContextReference( -        self, node: nodes.DerivedContextReference, frame: Frame -    ) -> None: -        self.write(self.derive_context(frame)) - -    def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: -        self.writeline("continue", node) - -    def visit_Break(self, node: nodes.Break, frame: Frame) -> None: -        self.writeline("break", node) - -    def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: -        scope_frame = frame.inner() -        scope_frame.symbols.analyze_node(node) -        self.enter_frame(scope_frame) -        self.blockvisit(node.body, scope_frame) -        self.leave_frame(scope_frame) - -    def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: -        ctx = self.temporary_identifier() -        self.writeline(f"{ctx} = {self.derive_context(frame)}") -        self.writeline(f"{ctx}.vars = ") -        self.visit(node.context, frame) -        self.push_context_reference(ctx) - -        scope_frame = frame.inner(isolated=True) -        scope_frame.symbols.analyze_node(node) -        self.enter_frame(scope_frame) -        self.blockvisit(node.body, scope_frame) -        self.leave_frame(scope_frame) -        self.pop_context_reference() - -    def visit_EvalContextModifier( -        self, node: nodes.EvalContextModifier, frame: Frame -    ) -> None: -        for keyword in node.options: -            self.writeline(f"context.eval_ctx.{keyword.key} = ") -            self.visit(keyword.value, frame) -            try: -                val = keyword.value.as_const(frame.eval_ctx) -            except nodes.Impossible: -                frame.eval_ctx.volatile = True -            else: -                setattr(frame.eval_ctx, keyword.key, val) - -    def visit_ScopedEvalContextModifier( -        self, node: nodes.ScopedEvalContextModifier, frame: Frame -    ) -> None: -        old_ctx_name = self.temporary_identifier() -        saved_ctx = frame.eval_ctx.save() -        self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") -        self.visit_EvalContextModifier(node, frame) -        for child in node.body: -            self.visit(child, frame) -        frame.eval_ctx.revert(saved_ctx) -        self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/venv/lib/python3.11/site-packages/jinja2/constants.py b/venv/lib/python3.11/site-packages/jinja2/constants.py deleted file mode 100644 index 41a1c23..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/constants.py +++ /dev/null @@ -1,20 +0,0 @@ -#: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = """\ -a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at -auctor augue bibendum blandit class commodo condimentum congue consectetuer -consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus -diam dictum dictumst dignissim 
dis dolor donec dui duis egestas eget eleifend -elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames -faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac -hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum -justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem -luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie -mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non -nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque -penatibus per pharetra phasellus placerat platea porta porttitor posuere -potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus -ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit -sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor -tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices -ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate""" diff --git a/venv/lib/python3.11/site-packages/jinja2/debug.py b/venv/lib/python3.11/site-packages/jinja2/debug.py deleted file mode 100644 index 7ed7e92..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/debug.py +++ /dev/null @@ -1,191 +0,0 @@ -import sys -import typing as t -from types import CodeType -from types import TracebackType - -from .exceptions import TemplateSyntaxError -from .utils import internal_code -from .utils import missing - -if t.TYPE_CHECKING: -    from .runtime import Context - - -def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: -    """Rewrite the current exception to replace any tracebacks from -    within compiled template code with tracebacks that look like they -    came from the template source. - -    This must be called within an ``except`` block. - -    :param source: For ``TemplateSyntaxError``, the original source if -        known. -    :return: The original exception with the rewritten traceback. -    """ -    _, exc_value, tb = sys.exc_info() -    exc_value = t.cast(BaseException, exc_value) -    tb = t.cast(TracebackType, tb) - -    if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: -        exc_value.translated = True -        exc_value.source = source -        # Remove the old traceback, otherwise the frames from the -        # compiler still show up. -        exc_value.with_traceback(None) -        # Outside of runtime, so the frame isn't executing template -        # code, but it still needs to point at the template. -        tb = fake_traceback( -            exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno -        ) -    else: -        # Skip the frame for the render function. -        tb = tb.tb_next - -    stack = [] - -    # Build the stack of traceback object, replacing any in template -    # code with the source file and line information. -    while tb is not None: -        # Skip frames decorated with @internalcode. These are internal -        # calls that aren't useful in template debugging output. 
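The traceback rewriting described above is why template errors report the template name and line number rather than the generated Python module. A minimal illustration of the user-visible effect, using only the public API:

from jinja2 import Environment, TemplateSyntaxError

env = Environment()
try:
    env.from_string("{% if %}\n{% endif %}")
except TemplateSyntaxError as exc:
    # the error points at the template source line, not at compiled Python
    print(exc.lineno, exc.message)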
-        if tb.tb_frame.f_code in internal_code: -            tb = tb.tb_next -            continue - -        template = tb.tb_frame.f_globals.get("__jinja_template__") - -        if template is not None: -            lineno = template.get_corresponding_lineno(tb.tb_lineno) -            fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) -            stack.append(fake_tb) -        else: -            stack.append(tb) - -        tb = tb.tb_next - -    tb_next = None - -    # Assign tb_next in reverse to avoid circular references. -    for tb in reversed(stack): -        tb.tb_next = tb_next -        tb_next = tb - -    return exc_value.with_traceback(tb_next) - - -def fake_traceback(  # type: ignore -    exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int -) -> TracebackType: -    """Produce a new traceback object that looks like it came from the -    template source instead of the compiled code. The filename, line -    number, and location name will point to the template, and the local -    variables will be the current template context. - -    :param exc_value: The original exception to be re-raised to create -        the new traceback. -    :param tb: The original traceback to get the local variables and -        code info from. -    :param filename: The template filename. -    :param lineno: The line number in the template source. -    """ -    if tb is not None: -        # Replace the real locals with the context that would be -        # available at that point in the template. -        locals = get_template_locals(tb.tb_frame.f_locals) -        locals.pop("__jinja_exception__", None) -    else: -        locals = {} - -    globals = { -        "__name__": filename, -        "__file__": filename, -        "__jinja_exception__": exc_value, -    } -    # Raise an exception at the correct line number. -    code: CodeType = compile( -        "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" -    ) - -    # Build a new code object that points to the template file and -    # replaces the location with a block name. -    location = "template" - -    if tb is not None: -        function = tb.tb_frame.f_code.co_name - -        if function == "root": -            location = "top-level template code" -        elif function.startswith("block_"): -            location = f"block {function[6:]!r}" - -    if sys.version_info >= (3, 8): -        code = code.replace(co_name=location) -    else: -        code = CodeType( -            code.co_argcount, -            code.co_kwonlyargcount, -            code.co_nlocals, -            code.co_stacksize, -            code.co_flags, -            code.co_code, -            code.co_consts, -            code.co_names, -            code.co_varnames, -            code.co_filename, -            location, -            code.co_firstlineno, -            code.co_lnotab, -            code.co_freevars, -            code.co_cellvars, -        ) - -    # Execute the new code, which is guaranteed to raise, and return -    # the new traceback without this frame. -    try: -        exec(code, globals, locals) -    except BaseException: -        return sys.exc_info()[2].tb_next  # type: ignore - - -def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: -    """Based on the runtime locals, get the context that would be -    available at that point in the template. -    """ -    # Start with the current template context. 
-    ctx: "t.Optional[Context]" = real_locals.get("context") - -    if ctx is not None: -        data: t.Dict[str, t.Any] = ctx.get_all().copy() -    else: -        data = {} - -    # Might be in a derived context that only sets local variables -    # rather than pushing a context. Local variables follow the scheme -    # l_depth_name. Find the highest-depth local that has a value for -    # each name. -    local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} - -    for name, value in real_locals.items(): -        if not name.startswith("l_") or value is missing: -            # Not a template variable, or no longer relevant. -            continue - -        try: -            _, depth_str, name = name.split("_", 2) -            depth = int(depth_str) -        except ValueError: -            continue - -        cur_depth = local_overrides.get(name, (-1,))[0] - -        if cur_depth < depth: -            local_overrides[name] = (depth, value) - -    # Modify the context with any derived context. -    for name, (_, value) in local_overrides.items(): -        if value is missing: -            data.pop(name, None) -        else: -            data[name] = value - -    return data diff --git a/venv/lib/python3.11/site-packages/jinja2/defaults.py b/venv/lib/python3.11/site-packages/jinja2/defaults.py deleted file mode 100644 index 638cad3..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/defaults.py +++ /dev/null @@ -1,48 +0,0 @@ -import typing as t - -from .filters import FILTERS as DEFAULT_FILTERS  # noqa: F401 -from .tests import TESTS as DEFAULT_TESTS  # noqa: F401 -from .utils import Cycler -from .utils import generate_lorem_ipsum -from .utils import Joiner -from .utils import Namespace - -if t.TYPE_CHECKING: -    import typing_extensions as te - -# defaults for the parser / lexer -BLOCK_START_STRING = "{%" -BLOCK_END_STRING = "%}" -VARIABLE_START_STRING = "{{" -VARIABLE_END_STRING = "}}" -COMMENT_START_STRING = "{#" -COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX: t.Optional[str] = None -LINE_COMMENT_PREFIX: t.Optional[str] = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" -KEEP_TRAILING_NEWLINE = False - -# default filters, tests and namespace - -DEFAULT_NAMESPACE = { -    "range": range, -    "dict": dict, -    "lipsum": generate_lorem_ipsum, -    "cycler": Cycler, -    "joiner": Joiner, -    "namespace": Namespace, -} - -# default policies -DEFAULT_POLICIES: t.Dict[str, t.Any] = { -    "compiler.ascii_str": True, -    "urlize.rel": "noopener", -    "urlize.target": None, -    "urlize.extra_schemes": None, -    "truncate.leeway": 5, -    "json.dumps_function": None, -    "json.dumps_kwargs": {"sort_keys": True}, -    "ext.i18n.trimmed": False, -} diff --git a/venv/lib/python3.11/site-packages/jinja2/environment.py b/venv/lib/python3.11/site-packages/jinja2/environment.py deleted file mode 100644 index 185d332..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/environment.py +++ /dev/null @@ -1,1667 +0,0 @@ -"""Classes for managing templates and their runtime and compile time -options. -""" -import os -import typing -import typing as t -import weakref -from collections import ChainMap -from functools import lru_cache -from functools import partial -from functools import reduce -from types import CodeType - -from markupsafe import Markup - -from . 
import nodes -from .compiler import CodeGenerator -from .compiler import generate -from .defaults import BLOCK_END_STRING -from .defaults import BLOCK_START_STRING -from .defaults import COMMENT_END_STRING -from .defaults import COMMENT_START_STRING -from .defaults import DEFAULT_FILTERS -from .defaults import DEFAULT_NAMESPACE -from .defaults import DEFAULT_POLICIES -from .defaults import DEFAULT_TESTS -from .defaults import KEEP_TRAILING_NEWLINE -from .defaults import LINE_COMMENT_PREFIX -from .defaults import LINE_STATEMENT_PREFIX -from .defaults import LSTRIP_BLOCKS -from .defaults import NEWLINE_SEQUENCE -from .defaults import TRIM_BLOCKS -from .defaults import VARIABLE_END_STRING -from .defaults import VARIABLE_START_STRING -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .lexer import get_lexer -from .lexer import Lexer -from .lexer import TokenStream -from .nodes import EvalContext -from .parser import Parser -from .runtime import Context -from .runtime import new_context -from .runtime import Undefined -from .utils import _PassArg -from .utils import concat -from .utils import consume -from .utils import import_string -from .utils import internalcode -from .utils import LRUCache -from .utils import missing - -if t.TYPE_CHECKING: -    import typing_extensions as te -    from .bccache import BytecodeCache -    from .ext import Extension -    from .loaders import BaseLoader - -_env_bound = t.TypeVar("_env_bound", bound="Environment") - - -# for direct template usage we have up to ten living environments -@lru_cache(maxsize=10) -def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: -    """Return a new spontaneous environment. A spontaneous environment -    is used for templates created directly rather than through an -    existing environment. - -    :param cls: Environment class to create. -    :param args: Positional arguments passed to environment. -    """ -    env = cls(*args) -    env.shared = True -    return env - - -def create_cache( -    size: int, -) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: -    """Return the cache class for the given size.""" -    if size == 0: -        return None - -    if size < 0: -        return {} - -    return LRUCache(size)  # type: ignore - - -def copy_cache( -    cache: t.Optional[t.MutableMapping], -) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: -    """Create an empty copy of the given cache.""" -    if cache is None: -        return None - -    if type(cache) is dict: -        return {} - -    return LRUCache(cache.capacity)  # type: ignore - - -def load_extensions( -    environment: "Environment", -    extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], -) -> t.Dict[str, "Extension"]: -    """Load the extensions from the list and bind it to the environment. -    Returns a dict of instantiated extensions. 
-    """ -    result = {} - -    for extension in extensions: -        if isinstance(extension, str): -            extension = t.cast(t.Type["Extension"], import_string(extension)) - -        result[extension.identifier] = extension(environment) - -    return result - - -def _environment_config_check(environment: "Environment") -> "Environment": -    """Perform a sanity check on the environment.""" -    assert issubclass( -        environment.undefined, Undefined -    ), "'undefined' must be a subclass of 'jinja2.Undefined'." -    assert ( -        environment.block_start_string -        != environment.variable_start_string -        != environment.comment_start_string -    ), "block, variable and comment start strings must be different." -    assert environment.newline_sequence in { -        "\r", -        "\r\n", -        "\n", -    }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." -    return environment - - -class Environment: -    r"""The core component of Jinja is the `Environment`.  It contains -    important shared variables like configuration, filters, tests, -    globals and others.  Instances of this class may be modified if -    they are not shared and if no template was loaded so far. -    Modifications on environments after the first template was loaded -    will lead to surprising effects and undefined behavior. - -    Here are the possible initialization parameters: - -        `block_start_string` -            The string marking the beginning of a block.  Defaults to ``'{%'``. - -        `block_end_string` -            The string marking the end of a block.  Defaults to ``'%}'``. - -        `variable_start_string` -            The string marking the beginning of a print statement. -            Defaults to ``'{{'``. - -        `variable_end_string` -            The string marking the end of a print statement.  Defaults to -            ``'}}'``. - -        `comment_start_string` -            The string marking the beginning of a comment.  Defaults to ``'{#'``. - -        `comment_end_string` -            The string marking the end of a comment.  Defaults to ``'#}'``. - -        `line_statement_prefix` -            If given and a string, this will be used as prefix for line based -            statements.  See also :ref:`line-statements`. - -        `line_comment_prefix` -            If given and a string, this will be used as prefix for line based -            comments.  See also :ref:`line-statements`. - -            .. versionadded:: 2.2 - -        `trim_blocks` -            If this is set to ``True`` the first newline after a block is -            removed (block, not variable tag!).  Defaults to `False`. - -        `lstrip_blocks` -            If this is set to ``True`` leading spaces and tabs are stripped -            from the start of a line to a block.  Defaults to `False`. - -        `newline_sequence` -            The sequence that starts a newline.  Must be one of ``'\r'``, -            ``'\n'`` or ``'\r\n'``.  The default is ``'\n'`` which is a -            useful default for Linux and OS X systems as well as web -            applications. - -        `keep_trailing_newline` -            Preserve the trailing newline when rendering templates. -            The default is ``False``, which causes a single newline, -            if present, to be stripped from the end of the template. - -            .. versionadded:: 2.7 - -        `extensions` -            List of Jinja extensions to use.  
This can either be import paths -            as strings or extension classes.  For more information have a -            look at :ref:`the extensions documentation <jinja-extensions>`. - -        `optimized` -            should the optimizer be enabled?  Default is ``True``. - -        `undefined` -            :class:`Undefined` or a subclass of it that is used to represent -            undefined values in the template. - -        `finalize` -            A callable that can be used to process the result of a variable -            expression before it is output.  For example one can convert -            ``None`` implicitly into an empty string here. - -        `autoescape` -            If set to ``True`` the XML/HTML autoescaping feature is enabled by -            default.  For more details about autoescaping see -            :class:`~markupsafe.Markup`.  As of Jinja 2.4 this can also -            be a callable that is passed the template name and has to -            return ``True`` or ``False`` depending on autoescape should be -            enabled by default. - -            .. versionchanged:: 2.4 -               `autoescape` can now be a function - -        `loader` -            The template loader for this environment. - -        `cache_size` -            The size of the cache.  Per default this is ``400`` which means -            that if more than 400 templates are loaded the loader will clean -            out the least recently used template.  If the cache size is set to -            ``0`` templates are recompiled all the time, if the cache size is -            ``-1`` the cache will not be cleaned. - -            .. versionchanged:: 2.8 -               The cache size was increased to 400 from a low 50. - -        `auto_reload` -            Some loaders load templates from locations where the template -            sources may change (ie: file system or database).  If -            ``auto_reload`` is set to ``True`` (default) every time a template is -            requested the loader checks if the source changed and if yes, it -            will reload the template.  For higher performance it's possible to -            disable that. - -        `bytecode_cache` -            If set to a bytecode cache object, this object will provide a -            cache for the internal Jinja bytecode so that templates don't -            have to be parsed if they were not changed. - -            See :ref:`bytecode-cache` for more information. - -        `enable_async` -            If set to true this enables async template execution which -            allows using async functions and generators. -    """ - -    #: if this environment is sandboxed.  Modifying this variable won't make -    #: the environment sandboxed though.  For a real sandboxed environment -    #: have a look at jinja2.sandbox.  This flag alone controls the code -    #: generation by the compiler. -    sandboxed = False - -    #: True if the environment is just an overlay -    overlayed = False - -    #: the environment this environment is linked to if it is an overlay -    linked_to: t.Optional["Environment"] = None - -    #: shared environments have this set to `True`.  A shared environment -    #: must not be modified -    shared = False - -    #: the class that is used for code generation.  See -    #: :class:`~jinja2.compiler.CodeGenerator` for more information. -    code_generator_class: t.Type["CodeGenerator"] = CodeGenerator - -    concat = "".join - -    #: the context class that is used for templates.  
See -    #: :class:`~jinja2.runtime.Context` for more information. -    context_class: t.Type[Context] = Context - -    template_class: t.Type["Template"] - -    def __init__( -        self, -        block_start_string: str = BLOCK_START_STRING, -        block_end_string: str = BLOCK_END_STRING, -        variable_start_string: str = VARIABLE_START_STRING, -        variable_end_string: str = VARIABLE_END_STRING, -        comment_start_string: str = COMMENT_START_STRING, -        comment_end_string: str = COMMENT_END_STRING, -        line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, -        line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, -        trim_blocks: bool = TRIM_BLOCKS, -        lstrip_blocks: bool = LSTRIP_BLOCKS, -        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, -        keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, -        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), -        optimized: bool = True, -        undefined: t.Type[Undefined] = Undefined, -        finalize: t.Optional[t.Callable[..., t.Any]] = None, -        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, -        loader: t.Optional["BaseLoader"] = None, -        cache_size: int = 400, -        auto_reload: bool = True, -        bytecode_cache: t.Optional["BytecodeCache"] = None, -        enable_async: bool = False, -    ): -        # !!Important notice!! -        #   The constructor accepts quite a few arguments that should be -        #   passed by keyword rather than position.  However it's important to -        #   not change the order of arguments because it's used at least -        #   internally in those cases: -        #       -   spontaneous environments (i18n extension and Template) -        #       -   unittests -        #   If parameter changes are required only add parameters at the end -        #   and don't change the arguments (or the defaults!) of the arguments -        #   existing already. 
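In line with the note above, typical application code passes these settings by keyword; a minimal sketch (the ``templates`` directory and the autoescape choice are illustrative)::

    from jinja2 import Environment, FileSystemLoader, select_autoescape

    env = Environment(
        loader=FileSystemLoader("templates"),            # illustrative template directory
        autoescape=select_autoescape(["html", "xml"]),
        trim_blocks=True,
        lstrip_blocks=True,
    )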
- -        # lexer / parser information -        self.block_start_string = block_start_string -        self.block_end_string = block_end_string -        self.variable_start_string = variable_start_string -        self.variable_end_string = variable_end_string -        self.comment_start_string = comment_start_string -        self.comment_end_string = comment_end_string -        self.line_statement_prefix = line_statement_prefix -        self.line_comment_prefix = line_comment_prefix -        self.trim_blocks = trim_blocks -        self.lstrip_blocks = lstrip_blocks -        self.newline_sequence = newline_sequence -        self.keep_trailing_newline = keep_trailing_newline - -        # runtime information -        self.undefined: t.Type[Undefined] = undefined -        self.optimized = optimized -        self.finalize = finalize -        self.autoescape = autoescape - -        # defaults -        self.filters = DEFAULT_FILTERS.copy() -        self.tests = DEFAULT_TESTS.copy() -        self.globals = DEFAULT_NAMESPACE.copy() - -        # set the loader provided -        self.loader = loader -        self.cache = create_cache(cache_size) -        self.bytecode_cache = bytecode_cache -        self.auto_reload = auto_reload - -        # configurable policies -        self.policies = DEFAULT_POLICIES.copy() - -        # load extensions -        self.extensions = load_extensions(self, extensions) - -        self.is_async = enable_async -        _environment_config_check(self) - -    def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: -        """Adds an extension after the environment was created. - -        .. versionadded:: 2.5 -        """ -        self.extensions.update(load_extensions(self, [extension])) - -    def extend(self, **attributes: t.Any) -> None: -        """Add the items to the instance of the environment if they do not exist -        yet.  This is used by :ref:`extensions <writing-extensions>` to register -        callbacks and configuration values without breaking inheritance. -        """ -        for key, value in attributes.items(): -            if not hasattr(self, key): -                setattr(self, key, value) - -    def overlay( -        self, -        block_start_string: str = missing, -        block_end_string: str = missing, -        variable_start_string: str = missing, -        variable_end_string: str = missing, -        comment_start_string: str = missing, -        comment_end_string: str = missing, -        line_statement_prefix: t.Optional[str] = missing, -        line_comment_prefix: t.Optional[str] = missing, -        trim_blocks: bool = missing, -        lstrip_blocks: bool = missing, -        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, -        keep_trailing_newline: bool = missing, -        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, -        optimized: bool = missing, -        undefined: t.Type[Undefined] = missing, -        finalize: t.Optional[t.Callable[..., t.Any]] = missing, -        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, -        loader: t.Optional["BaseLoader"] = missing, -        cache_size: int = missing, -        auto_reload: bool = missing, -        bytecode_cache: t.Optional["BytecodeCache"] = missing, -        enable_async: bool = False, -    ) -> "Environment": -        """Create a new overlay environment that shares all the data with the -        current environment except for cache and the overridden attributes. 
-        Extensions cannot be removed for an overlayed environment.  An overlayed -        environment automatically gets all the extensions of the environment it -        is linked to plus optional extra extensions. - -        Creating overlays should happen after the initial environment was set -        up completely.  Not all attributes are truly linked, some are just -        copied over so modifications on the original environment may not shine -        through. - -        .. versionchanged:: 3.1.2 -            Added the ``newline_sequence``,, ``keep_trailing_newline``, -            and ``enable_async`` parameters to match ``__init__``. -        """ -        args = dict(locals()) -        del args["self"], args["cache_size"], args["extensions"], args["enable_async"] - -        rv = object.__new__(self.__class__) -        rv.__dict__.update(self.__dict__) -        rv.overlayed = True -        rv.linked_to = self - -        for key, value in args.items(): -            if value is not missing: -                setattr(rv, key, value) - -        if cache_size is not missing: -            rv.cache = create_cache(cache_size) -        else: -            rv.cache = copy_cache(self.cache) - -        rv.extensions = {} -        for key, value in self.extensions.items(): -            rv.extensions[key] = value.bind(rv) -        if extensions is not missing: -            rv.extensions.update(load_extensions(rv, extensions)) - -        if enable_async is not missing: -            rv.is_async = enable_async - -        return _environment_config_check(rv) - -    @property -    def lexer(self) -> Lexer: -        """The lexer for this environment.""" -        return get_lexer(self) - -    def iter_extensions(self) -> t.Iterator["Extension"]: -        """Iterates over the extensions by priority.""" -        return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) - -    def getitem( -        self, obj: t.Any, argument: t.Union[str, t.Any] -    ) -> t.Union[t.Any, Undefined]: -        """Get an item or attribute of an object but prefer the item.""" -        try: -            return obj[argument] -        except (AttributeError, TypeError, LookupError): -            if isinstance(argument, str): -                try: -                    attr = str(argument) -                except Exception: -                    pass -                else: -                    try: -                        return getattr(obj, attr) -                    except AttributeError: -                        pass -            return self.undefined(obj=obj, name=argument) - -    def getattr(self, obj: t.Any, attribute: str) -> t.Any: -        """Get an item or attribute of an object but prefer the attribute. -        Unlike :meth:`getitem` the attribute *must* be a string. 
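A small sketch of the overlay behaviour described above — the derived environment shares its parent's data but overrides individual settings::

    from jinja2 import Environment

    base = Environment(trim_blocks=False)
    variant = base.overlay(trim_blocks=True)

    assert variant.trim_blocks and not base.trim_blocks
    assert variant.linked_to is base and variant.overlayed
    assert variant.filters is base.filters  # shared with the parent, not copied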
-        """ -        try: -            return getattr(obj, attribute) -        except AttributeError: -            pass -        try: -            return obj[attribute] -        except (TypeError, LookupError, AttributeError): -            return self.undefined(obj=obj, name=attribute) - -    def _filter_test_common( -        self, -        name: t.Union[str, Undefined], -        value: t.Any, -        args: t.Optional[t.Sequence[t.Any]], -        kwargs: t.Optional[t.Mapping[str, t.Any]], -        context: t.Optional[Context], -        eval_ctx: t.Optional[EvalContext], -        is_filter: bool, -    ) -> t.Any: -        if is_filter: -            env_map = self.filters -            type_name = "filter" -        else: -            env_map = self.tests -            type_name = "test" - -        func = env_map.get(name)  # type: ignore - -        if func is None: -            msg = f"No {type_name} named {name!r}." - -            if isinstance(name, Undefined): -                try: -                    name._fail_with_undefined_error() -                except Exception as e: -                    msg = f"{msg} ({e}; did you forget to quote the callable name?)" - -            raise TemplateRuntimeError(msg) - -        args = [value, *(args if args is not None else ())] -        kwargs = kwargs if kwargs is not None else {} -        pass_arg = _PassArg.from_obj(func) - -        if pass_arg is _PassArg.context: -            if context is None: -                raise TemplateRuntimeError( -                    f"Attempted to invoke a context {type_name} without context." -                ) - -            args.insert(0, context) -        elif pass_arg is _PassArg.eval_context: -            if eval_ctx is None: -                if context is not None: -                    eval_ctx = context.eval_ctx -                else: -                    eval_ctx = EvalContext(self) - -            args.insert(0, eval_ctx) -        elif pass_arg is _PassArg.environment: -            args.insert(0, self) - -        return func(*args, **kwargs) - -    def call_filter( -        self, -        name: str, -        value: t.Any, -        args: t.Optional[t.Sequence[t.Any]] = None, -        kwargs: t.Optional[t.Mapping[str, t.Any]] = None, -        context: t.Optional[Context] = None, -        eval_ctx: t.Optional[EvalContext] = None, -    ) -> t.Any: -        """Invoke a filter on a value the same way the compiler does. - -        This might return a coroutine if the filter is running from an -        environment in async mode and the filter supports async -        execution. It's your responsibility to await this if needed. - -        .. versionadded:: 2.7 -        """ -        return self._filter_test_common( -            name, value, args, kwargs, context, eval_ctx, True -        ) - -    def call_test( -        self, -        name: str, -        value: t.Any, -        args: t.Optional[t.Sequence[t.Any]] = None, -        kwargs: t.Optional[t.Mapping[str, t.Any]] = None, -        context: t.Optional[Context] = None, -        eval_ctx: t.Optional[EvalContext] = None, -    ) -> t.Any: -        """Invoke a test on a value the same way the compiler does. - -        This might return a coroutine if the test is running from an -        environment in async mode and the test supports async execution. -        It's your responsibility to await this if needed. - -        .. versionchanged:: 3.0 -            Tests support ``@pass_context``, etc. decorators. Added -            the ``context`` and ``eval_ctx`` parameters. 
- -        .. versionadded:: 2.7 -        """ -        return self._filter_test_common( -            name, value, args, kwargs, context, eval_ctx, False -        ) - -    @internalcode -    def parse( -        self, -        source: str, -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -    ) -> nodes.Template: -        """Parse the sourcecode and return the abstract syntax tree.  This -        tree of nodes is used by the compiler to convert the template into -        executable source- or bytecode.  This is useful for debugging or to -        extract information from templates. - -        If you are :ref:`developing Jinja extensions <writing-extensions>` -        this gives you a good overview of the node tree generated. -        """ -        try: -            return self._parse(source, name, filename) -        except TemplateSyntaxError: -            self.handle_exception(source=source) - -    def _parse( -        self, source: str, name: t.Optional[str], filename: t.Optional[str] -    ) -> nodes.Template: -        """Internal parsing function used by `parse` and `compile`.""" -        return Parser(self, source, name, filename).parse() - -    def lex( -        self, -        source: str, -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -    ) -> t.Iterator[t.Tuple[int, str, str]]: -        """Lex the given sourcecode and return a generator that yields -        tokens as tuples in the form ``(lineno, token_type, value)``. -        This can be useful for :ref:`extension development <writing-extensions>` -        and debugging templates. - -        This does not perform preprocessing.  If you want the preprocessing -        of the extensions to be applied you have to filter source through -        the :meth:`preprocess` method. -        """ -        source = str(source) -        try: -            return self.lexer.tokeniter(source, name, filename) -        except TemplateSyntaxError: -            self.handle_exception(source=source) - -    def preprocess( -        self, -        source: str, -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -    ) -> str: -        """Preprocesses the source with all extensions.  This is automatically -        called for all parsing and compiling methods but *not* for :meth:`lex` -        because there you usually only want the actual source tokenized. -        """ -        return reduce( -            lambda s, e: e.preprocess(s, name, filename), -            self.iter_extensions(), -            str(source), -        ) - -    def _tokenize( -        self, -        source: str, -        name: t.Optional[str], -        filename: t.Optional[str] = None, -        state: t.Optional[str] = None, -    ) -> TokenStream: -        """Called by the parser to do the preprocessing and filtering -        for all the extensions.  Returns a :class:`~jinja2.lexer.TokenStream`. 
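``parse`` is handy for introspection; for example, pairing it with ``jinja2.meta`` to list the variables a template source references — a common pattern, shown here as a rough sketch::

    from jinja2 import Environment, meta

    env = Environment()
    ast = env.parse("Hello {{ user.name }}!{% for item in items %} {{ item }}{% endfor %}")
    print(meta.find_undeclared_variables(ast))  # -> {'user', 'items'}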
-        """ -        source = self.preprocess(source, name, filename) -        stream = self.lexer.tokenize(source, name, filename, state) - -        for ext in self.iter_extensions(): -            stream = ext.filter_stream(stream)  # type: ignore - -            if not isinstance(stream, TokenStream): -                stream = TokenStream(stream, name, filename)  # type: ignore - -        return stream - -    def _generate( -        self, -        source: nodes.Template, -        name: t.Optional[str], -        filename: t.Optional[str], -        defer_init: bool = False, -    ) -> str: -        """Internal hook that can be overridden to hook a different generate -        method in. - -        .. versionadded:: 2.5 -        """ -        return generate(  # type: ignore -            source, -            self, -            name, -            filename, -            defer_init=defer_init, -            optimized=self.optimized, -        ) - -    def _compile(self, source: str, filename: str) -> CodeType: -        """Internal hook that can be overridden to hook a different compile -        method in. - -        .. versionadded:: 2.5 -        """ -        return compile(source, filename, "exec") - -    @typing.overload -    def compile(  # type: ignore -        self, -        source: t.Union[str, nodes.Template], -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -        raw: "te.Literal[False]" = False, -        defer_init: bool = False, -    ) -> CodeType: -        ... - -    @typing.overload -    def compile( -        self, -        source: t.Union[str, nodes.Template], -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -        raw: "te.Literal[True]" = ..., -        defer_init: bool = False, -    ) -> str: -        ... - -    @internalcode -    def compile( -        self, -        source: t.Union[str, nodes.Template], -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -        raw: bool = False, -        defer_init: bool = False, -    ) -> t.Union[str, CodeType]: -        """Compile a node or template source code.  The `name` parameter is -        the load name of the template after it was joined using -        :meth:`join_path` if necessary, not the filename on the file system. -        the `filename` parameter is the estimated filename of the template on -        the file system.  If the template came from a database or memory this -        can be omitted. - -        The return value of this method is a python code object.  If the `raw` -        parameter is `True` the return value will be a string with python -        code equivalent to the bytecode returned otherwise.  This method is -        mainly used internally. - -        `defer_init` is use internally to aid the module code generator.  This -        causes the generated code to be able to import without the global -        environment variable to be set. - -        .. versionadded:: 2.4 -           `defer_init` parameter added. 
-        """ -        source_hint = None -        try: -            if isinstance(source, str): -                source_hint = source -                source = self._parse(source, name, filename) -            source = self._generate(source, name, filename, defer_init=defer_init) -            if raw: -                return source -            if filename is None: -                filename = "<template>" -            return self._compile(source, filename) -        except TemplateSyntaxError: -            self.handle_exception(source=source_hint) - -    def compile_expression( -        self, source: str, undefined_to_none: bool = True -    ) -> "TemplateExpression": -        """A handy helper method that returns a callable that accepts keyword -        arguments that appear as variables in the expression.  If called it -        returns the result of the expression. - -        This is useful if applications want to use the same rules as Jinja -        in template "configuration files" or similar situations. - -        Example usage: - -        >>> env = Environment() -        >>> expr = env.compile_expression('foo == 42') -        >>> expr(foo=23) -        False -        >>> expr(foo=42) -        True - -        Per default the return value is converted to `None` if the -        expression returns an undefined value.  This can be changed -        by setting `undefined_to_none` to `False`. - -        >>> env.compile_expression('var')() is None -        True -        >>> env.compile_expression('var', undefined_to_none=False)() -        Undefined - -        .. versionadded:: 2.1 -        """ -        parser = Parser(self, source, state="variable") -        try: -            expr = parser.parse_expression() -            if not parser.stream.eos: -                raise TemplateSyntaxError( -                    "chunk after expression", parser.stream.current.lineno, None, None -                ) -            expr.set_environment(self) -        except TemplateSyntaxError: -            self.handle_exception(source=source) - -        body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)] -        template = self.from_string(nodes.Template(body, lineno=1)) -        return TemplateExpression(template, undefined_to_none) - -    def compile_templates( -        self, -        target: t.Union[str, os.PathLike], -        extensions: t.Optional[t.Collection[str]] = None, -        filter_func: t.Optional[t.Callable[[str], bool]] = None, -        zip: t.Optional[str] = "deflated", -        log_function: t.Optional[t.Callable[[str], None]] = None, -        ignore_errors: bool = True, -    ) -> None: -        """Finds all the templates the loader can find, compiles them -        and stores them in `target`.  If `zip` is `None`, instead of in a -        zipfile, the templates will be stored in a directory. -        By default a deflate zip algorithm is used. To switch to -        the stored algorithm, `zip` can be set to ``'stored'``. - -        `extensions` and `filter_func` are passed to :meth:`list_templates`. -        Each template returned will be compiled to the target folder or -        zipfile. - -        By default template compilation errors are ignored.  In case a -        log function is provided, errors are logged.  If you want template -        syntax errors to abort the compilation you can set `ignore_errors` -        to `False` and you will get an exception on syntax errors. - -        .. 
versionadded:: 2.4 -        """ -        from .loaders import ModuleLoader - -        if log_function is None: - -            def log_function(x: str) -> None: -                pass - -        assert log_function is not None -        assert self.loader is not None, "No loader configured." - -        def write_file(filename: str, data: str) -> None: -            if zip: -                info = ZipInfo(filename) -                info.external_attr = 0o755 << 16 -                zip_file.writestr(info, data) -            else: -                with open(os.path.join(target, filename), "wb") as f: -                    f.write(data.encode("utf8")) - -        if zip is not None: -            from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED - -            zip_file = ZipFile( -                target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip] -            ) -            log_function(f"Compiling into Zip archive {target!r}") -        else: -            if not os.path.isdir(target): -                os.makedirs(target) -            log_function(f"Compiling into folder {target!r}") - -        try: -            for name in self.list_templates(extensions, filter_func): -                source, filename, _ = self.loader.get_source(self, name) -                try: -                    code = self.compile(source, name, filename, True, True) -                except TemplateSyntaxError as e: -                    if not ignore_errors: -                        raise -                    log_function(f'Could not compile "{name}": {e}') -                    continue - -                filename = ModuleLoader.get_module_filename(name) - -                write_file(filename, code) -                log_function(f'Compiled "{name}" as {filename}') -        finally: -            if zip: -                zip_file.close() - -        log_function("Finished compiling templates") - -    def list_templates( -        self, -        extensions: t.Optional[t.Collection[str]] = None, -        filter_func: t.Optional[t.Callable[[str], bool]] = None, -    ) -> t.List[str]: -        """Returns a list of templates for this environment.  This requires -        that the loader supports the loader's -        :meth:`~BaseLoader.list_templates` method. - -        If there are other files in the template folder besides the -        actual templates, the returned list can be filtered.  There are two -        ways: either `extensions` is set to a list of file extensions for -        templates, or a `filter_func` can be provided which is a callable that -        is passed a template name and should return `True` if it should end up -        in the result list. - -        If the loader does not support that, a :exc:`TypeError` is raised. - -        .. versionadded:: 2.4 -        """ -        assert self.loader is not None, "No loader configured." -        names = self.loader.list_templates() - -        if extensions is not None: -            if filter_func is not None: -                raise TypeError( -                    "either extensions or filter_func can be passed, but not both" -                ) - -            def filter_func(x: str) -> bool: -                return "." in x and x.rsplit(".", 1)[1] in extensions - -        if filter_func is not None: -            names = [name for name in names if filter_func(name)] - -        return names - -    def handle_exception(self, source: t.Optional[str] = None) -> "te.NoReturn": -        """Exception handling helper.  
This is used internally to either raise -        rewritten exceptions or return a rendered traceback for the template. -        """ -        from .debug import rewrite_traceback_stack - -        raise rewrite_traceback_stack(source=source) - -    def join_path(self, template: str, parent: str) -> str: -        """Join a template with the parent.  By default all the lookups are -        relative to the loader root so this method returns the `template` -        parameter unchanged, but if the paths should be relative to the -        parent template, this function can be used to calculate the real -        template name. - -        Subclasses may override this method and implement template path -        joining here. -        """ -        return template - -    @internalcode -    def _load_template( -        self, name: str, globals: t.Optional[t.MutableMapping[str, t.Any]] -    ) -> "Template": -        if self.loader is None: -            raise TypeError("no loader for this environment specified") -        cache_key = (weakref.ref(self.loader), name) -        if self.cache is not None: -            template = self.cache.get(cache_key) -            if template is not None and ( -                not self.auto_reload or template.is_up_to_date -            ): -                # template.globals is a ChainMap, modifying it will only -                # affect the template, not the environment globals. -                if globals: -                    template.globals.update(globals) - -                return template - -        template = self.loader.load(self, name, self.make_globals(globals)) - -        if self.cache is not None: -            self.cache[cache_key] = template -        return template - -    @internalcode -    def get_template( -        self, -        name: t.Union[str, "Template"], -        parent: t.Optional[str] = None, -        globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -    ) -> "Template": -        """Load a template by name with :attr:`loader` and return a -        :class:`Template`. If the template does not exist a -        :exc:`TemplateNotFound` exception is raised. - -        :param name: Name of the template to load. When loading -            templates from the filesystem, "/" is used as the path -            separator, even on Windows. -        :param parent: The name of the parent template importing this -            template. :meth:`join_path` can be used to implement name -            transformations with this. -        :param globals: Extend the environment :attr:`globals` with -            these extra variables available for all renders of this -            template. If the template has already been loaded and -            cached, its globals are updated with any new items. - -        .. versionchanged:: 3.0 -            If a template is loaded from cache, ``globals`` will update -            the template's globals instead of ignoring the new values. - -        .. versionchanged:: 2.4 -            If ``name`` is a :class:`Template` object it is returned -            unchanged. 
-        """ -        if isinstance(name, Template): -            return name -        if parent is not None: -            name = self.join_path(name, parent) - -        return self._load_template(name, globals) - -    @internalcode -    def select_template( -        self, -        names: t.Iterable[t.Union[str, "Template"]], -        parent: t.Optional[str] = None, -        globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -    ) -> "Template": -        """Like :meth:`get_template`, but tries loading multiple names. -        If none of the names can be loaded a :exc:`TemplatesNotFound` -        exception is raised. - -        :param names: List of template names to try loading in order. -        :param parent: The name of the parent template importing this -            template. :meth:`join_path` can be used to implement name -            transformations with this. -        :param globals: Extend the environment :attr:`globals` with -            these extra variables available for all renders of this -            template. If the template has already been loaded and -            cached, its globals are updated with any new items. - -        .. versionchanged:: 3.0 -            If a template is loaded from cache, ``globals`` will update -            the template's globals instead of ignoring the new values. - -        .. versionchanged:: 2.11 -            If ``names`` is :class:`Undefined`, an :exc:`UndefinedError` -            is raised instead. If no templates were found and ``names`` -            contains :class:`Undefined`, the message is more helpful. - -        .. versionchanged:: 2.4 -            If ``names`` contains a :class:`Template` object it is -            returned unchanged. - -        .. versionadded:: 2.3 -        """ -        if isinstance(names, Undefined): -            names._fail_with_undefined_error() - -        if not names: -            raise TemplatesNotFound( -                message="Tried to select from an empty list of templates." -            ) - -        for name in names: -            if isinstance(name, Template): -                return name -            if parent is not None: -                name = self.join_path(name, parent) -            try: -                return self._load_template(name, globals) -            except (TemplateNotFound, UndefinedError): -                pass -        raise TemplatesNotFound(names)  # type: ignore - -    @internalcode -    def get_or_select_template( -        self, -        template_name_or_list: t.Union[ -            str, "Template", t.List[t.Union[str, "Template"]] -        ], -        parent: t.Optional[str] = None, -        globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -    ) -> "Template": -        """Use :meth:`select_template` if an iterable of template names -        is given, or :meth:`get_template` if one name is given. - -        .. 
versionadded:: 2.3 -        """ -        if isinstance(template_name_or_list, (str, Undefined)): -            return self.get_template(template_name_or_list, parent, globals) -        elif isinstance(template_name_or_list, Template): -            return template_name_or_list -        return self.select_template(template_name_or_list, parent, globals) - -    def from_string( -        self, -        source: t.Union[str, nodes.Template], -        globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -        template_class: t.Optional[t.Type["Template"]] = None, -    ) -> "Template": -        """Load a template from a source string without using -        :attr:`loader`. - -        :param source: Jinja source to compile into a template. -        :param globals: Extend the environment :attr:`globals` with -            these extra variables available for all renders of this -            template. If the template has already been loaded and -            cached, its globals are updated with any new items. -        :param template_class: Return an instance of this -            :class:`Template` class. -        """ -        gs = self.make_globals(globals) -        cls = template_class or self.template_class -        return cls.from_code(self, self.compile(source), gs, None) - -    def make_globals( -        self, d: t.Optional[t.MutableMapping[str, t.Any]] -    ) -> t.MutableMapping[str, t.Any]: -        """Make the globals map for a template. Any given template -        globals overlay the environment :attr:`globals`. - -        Returns a :class:`collections.ChainMap`. This allows any changes -        to a template's globals to only affect that template, while -        changes to the environment's globals are still reflected. -        However, avoid modifying any globals after a template is loaded. - -        :param d: Dict of template-specific globals. - -        .. versionchanged:: 3.0 -            Use :class:`collections.ChainMap` to always prevent mutating -            environment globals. -        """ -        if d is None: -            d = {} - -        return ChainMap(d, self.globals) - - -class Template: -    """A compiled template that can be rendered. - -    Use the methods on :class:`Environment` to create or load templates. -    The environment is used to configure how templates are compiled and -    behave. - -    It is also possible to create a template object directly. This is -    not usually recommended. The constructor takes most of the same -    arguments as :class:`Environment`. All templates created with the -    same environment arguments share the same ephemeral ``Environment`` -    instance behind the scenes. - -    A template object should be considered immutable. Modifications on -    the object are not supported. -    """ - -    #: Type of environment to create when creating a template directly -    #: rather than through an existing environment. 
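A short sketch of ``from_string`` together with the ``ChainMap`` behaviour of ``make_globals`` — template globals overlay the environment globals without mutating them (names and values are illustrative)::

    from jinja2 import Environment

    env = Environment()
    env.globals["site_name"] = "Example"

    tmpl = env.from_string("{{ site_name }} / {{ page }}", globals={"page": "Home"})
    print(tmpl.render())              # -> 'Example / Home'
    assert "page" not in env.globals  # the template-level global stays local to the template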
-    environment_class: t.Type[Environment] = Environment - -    environment: Environment -    globals: t.MutableMapping[str, t.Any] -    name: t.Optional[str] -    filename: t.Optional[str] -    blocks: t.Dict[str, t.Callable[[Context], t.Iterator[str]]] -    root_render_func: t.Callable[[Context], t.Iterator[str]] -    _module: t.Optional["TemplateModule"] -    _debug_info: str -    _uptodate: t.Optional[t.Callable[[], bool]] - -    def __new__( -        cls, -        source: t.Union[str, nodes.Template], -        block_start_string: str = BLOCK_START_STRING, -        block_end_string: str = BLOCK_END_STRING, -        variable_start_string: str = VARIABLE_START_STRING, -        variable_end_string: str = VARIABLE_END_STRING, -        comment_start_string: str = COMMENT_START_STRING, -        comment_end_string: str = COMMENT_END_STRING, -        line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, -        line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, -        trim_blocks: bool = TRIM_BLOCKS, -        lstrip_blocks: bool = LSTRIP_BLOCKS, -        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, -        keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, -        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), -        optimized: bool = True, -        undefined: t.Type[Undefined] = Undefined, -        finalize: t.Optional[t.Callable[..., t.Any]] = None, -        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, -        enable_async: bool = False, -    ) -> t.Any:  # it returns a `Template`, but this breaks the sphinx build... -        env = get_spontaneous_environment( -            cls.environment_class,  # type: ignore -            block_start_string, -            block_end_string, -            variable_start_string, -            variable_end_string, -            comment_start_string, -            comment_end_string, -            line_statement_prefix, -            line_comment_prefix, -            trim_blocks, -            lstrip_blocks, -            newline_sequence, -            keep_trailing_newline, -            frozenset(extensions), -            optimized, -            undefined,  # type: ignore -            finalize, -            autoescape, -            None, -            0, -            False, -            None, -            enable_async, -        ) -        return env.from_string(source, template_class=cls) - -    @classmethod -    def from_code( -        cls, -        environment: Environment, -        code: CodeType, -        globals: t.MutableMapping[str, t.Any], -        uptodate: t.Optional[t.Callable[[], bool]] = None, -    ) -> "Template": -        """Creates a template object from compiled code and the globals.  This -        is used by the loaders and environment to create a template object. -        """ -        namespace = {"environment": environment, "__file__": code.co_filename} -        exec(code, namespace) -        rv = cls._from_namespace(environment, namespace, globals) -        rv._uptodate = uptodate -        return rv - -    @classmethod -    def from_module_dict( -        cls, -        environment: Environment, -        module_dict: t.MutableMapping[str, t.Any], -        globals: t.MutableMapping[str, t.Any], -    ) -> "Template": -        """Creates a template object from a module.  This is used by the -        module loader to create a template object. - -        .. 
versionadded:: 2.4 -        """ -        return cls._from_namespace(environment, module_dict, globals) - -    @classmethod -    def _from_namespace( -        cls, -        environment: Environment, -        namespace: t.MutableMapping[str, t.Any], -        globals: t.MutableMapping[str, t.Any], -    ) -> "Template": -        t: "Template" = object.__new__(cls) -        t.environment = environment -        t.globals = globals -        t.name = namespace["name"] -        t.filename = namespace["__file__"] -        t.blocks = namespace["blocks"] - -        # render function and module -        t.root_render_func = namespace["root"] -        t._module = None - -        # debug and loader helpers -        t._debug_info = namespace["debug_info"] -        t._uptodate = None - -        # store the reference -        namespace["environment"] = environment -        namespace["__jinja_template__"] = t - -        return t - -    def render(self, *args: t.Any, **kwargs: t.Any) -> str: -        """This method accepts the same arguments as the `dict` constructor: -        A dict, a dict subclass or some keyword arguments.  If no arguments -        are given the context will be empty.  These two calls do the same:: - -            template.render(knights='that say nih') -            template.render({'knights': 'that say nih'}) - -        This will return the rendered template as a string. -        """ -        if self.environment.is_async: -            import asyncio - -            close = False - -            try: -                loop = asyncio.get_running_loop() -            except RuntimeError: -                loop = asyncio.new_event_loop() -                close = True - -            try: -                return loop.run_until_complete(self.render_async(*args, **kwargs)) -            finally: -                if close: -                    loop.close() - -        ctx = self.new_context(dict(*args, **kwargs)) - -        try: -            return self.environment.concat(self.root_render_func(ctx))  # type: ignore -        except Exception: -            self.environment.handle_exception() - -    async def render_async(self, *args: t.Any, **kwargs: t.Any) -> str: -        """This works similar to :meth:`render` but returns a coroutine -        that when awaited returns the entire rendered template string.  This -        requires the async feature to be enabled. - -        Example usage:: - -            await template.render_async(knights='that say nih; asynchronously') -        """ -        if not self.environment.is_async: -            raise RuntimeError( -                "The environment was not created with async mode enabled." -            ) - -        ctx = self.new_context(dict(*args, **kwargs)) - -        try: -            return self.environment.concat(  # type: ignore -                [n async for n in self.root_render_func(ctx)]  # type: ignore -            ) -        except Exception: -            return self.environment.handle_exception() - -    def stream(self, *args: t.Any, **kwargs: t.Any) -> "TemplateStream": -        """Works exactly like :meth:`generate` but returns a -        :class:`TemplateStream`. -        """ -        return TemplateStream(self.generate(*args, **kwargs)) - -    def generate(self, *args: t.Any, **kwargs: t.Any) -> t.Iterator[str]: -        """For very large templates it can be useful to not render the whole -        template at once but evaluate each statement after another and yield -        piece for piece.  
This method basically does exactly that and returns -        a generator that yields one item after another as strings. - -        It accepts the same arguments as :meth:`render`. -        """ -        if self.environment.is_async: -            import asyncio - -            async def to_list() -> t.List[str]: -                return [x async for x in self.generate_async(*args, **kwargs)] - -            yield from asyncio.run(to_list()) -            return - -        ctx = self.new_context(dict(*args, **kwargs)) - -        try: -            yield from self.root_render_func(ctx) -        except Exception: -            yield self.environment.handle_exception() - -    async def generate_async( -        self, *args: t.Any, **kwargs: t.Any -    ) -> t.AsyncIterator[str]: -        """An async version of :meth:`generate`.  Works very similarly but -        returns an async iterator instead. -        """ -        if not self.environment.is_async: -            raise RuntimeError( -                "The environment was not created with async mode enabled." -            ) - -        ctx = self.new_context(dict(*args, **kwargs)) - -        try: -            async for event in self.root_render_func(ctx):  # type: ignore -                yield event -        except Exception: -            yield self.environment.handle_exception() - -    def new_context( -        self, -        vars: t.Optional[t.Dict[str, t.Any]] = None, -        shared: bool = False, -        locals: t.Optional[t.Mapping[str, t.Any]] = None, -    ) -> Context: -        """Create a new :class:`Context` for this template.  The vars -        provided will be passed to the template.  Per default the globals -        are added to the context.  If shared is set to `True` the data -        is passed as is to the context without adding the globals. - -        `locals` can be a dict of local variables for internal usage. -        """ -        return new_context( -            self.environment, self.name, self.blocks, vars, shared, self.globals, locals -        ) - -    def make_module( -        self, -        vars: t.Optional[t.Dict[str, t.Any]] = None, -        shared: bool = False, -        locals: t.Optional[t.Mapping[str, t.Any]] = None, -    ) -> "TemplateModule": -        """This method works like the :attr:`module` attribute when called -        without arguments but it will evaluate the template on every call -        rather than caching it.  It's also possible to provide -        a dict which is then used as context.  The arguments are the same -        as for the :meth:`new_context` method. -        """ -        ctx = self.new_context(vars, shared, locals) -        return TemplateModule(self, ctx) - -    async def make_module_async( -        self, -        vars: t.Optional[t.Dict[str, t.Any]] = None, -        shared: bool = False, -        locals: t.Optional[t.Mapping[str, t.Any]] = None, -    ) -> "TemplateModule": -        """As template module creation can invoke template code for -        asynchronous executions this method must be used instead of the -        normal :meth:`make_module` one.  Likewise the module attribute -        becomes unavailable in async mode. 
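A minimal async-mode sketch tying the methods above together — the environment must be created with ``enable_async=True``, and rendering is awaited::

    import asyncio
    from jinja2 import Environment

    async def main() -> None:
        env = Environment(enable_async=True)
        tmpl = env.from_string("Hello {{ name }}!")
        print(await tmpl.render_async(name="async world"))

    asyncio.run(main())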
-        """ -        ctx = self.new_context(vars, shared, locals) -        return TemplateModule( -            self, ctx, [x async for x in self.root_render_func(ctx)]  # type: ignore -        ) - -    @internalcode -    def _get_default_module(self, ctx: t.Optional[Context] = None) -> "TemplateModule": -        """If a context is passed in, this means that the template was -        imported. Imported templates have access to the current -        template's globals by default, but they can only be accessed via -        the context during runtime. - -        If there are new globals, we need to create a new module because -        the cached module is already rendered and will not have access -        to globals from the current context. This new module is not -        cached because the template can be imported elsewhere, and it -        should have access to only the current template's globals. -        """ -        if self.environment.is_async: -            raise RuntimeError("Module is not available in async mode.") - -        if ctx is not None: -            keys = ctx.globals_keys - self.globals.keys() - -            if keys: -                return self.make_module({k: ctx.parent[k] for k in keys}) - -        if self._module is None: -            self._module = self.make_module() - -        return self._module - -    async def _get_default_module_async( -        self, ctx: t.Optional[Context] = None -    ) -> "TemplateModule": -        if ctx is not None: -            keys = ctx.globals_keys - self.globals.keys() - -            if keys: -                return await self.make_module_async({k: ctx.parent[k] for k in keys}) - -        if self._module is None: -            self._module = await self.make_module_async() - -        return self._module - -    @property -    def module(self) -> "TemplateModule": -        """The template as module.  This is used for imports in the -        template runtime but is also useful if one wants to access -        exported template variables from the Python layer: - -        >>> t = Template('{% macro foo() %}42{% endmacro %}23') -        >>> str(t.module) -        '23' -        >>> t.module.foo() == u'42' -        True - -        This attribute is not available if async mode is enabled. -        """ -        return self._get_default_module() - -    def get_corresponding_lineno(self, lineno: int) -> int: -        """Return the source line number of a line number in the -        generated bytecode as they are not in sync. -        """ -        for template_line, code_line in reversed(self.debug_info): -            if code_line <= lineno: -                return template_line -        return 1 - -    @property -    def is_up_to_date(self) -> bool: -        """If this variable is `False` there is a newer version available.""" -        if self._uptodate is None: -            return True -        return self._uptodate() - -    @property -    def debug_info(self) -> t.List[t.Tuple[int, int]]: -        """The debug info mapping.""" -        if self._debug_info: -            return [ -                tuple(map(int, x.split("=")))  # type: ignore -                for x in self._debug_info.split("&") -            ] - -        return [] - -    def __repr__(self) -> str: -        if self.name is None: -            name = f"memory:{id(self):x}" -        else: -            name = repr(self.name) -        return f"<{type(self).__name__} {name}>" - - -class TemplateModule: -    """Represents an imported template.  
All the exported names of the -    template are available as attributes on this object.  Additionally -    converting it into a string renders the contents. -    """ - -    def __init__( -        self, -        template: Template, -        context: Context, -        body_stream: t.Optional[t.Iterable[str]] = None, -    ) -> None: -        if body_stream is None: -            if context.environment.is_async: -                raise RuntimeError( -                    "Async mode requires a body stream to be passed to" -                    " a template module. Use the async methods of the" -                    " API you are using." -                ) - -            body_stream = list(template.root_render_func(context)) - -        self._body_stream = body_stream -        self.__dict__.update(context.get_exported()) -        self.__name__ = template.name - -    def __html__(self) -> Markup: -        return Markup(concat(self._body_stream)) - -    def __str__(self) -> str: -        return concat(self._body_stream) - -    def __repr__(self) -> str: -        if self.__name__ is None: -            name = f"memory:{id(self):x}" -        else: -            name = repr(self.__name__) -        return f"<{type(self).__name__} {name}>" - - -class TemplateExpression: -    """The :meth:`jinja2.Environment.compile_expression` method returns an -    instance of this object.  It encapsulates the expression-like access -    to the template with an expression it wraps. -    """ - -    def __init__(self, template: Template, undefined_to_none: bool) -> None: -        self._template = template -        self._undefined_to_none = undefined_to_none - -    def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Optional[t.Any]: -        context = self._template.new_context(dict(*args, **kwargs)) -        consume(self._template.root_render_func(context)) -        rv = context.vars["result"] -        if self._undefined_to_none and isinstance(rv, Undefined): -            rv = None -        return rv - - -class TemplateStream: -    """A template stream works pretty much like an ordinary python generator -    but it can buffer multiple items to reduce the number of total iterations. -    Per default the output is unbuffered which means that for every unbuffered -    instruction in the template one string is yielded. - -    If buffering is enabled with a buffer size of 5, five items are combined -    into a new string.  This is mainly useful if you are streaming -    big templates to a client via WSGI which flushes after each iteration. -    """ - -    def __init__(self, gen: t.Iterator[str]) -> None: -        self._gen = gen -        self.disable_buffering() - -    def dump( -        self, -        fp: t.Union[str, t.IO], -        encoding: t.Optional[str] = None, -        errors: t.Optional[str] = "strict", -    ) -> None: -        """Dump the complete stream into a file or file-like object. -        Per default strings are written, if you want to encode -        before writing specify an `encoding`. 
- -        Example usage:: - -            Template('Hello {{ name }}!').stream(name='foo').dump('hello.html') -        """ -        close = False - -        if isinstance(fp, str): -            if encoding is None: -                encoding = "utf-8" - -            fp = open(fp, "wb") -            close = True -        try: -            if encoding is not None: -                iterable = (x.encode(encoding, errors) for x in self)  # type: ignore -            else: -                iterable = self  # type: ignore - -            if hasattr(fp, "writelines"): -                fp.writelines(iterable) -            else: -                for item in iterable: -                    fp.write(item) -        finally: -            if close: -                fp.close() - -    def disable_buffering(self) -> None: -        """Disable the output buffering.""" -        self._next = partial(next, self._gen) -        self.buffered = False - -    def _buffered_generator(self, size: int) -> t.Iterator[str]: -        buf: t.List[str] = [] -        c_size = 0 -        push = buf.append - -        while True: -            try: -                while c_size < size: -                    c = next(self._gen) -                    push(c) -                    if c: -                        c_size += 1 -            except StopIteration: -                if not c_size: -                    return -            yield concat(buf) -            del buf[:] -            c_size = 0 - -    def enable_buffering(self, size: int = 5) -> None: -        """Enable buffering.  Buffer `size` items before yielding them.""" -        if size <= 1: -            raise ValueError("buffer size too small") - -        self.buffered = True -        self._next = partial(next, self._buffered_generator(size)) - -    def __iter__(self) -> "TemplateStream": -        return self - -    def __next__(self) -> str: -        return self._next()  # type: ignore - - -# hook in default template class.  if anyone reads this comment: ignore that -# it's possible to use custom templates ;-) -Environment.template_class = Template diff --git a/venv/lib/python3.11/site-packages/jinja2/exceptions.py b/venv/lib/python3.11/site-packages/jinja2/exceptions.py deleted file mode 100644 index 082ebe8..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/exceptions.py +++ /dev/null @@ -1,166 +0,0 @@ -import typing as t - -if t.TYPE_CHECKING: -    from .runtime import Undefined - - -class TemplateError(Exception): -    """Baseclass for all template errors.""" - -    def __init__(self, message: t.Optional[str] = None) -> None: -        super().__init__(message) - -    @property -    def message(self) -> t.Optional[str]: -        return self.args[0] if self.args else None - - -class TemplateNotFound(IOError, LookupError, TemplateError): -    """Raised if a template does not exist. - -    .. versionchanged:: 2.11 -        If the given name is :class:`Undefined` and no message was -        provided, an :exc:`UndefinedError` is raised. -    """ - -    # Silence the Python warning about message being deprecated since -    # it's not valid here. 
-    message: t.Optional[str] = None - -    def __init__( -        self, -        name: t.Optional[t.Union[str, "Undefined"]], -        message: t.Optional[str] = None, -    ) -> None: -        IOError.__init__(self, name) - -        if message is None: -            from .runtime import Undefined - -            if isinstance(name, Undefined): -                name._fail_with_undefined_error() - -            message = name - -        self.message = message -        self.name = name -        self.templates = [name] - -    def __str__(self) -> str: -        return str(self.message) - - -class TemplatesNotFound(TemplateNotFound): -    """Like :class:`TemplateNotFound` but raised if multiple templates -    are selected.  This is a subclass of :class:`TemplateNotFound` -    exception, so just catching the base exception will catch both. - -    .. versionchanged:: 2.11 -        If a name in the list of names is :class:`Undefined`, a message -        about it being undefined is shown rather than the empty string. - -    .. versionadded:: 2.2 -    """ - -    def __init__( -        self, -        names: t.Sequence[t.Union[str, "Undefined"]] = (), -        message: t.Optional[str] = None, -    ) -> None: -        if message is None: -            from .runtime import Undefined - -            parts = [] - -            for name in names: -                if isinstance(name, Undefined): -                    parts.append(name._undefined_message) -                else: -                    parts.append(name) - -            parts_str = ", ".join(map(str, parts)) -            message = f"none of the templates given were found: {parts_str}" - -        super().__init__(names[-1] if names else None, message) -        self.templates = list(names) - - -class TemplateSyntaxError(TemplateError): -    """Raised to tell the user that there is a problem with the template.""" - -    def __init__( -        self, -        message: str, -        lineno: int, -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -    ) -> None: -        super().__init__(message) -        self.lineno = lineno -        self.name = name -        self.filename = filename -        self.source: t.Optional[str] = None - -        # this is set to True if the debug.translate_syntax_error -        # function translated the syntax error into a new traceback -        self.translated = False - -    def __str__(self) -> str: -        # for translated errors we only return the message -        if self.translated: -            return t.cast(str, self.message) - -        # otherwise attach some stuff -        location = f"line {self.lineno}" -        name = self.filename or self.name -        if name: -            location = f'File "{name}", {location}' -        lines = [t.cast(str, self.message), "  " + location] - -        # if the source is set, add the line to the output -        if self.source is not None: -            try: -                line = self.source.splitlines()[self.lineno - 1] -            except IndexError: -                pass -            else: -                lines.append("    " + line.strip()) - -        return "\n".join(lines) - -    def __reduce__(self):  # type: ignore -        # https://bugs.python.org/issue1692335 Exceptions that take -        # multiple required arguments have problems with pickling. 
-        # Without this, raises TypeError: __init__() missing 1 required -        # positional argument: 'lineno' -        return self.__class__, (self.message, self.lineno, self.name, self.filename) - - -class TemplateAssertionError(TemplateSyntaxError): -    """Like a template syntax error, but covers cases where something in the -    template caused an error at compile time that wasn't necessarily caused -    by a syntax error.  However it's a direct subclass of -    :exc:`TemplateSyntaxError` and has the same attributes. -    """ - - -class TemplateRuntimeError(TemplateError): -    """A generic runtime error in the template engine.  Under some situations -    Jinja may raise this exception. -    """ - - -class UndefinedError(TemplateRuntimeError): -    """Raised if a template tries to operate on :class:`Undefined`.""" - - -class SecurityError(TemplateRuntimeError): -    """Raised if a template tries to do something insecure if the -    sandbox is enabled. -    """ - - -class FilterArgumentError(TemplateRuntimeError): -    """This error is raised if a filter was called with inappropriate -    arguments -    """ diff --git a/venv/lib/python3.11/site-packages/jinja2/ext.py b/venv/lib/python3.11/site-packages/jinja2/ext.py deleted file mode 100644 index fade1fa..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/ext.py +++ /dev/null @@ -1,869 +0,0 @@ -"""Extension API for adding custom tags and behavior.""" -import pprint -import re -import typing as t - -from markupsafe import Markup - -from . import defaults -from . import nodes -from .environment import Environment -from .exceptions import TemplateAssertionError -from .exceptions import TemplateSyntaxError -from .runtime import concat  # type: ignore -from .runtime import Context -from .runtime import Undefined -from .utils import import_string -from .utils import pass_context - -if t.TYPE_CHECKING: -    import typing_extensions as te -    from .lexer import Token -    from .lexer import TokenStream -    from .parser import Parser - -    class _TranslationsBasic(te.Protocol): -        def gettext(self, message: str) -> str: -            ... - -        def ngettext(self, singular: str, plural: str, n: int) -> str: -            pass - -    class _TranslationsContext(_TranslationsBasic): -        def pgettext(self, context: str, message: str) -> str: -            ... - -        def npgettext(self, context: str, singular: str, plural: str, n: int) -> str: -            ... - -    _SupportedTranslations = t.Union[_TranslationsBasic, _TranslationsContext] - - -# I18N functions available in Jinja templates. If the I18N library -# provides ugettext, it will be assigned to gettext. -GETTEXT_FUNCTIONS: t.Tuple[str, ...] = ( -    "_", -    "gettext", -    "ngettext", -    "pgettext", -    "npgettext", -) -_ws_re = re.compile(r"\s*\n\s*") - - -class Extension: -    """Extensions can be used to add extra functionality to the Jinja template -    system at the parser level.  Custom extensions are bound to an environment -    but may not store environment specific data on `self`.  The reason for -    this is that an extension can be bound to another environment (for -    overlays) by creating a copy and reassigning the `environment` attribute. - -    As extensions are created by the environment they cannot accept any -    arguments for configuration.  One may want to work around that by using -    a factory function, but that is not possible as extensions are identified -    by their import name.  
The correct way to configure the extension is -    storing the configuration values on the environment.  Because this way the -    environment ends up acting as central configuration storage the -    attributes may clash which is why extensions have to ensure that the names -    they choose for configuration are not too generic.  ``prefix`` for example -    is a terrible name, ``fragment_cache_prefix`` on the other hand is a good -    name as includes the name of the extension (fragment cache). -    """ - -    identifier: t.ClassVar[str] - -    def __init_subclass__(cls) -> None: -        cls.identifier = f"{cls.__module__}.{cls.__name__}" - -    #: if this extension parses this is the list of tags it's listening to. -    tags: t.Set[str] = set() - -    #: the priority of that extension.  This is especially useful for -    #: extensions that preprocess values.  A lower value means higher -    #: priority. -    #: -    #: .. versionadded:: 2.4 -    priority = 100 - -    def __init__(self, environment: Environment) -> None: -        self.environment = environment - -    def bind(self, environment: Environment) -> "Extension": -        """Create a copy of this extension bound to another environment.""" -        rv = object.__new__(self.__class__) -        rv.__dict__.update(self.__dict__) -        rv.environment = environment -        return rv - -    def preprocess( -        self, source: str, name: t.Optional[str], filename: t.Optional[str] = None -    ) -> str: -        """This method is called before the actual lexing and can be used to -        preprocess the source.  The `filename` is optional.  The return value -        must be the preprocessed source. -        """ -        return source - -    def filter_stream( -        self, stream: "TokenStream" -    ) -> t.Union["TokenStream", t.Iterable["Token"]]: -        """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used -        to filter tokens returned.  This method has to return an iterable of -        :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a -        :class:`~jinja2.lexer.TokenStream`. -        """ -        return stream - -    def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]: -        """If any of the :attr:`tags` matched this method is called with the -        parser as first argument.  The token the parser stream is pointing at -        is the name token that matched.  This method has to return one or a -        list of multiple nodes. -        """ -        raise NotImplementedError() - -    def attr( -        self, name: str, lineno: t.Optional[int] = None -    ) -> nodes.ExtensionAttribute: -        """Return an attribute node for the current extension.  This is useful -        to pass constants on extensions to generated template code. - -        :: - -            self.attr('_my_attribute', lineno=lineno) -        """ -        return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno) - -    def call_method( -        self, -        name: str, -        args: t.Optional[t.List[nodes.Expr]] = None, -        kwargs: t.Optional[t.List[nodes.Keyword]] = None, -        dyn_args: t.Optional[nodes.Expr] = None, -        dyn_kwargs: t.Optional[nodes.Expr] = None, -        lineno: t.Optional[int] = None, -    ) -> nodes.Call: -        """Call a method of the extension.  This is a shortcut for -        :meth:`attr` + :class:`jinja2.nodes.Call`. 
-        """ -        if args is None: -            args = [] -        if kwargs is None: -            kwargs = [] -        return nodes.Call( -            self.attr(name, lineno=lineno), -            args, -            kwargs, -            dyn_args, -            dyn_kwargs, -            lineno=lineno, -        ) - - -@pass_context -def _gettext_alias( -    __context: Context, *args: t.Any, **kwargs: t.Any -) -> t.Union[t.Any, Undefined]: -    return __context.call(__context.resolve("gettext"), *args, **kwargs) - - -def _make_new_gettext(func: t.Callable[[str], str]) -> t.Callable[..., str]: -    @pass_context -    def gettext(__context: Context, __string: str, **variables: t.Any) -> str: -        rv = __context.call(func, __string) -        if __context.eval_ctx.autoescape: -            rv = Markup(rv) -        # Always treat as a format string, even if there are no -        # variables. This makes translation strings more consistent -        # and predictable. This requires escaping -        return rv % variables  # type: ignore - -    return gettext - - -def _make_new_ngettext(func: t.Callable[[str, str, int], str]) -> t.Callable[..., str]: -    @pass_context -    def ngettext( -        __context: Context, -        __singular: str, -        __plural: str, -        __num: int, -        **variables: t.Any, -    ) -> str: -        variables.setdefault("num", __num) -        rv = __context.call(func, __singular, __plural, __num) -        if __context.eval_ctx.autoescape: -            rv = Markup(rv) -        # Always treat as a format string, see gettext comment above. -        return rv % variables  # type: ignore - -    return ngettext - - -def _make_new_pgettext(func: t.Callable[[str, str], str]) -> t.Callable[..., str]: -    @pass_context -    def pgettext( -        __context: Context, __string_ctx: str, __string: str, **variables: t.Any -    ) -> str: -        variables.setdefault("context", __string_ctx) -        rv = __context.call(func, __string_ctx, __string) - -        if __context.eval_ctx.autoescape: -            rv = Markup(rv) - -        # Always treat as a format string, see gettext comment above. -        return rv % variables  # type: ignore - -    return pgettext - - -def _make_new_npgettext( -    func: t.Callable[[str, str, str, int], str] -) -> t.Callable[..., str]: -    @pass_context -    def npgettext( -        __context: Context, -        __string_ctx: str, -        __singular: str, -        __plural: str, -        __num: int, -        **variables: t.Any, -    ) -> str: -        variables.setdefault("context", __string_ctx) -        variables.setdefault("num", __num) -        rv = __context.call(func, __string_ctx, __singular, __plural, __num) - -        if __context.eval_ctx.autoescape: -            rv = Markup(rv) - -        # Always treat as a format string, see gettext comment above. -        return rv % variables  # type: ignore - -    return npgettext - - -class InternationalizationExtension(Extension): -    """This extension adds gettext support to Jinja.""" - -    tags = {"trans"} - -    # TODO: the i18n extension is currently reevaluating values in a few -    # situations.  Take this example: -    #   {% trans count=something() %}{{ count }} foo{% pluralize -    #     %}{{ count }} fooss{% endtrans %} -    # something is called twice here.  One time for the gettext value and -    # the other time for the n-parameter of the ngettext function. 
- -    def __init__(self, environment: Environment) -> None: -        super().__init__(environment) -        environment.globals["_"] = _gettext_alias -        environment.extend( -            install_gettext_translations=self._install, -            install_null_translations=self._install_null, -            install_gettext_callables=self._install_callables, -            uninstall_gettext_translations=self._uninstall, -            extract_translations=self._extract, -            newstyle_gettext=False, -        ) - -    def _install( -        self, translations: "_SupportedTranslations", newstyle: t.Optional[bool] = None -    ) -> None: -        # ugettext and ungettext are preferred in case the I18N library -        # is providing compatibility with older Python versions. -        gettext = getattr(translations, "ugettext", None) -        if gettext is None: -            gettext = translations.gettext -        ngettext = getattr(translations, "ungettext", None) -        if ngettext is None: -            ngettext = translations.ngettext - -        pgettext = getattr(translations, "pgettext", None) -        npgettext = getattr(translations, "npgettext", None) -        self._install_callables( -            gettext, ngettext, newstyle=newstyle, pgettext=pgettext, npgettext=npgettext -        ) - -    def _install_null(self, newstyle: t.Optional[bool] = None) -> None: -        import gettext - -        translations = gettext.NullTranslations() - -        if hasattr(translations, "pgettext"): -            # Python < 3.8 -            pgettext = translations.pgettext -        else: - -            def pgettext(c: str, s: str) -> str: -                return s - -        if hasattr(translations, "npgettext"): -            npgettext = translations.npgettext -        else: - -            def npgettext(c: str, s: str, p: str, n: int) -> str: -                return s if n == 1 else p - -        self._install_callables( -            gettext=translations.gettext, -            ngettext=translations.ngettext, -            newstyle=newstyle, -            pgettext=pgettext, -            npgettext=npgettext, -        ) - -    def _install_callables( -        self, -        gettext: t.Callable[[str], str], -        ngettext: t.Callable[[str, str, int], str], -        newstyle: t.Optional[bool] = None, -        pgettext: t.Optional[t.Callable[[str, str], str]] = None, -        npgettext: t.Optional[t.Callable[[str, str, str, int], str]] = None, -    ) -> None: -        if newstyle is not None: -            self.environment.newstyle_gettext = newstyle  # type: ignore -        if self.environment.newstyle_gettext:  # type: ignore -            gettext = _make_new_gettext(gettext) -            ngettext = _make_new_ngettext(ngettext) - -            if pgettext is not None: -                pgettext = _make_new_pgettext(pgettext) - -            if npgettext is not None: -                npgettext = _make_new_npgettext(npgettext) - -        self.environment.globals.update( -            gettext=gettext, ngettext=ngettext, pgettext=pgettext, npgettext=npgettext -        ) - -    def _uninstall(self, translations: "_SupportedTranslations") -> None: -        for key in ("gettext", "ngettext", "pgettext", "npgettext"): -            self.environment.globals.pop(key, None) - -    def _extract( -        self, -        source: t.Union[str, nodes.Template], -        gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS, -    ) -> t.Iterator[ -        t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]] -  
  ]: -        if isinstance(source, str): -            source = self.environment.parse(source) -        return extract_from_ast(source, gettext_functions) - -    def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]: -        """Parse a translatable tag.""" -        lineno = next(parser.stream).lineno - -        context = None -        context_token = parser.stream.next_if("string") - -        if context_token is not None: -            context = context_token.value - -        # find all the variables referenced.  Additionally a variable can be -        # defined in the body of the trans block too, but this is checked at -        # a later state. -        plural_expr: t.Optional[nodes.Expr] = None -        plural_expr_assignment: t.Optional[nodes.Assign] = None -        num_called_num = False -        variables: t.Dict[str, nodes.Expr] = {} -        trimmed = None -        while parser.stream.current.type != "block_end": -            if variables: -                parser.stream.expect("comma") - -            # skip colon for python compatibility -            if parser.stream.skip_if("colon"): -                break - -            token = parser.stream.expect("name") -            if token.value in variables: -                parser.fail( -                    f"translatable variable {token.value!r} defined twice.", -                    token.lineno, -                    exc=TemplateAssertionError, -                ) - -            # expressions -            if parser.stream.current.type == "assign": -                next(parser.stream) -                variables[token.value] = var = parser.parse_expression() -            elif trimmed is None and token.value in ("trimmed", "notrimmed"): -                trimmed = token.value == "trimmed" -                continue -            else: -                variables[token.value] = var = nodes.Name(token.value, "load") - -            if plural_expr is None: -                if isinstance(var, nodes.Call): -                    plural_expr = nodes.Name("_trans", "load") -                    variables[token.value] = plural_expr -                    plural_expr_assignment = nodes.Assign( -                        nodes.Name("_trans", "store"), var -                    ) -                else: -                    plural_expr = var -                num_called_num = token.value == "num" - -        parser.stream.expect("block_end") - -        plural = None -        have_plural = False -        referenced = set() - -        # now parse until endtrans or pluralize -        singular_names, singular = self._parse_block(parser, True) -        if singular_names: -            referenced.update(singular_names) -            if plural_expr is None: -                plural_expr = nodes.Name(singular_names[0], "load") -                num_called_num = singular_names[0] == "num" - -        # if we have a pluralize block, we parse that too -        if parser.stream.current.test("name:pluralize"): -            have_plural = True -            next(parser.stream) -            if parser.stream.current.type != "block_end": -                token = parser.stream.expect("name") -                if token.value not in variables: -                    parser.fail( -                        f"unknown variable {token.value!r} for pluralization", -                        token.lineno, -                        exc=TemplateAssertionError, -                    ) -                plural_expr = variables[token.value] -                num_called_num = token.value == "num" 
-            parser.stream.expect("block_end") -            plural_names, plural = self._parse_block(parser, False) -            next(parser.stream) -            referenced.update(plural_names) -        else: -            next(parser.stream) - -        # register free names as simple name expressions -        for name in referenced: -            if name not in variables: -                variables[name] = nodes.Name(name, "load") - -        if not have_plural: -            plural_expr = None -        elif plural_expr is None: -            parser.fail("pluralize without variables", lineno) - -        if trimmed is None: -            trimmed = self.environment.policies["ext.i18n.trimmed"] -        if trimmed: -            singular = self._trim_whitespace(singular) -            if plural: -                plural = self._trim_whitespace(plural) - -        node = self._make_node( -            singular, -            plural, -            context, -            variables, -            plural_expr, -            bool(referenced), -            num_called_num and have_plural, -        ) -        node.set_lineno(lineno) -        if plural_expr_assignment is not None: -            return [plural_expr_assignment, node] -        else: -            return node - -    def _trim_whitespace(self, string: str, _ws_re: t.Pattern[str] = _ws_re) -> str: -        return _ws_re.sub(" ", string.strip()) - -    def _parse_block( -        self, parser: "Parser", allow_pluralize: bool -    ) -> t.Tuple[t.List[str], str]: -        """Parse until the next block tag with a given name.""" -        referenced = [] -        buf = [] - -        while True: -            if parser.stream.current.type == "data": -                buf.append(parser.stream.current.value.replace("%", "%%")) -                next(parser.stream) -            elif parser.stream.current.type == "variable_begin": -                next(parser.stream) -                name = parser.stream.expect("name").value -                referenced.append(name) -                buf.append(f"%({name})s") -                parser.stream.expect("variable_end") -            elif parser.stream.current.type == "block_begin": -                next(parser.stream) -                block_name = ( -                    parser.stream.current.value -                    if parser.stream.current.type == "name" -                    else None -                ) -                if block_name == "endtrans": -                    break -                elif block_name == "pluralize": -                    if allow_pluralize: -                        break -                    parser.fail( -                        "a translatable section can have only one pluralize section" -                    ) -                elif block_name == "trans": -                    parser.fail( -                        "trans blocks can't be nested; did you mean `endtrans`?" 
-                    ) -                parser.fail( -                    f"control structures in translatable sections are not allowed; " -                    f"saw `{block_name}`" -                ) -            elif parser.stream.eos: -                parser.fail("unclosed translation block") -            else: -                raise RuntimeError("internal parser error") - -        return referenced, concat(buf) - -    def _make_node( -        self, -        singular: str, -        plural: t.Optional[str], -        context: t.Optional[str], -        variables: t.Dict[str, nodes.Expr], -        plural_expr: t.Optional[nodes.Expr], -        vars_referenced: bool, -        num_called_num: bool, -    ) -> nodes.Output: -        """Generates a useful node from the data provided.""" -        newstyle = self.environment.newstyle_gettext  # type: ignore -        node: nodes.Expr - -        # no variables referenced?  no need to escape for old style -        # gettext invocations only if there are vars. -        if not vars_referenced and not newstyle: -            singular = singular.replace("%%", "%") -            if plural: -                plural = plural.replace("%%", "%") - -        func_name = "gettext" -        func_args: t.List[nodes.Expr] = [nodes.Const(singular)] - -        if context is not None: -            func_args.insert(0, nodes.Const(context)) -            func_name = f"p{func_name}" - -        if plural_expr is not None: -            func_name = f"n{func_name}" -            func_args.extend((nodes.Const(plural), plural_expr)) - -        node = nodes.Call(nodes.Name(func_name, "load"), func_args, [], None, None) - -        # in case newstyle gettext is used, the method is powerful -        # enough to handle the variable expansion and autoescape -        # handling itself -        if newstyle: -            for key, value in variables.items(): -                # the function adds that later anyways in case num was -                # called num, so just skip it. -                if num_called_num and key == "num": -                    continue -                node.kwargs.append(nodes.Keyword(key, value)) - -        # otherwise do that here -        else: -            # mark the return value as safe if we are in an -            # environment with autoescaping turned on -            node = nodes.MarkSafeIfAutoescape(node) -            if variables: -                node = nodes.Mod( -                    node, -                    nodes.Dict( -                        [ -                            nodes.Pair(nodes.Const(key), value) -                            for key, value in variables.items() -                        ] -                    ), -                ) -        return nodes.Output([node]) - - -class ExprStmtExtension(Extension): -    """Adds a `do` tag to Jinja that works like the print statement just -    that it doesn't print the return value. 
-    """ - -    tags = {"do"} - -    def parse(self, parser: "Parser") -> nodes.ExprStmt: -        node = nodes.ExprStmt(lineno=next(parser.stream).lineno) -        node.node = parser.parse_tuple() -        return node - - -class LoopControlExtension(Extension): -    """Adds break and continue to the template engine.""" - -    tags = {"break", "continue"} - -    def parse(self, parser: "Parser") -> t.Union[nodes.Break, nodes.Continue]: -        token = next(parser.stream) -        if token.value == "break": -            return nodes.Break(lineno=token.lineno) -        return nodes.Continue(lineno=token.lineno) - - -class DebugExtension(Extension): -    """A ``{% debug %}`` tag that dumps the available variables, -    filters, and tests. - -    .. code-block:: html+jinja - -        <pre>{% debug %}</pre> - -    .. code-block:: text - -        {'context': {'cycler': <class 'jinja2.utils.Cycler'>, -                     ..., -                     'namespace': <class 'jinja2.utils.Namespace'>}, -         'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd', -                     ..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'], -         'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined', -                   ..., 'odd', 'sameas', 'sequence', 'string', 'undefined', 'upper']} - -    .. versionadded:: 2.11.0 -    """ - -    tags = {"debug"} - -    def parse(self, parser: "Parser") -> nodes.Output: -        lineno = parser.stream.expect("name:debug").lineno -        context = nodes.ContextReference() -        result = self.call_method("_render", [context], lineno=lineno) -        return nodes.Output([result], lineno=lineno) - -    def _render(self, context: Context) -> str: -        result = { -            "context": context.get_all(), -            "filters": sorted(self.environment.filters.keys()), -            "tests": sorted(self.environment.tests.keys()), -        } - -        # Set the depth since the intent is to show the top few names. -        return pprint.pformat(result, depth=3, compact=True) - - -def extract_from_ast( -    ast: nodes.Template, -    gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS, -    babel_style: bool = True, -) -> t.Iterator[ -    t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]] -]: -    """Extract localizable strings from the given template node.  Per -    default this function returns matches in babel style that means non string -    parameters as well as keyword arguments are returned as `None`.  This -    allows Babel to figure out what you really meant if you are using -    gettext functions that allow keyword arguments for placeholder expansion. -    If you don't want that behavior set the `babel_style` parameter to `False` -    which causes only strings to be returned and parameters are always stored -    in tuples.  As a consequence invalid gettext calls (calls without a single -    string parameter or string parameters after non-string parameters) are -    skipped. 
- -    This example explains the behavior: - -    >>> from jinja2 import Environment -    >>> env = Environment() -    >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}') -    >>> list(extract_from_ast(node)) -    [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))] -    >>> list(extract_from_ast(node, babel_style=False)) -    [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))] - -    For every string found this function yields a ``(lineno, function, -    message)`` tuple, where: - -    * ``lineno`` is the number of the line on which the string was found, -    * ``function`` is the name of the ``gettext`` function used (if the -      string was extracted from embedded Python code), and -    *   ``message`` is the string, or a tuple of strings for functions -         with multiple string arguments. - -    This extraction function operates on the AST and is because of that unable -    to extract any comments.  For comment support you have to use the babel -    extraction interface or extract comments yourself. -    """ -    out: t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]] - -    for node in ast.find_all(nodes.Call): -        if ( -            not isinstance(node.node, nodes.Name) -            or node.node.name not in gettext_functions -        ): -            continue - -        strings: t.List[t.Optional[str]] = [] - -        for arg in node.args: -            if isinstance(arg, nodes.Const) and isinstance(arg.value, str): -                strings.append(arg.value) -            else: -                strings.append(None) - -        for _ in node.kwargs: -            strings.append(None) -        if node.dyn_args is not None: -            strings.append(None) -        if node.dyn_kwargs is not None: -            strings.append(None) - -        if not babel_style: -            out = tuple(x for x in strings if x is not None) - -            if not out: -                continue -        else: -            if len(strings) == 1: -                out = strings[0] -            else: -                out = tuple(strings) - -        yield node.lineno, node.node.name, out - - -class _CommentFinder: -    """Helper class to find comments in a token stream.  Can only -    find comments for gettext calls forwards.  Once the comment -    from line 4 is found, a comment for line 1 will not return a -    usable value. 
-    """ - -    def __init__( -        self, tokens: t.Sequence[t.Tuple[int, str, str]], comment_tags: t.Sequence[str] -    ) -> None: -        self.tokens = tokens -        self.comment_tags = comment_tags -        self.offset = 0 -        self.last_lineno = 0 - -    def find_backwards(self, offset: int) -> t.List[str]: -        try: -            for _, token_type, token_value in reversed( -                self.tokens[self.offset : offset] -            ): -                if token_type in ("comment", "linecomment"): -                    try: -                        prefix, comment = token_value.split(None, 1) -                    except ValueError: -                        continue -                    if prefix in self.comment_tags: -                        return [comment.rstrip()] -            return [] -        finally: -            self.offset = offset - -    def find_comments(self, lineno: int) -> t.List[str]: -        if not self.comment_tags or self.last_lineno > lineno: -            return [] -        for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]): -            if token_lineno > lineno: -                return self.find_backwards(self.offset + idx) -        return self.find_backwards(len(self.tokens)) - - -def babel_extract( -    fileobj: t.BinaryIO, -    keywords: t.Sequence[str], -    comment_tags: t.Sequence[str], -    options: t.Dict[str, t.Any], -) -> t.Iterator[ -    t.Tuple[ -        int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]], t.List[str] -    ] -]: -    """Babel extraction method for Jinja templates. - -    .. versionchanged:: 2.3 -       Basic support for translation comments was added.  If `comment_tags` -       is now set to a list of keywords for extraction, the extractor will -       try to find the best preceding comment that begins with one of the -       keywords.  For best results, make sure to not have more than one -       gettext call in one line of code and the matching comment in the -       same line or the line before. - -    .. versionchanged:: 2.5.1 -       The `newstyle_gettext` flag can be set to `True` to enable newstyle -       gettext calls. - -    .. versionchanged:: 2.7 -       A `silent` option can now be provided.  If set to `False` template -       syntax errors are propagated instead of being ignored. - -    :param fileobj: the file-like object the messages should be extracted from -    :param keywords: a list of keywords (i.e. function names) that should be -                     recognized as translation functions -    :param comment_tags: a list of translator tags to search for and include -                         in the results. -    :param options: a dictionary of additional options (optional) -    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples. 
-             (comments will be empty currently) -    """ -    extensions: t.Dict[t.Type[Extension], None] = {} - -    for extension_name in options.get("extensions", "").split(","): -        extension_name = extension_name.strip() - -        if not extension_name: -            continue - -        extensions[import_string(extension_name)] = None - -    if InternationalizationExtension not in extensions: -        extensions[InternationalizationExtension] = None - -    def getbool(options: t.Mapping[str, str], key: str, default: bool = False) -> bool: -        return options.get(key, str(default)).lower() in {"1", "on", "yes", "true"} - -    silent = getbool(options, "silent", True) -    environment = Environment( -        options.get("block_start_string", defaults.BLOCK_START_STRING), -        options.get("block_end_string", defaults.BLOCK_END_STRING), -        options.get("variable_start_string", defaults.VARIABLE_START_STRING), -        options.get("variable_end_string", defaults.VARIABLE_END_STRING), -        options.get("comment_start_string", defaults.COMMENT_START_STRING), -        options.get("comment_end_string", defaults.COMMENT_END_STRING), -        options.get("line_statement_prefix") or defaults.LINE_STATEMENT_PREFIX, -        options.get("line_comment_prefix") or defaults.LINE_COMMENT_PREFIX, -        getbool(options, "trim_blocks", defaults.TRIM_BLOCKS), -        getbool(options, "lstrip_blocks", defaults.LSTRIP_BLOCKS), -        defaults.NEWLINE_SEQUENCE, -        getbool(options, "keep_trailing_newline", defaults.KEEP_TRAILING_NEWLINE), -        tuple(extensions), -        cache_size=0, -        auto_reload=False, -    ) - -    if getbool(options, "trimmed"): -        environment.policies["ext.i18n.trimmed"] = True -    if getbool(options, "newstyle_gettext"): -        environment.newstyle_gettext = True  # type: ignore - -    source = fileobj.read().decode(options.get("encoding", "utf-8")) -    try: -        node = environment.parse(source) -        tokens = list(environment.lex(environment.preprocess(source))) -    except TemplateSyntaxError: -        if not silent: -            raise -        # skip templates with syntax errors -        return - -    finder = _CommentFinder(tokens, comment_tags) -    for lineno, func, message in extract_from_ast(node, keywords): -        yield lineno, func, message, finder.find_comments(lineno) - - -#: nicer import names -i18n = InternationalizationExtension -do = ExprStmtExtension -loopcontrols = LoopControlExtension -debug = DebugExtension diff --git a/venv/lib/python3.11/site-packages/jinja2/filters.py b/venv/lib/python3.11/site-packages/jinja2/filters.py deleted file mode 100644 index c7ecc9b..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/filters.py +++ /dev/null @@ -1,1854 +0,0 @@ -"""Built-in template filters used with the ``|`` operator.""" -import math -import random -import re -import typing -import typing as t -from collections import abc -from itertools import chain -from itertools import groupby - -from markupsafe import escape -from markupsafe import Markup -from markupsafe import soft_str - -from .async_utils import async_variant -from .async_utils import auto_aiter -from .async_utils import auto_await -from .async_utils import auto_to_list -from .exceptions import FilterArgumentError -from .runtime import Undefined -from .utils import htmlsafe_json_dumps -from .utils import pass_context -from .utils import pass_environment -from .utils import pass_eval_context -from .utils import pformat -from .utils import 
url_quote -from .utils import urlize - -if t.TYPE_CHECKING: -    import typing_extensions as te -    from .environment import Environment -    from .nodes import EvalContext -    from .runtime import Context -    from .sandbox import SandboxedEnvironment  # noqa: F401 - -    class HasHTML(te.Protocol): -        def __html__(self) -> str: -            pass - - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -K = t.TypeVar("K") -V = t.TypeVar("V") - - -def ignore_case(value: V) -> V: -    """For use as a postprocessor for :func:`make_attrgetter`. Converts strings -    to lowercase and returns other types as-is.""" -    if isinstance(value, str): -        return t.cast(V, value.lower()) - -    return value - - -def make_attrgetter( -    environment: "Environment", -    attribute: t.Optional[t.Union[str, int]], -    postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None, -    default: t.Optional[t.Any] = None, -) -> t.Callable[[t.Any], t.Any]: -    """Returns a callable that looks up the given attribute from a -    passed object with the rules of the environment.  Dots are allowed -    to access attributes of attributes.  Integer parts in paths are -    looked up as integers. -    """ -    parts = _prepare_attribute_parts(attribute) - -    def attrgetter(item: t.Any) -> t.Any: -        for part in parts: -            item = environment.getitem(item, part) - -            if default is not None and isinstance(item, Undefined): -                item = default - -        if postprocess is not None: -            item = postprocess(item) - -        return item - -    return attrgetter - - -def make_multi_attrgetter( -    environment: "Environment", -    attribute: t.Optional[t.Union[str, int]], -    postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None, -) -> t.Callable[[t.Any], t.List[t.Any]]: -    """Returns a callable that looks up the given comma separated -    attributes from a passed object with the rules of the environment. -    Dots are allowed to access attributes of each attribute.  Integer -    parts in paths are looked up as integers. - -    The value returned by the returned callable is a list of extracted -    attribute values. - -    Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc. -    """ -    if isinstance(attribute, str): -        split: t.Sequence[t.Union[str, int, None]] = attribute.split(",") -    else: -        split = [attribute] - -    parts = [_prepare_attribute_parts(item) for item in split] - -    def attrgetter(item: t.Any) -> t.List[t.Any]: -        items = [None] * len(parts) - -        for i, attribute_part in enumerate(parts): -            item_i = item - -            for part in attribute_part: -                item_i = environment.getitem(item_i, part) - -            if postprocess is not None: -                item_i = postprocess(item_i) - -            items[i] = item_i - -        return items - -    return attrgetter - - -def _prepare_attribute_parts( -    attr: t.Optional[t.Union[str, int]] -) -> t.List[t.Union[str, int]]: -    if attr is None: -        return [] - -    if isinstance(attr, str): -        return [int(x) if x.isdigit() else x for x in attr.split(".")] - -    return [attr] - - -def do_forceescape(value: "t.Union[str, HasHTML]") -> Markup: -    """Enforce HTML escaping.  
This will probably double escape variables.""" -    if hasattr(value, "__html__"): -        value = t.cast("HasHTML", value).__html__() - -    return escape(str(value)) - - -def do_urlencode( -    value: t.Union[str, t.Mapping[str, t.Any], t.Iterable[t.Tuple[str, t.Any]]] -) -> str: -    """Quote data for use in a URL path or query using UTF-8. - -    Basic wrapper around :func:`urllib.parse.quote` when given a -    string, or :func:`urllib.parse.urlencode` for a dict or iterable. - -    :param value: Data to quote. A string will be quoted directly. A -        dict or iterable of ``(key, value)`` pairs will be joined as a -        query string. - -    When given a string, "/" is not quoted. HTTP servers treat "/" and -    "%2F" equivalently in paths. If you need quoted slashes, use the -    ``|replace("/", "%2F")`` filter. - -    .. versionadded:: 2.7 -    """ -    if isinstance(value, str) or not isinstance(value, abc.Iterable): -        return url_quote(value) - -    if isinstance(value, dict): -        items: t.Iterable[t.Tuple[str, t.Any]] = value.items() -    else: -        items = value  # type: ignore - -    return "&".join( -        f"{url_quote(k, for_qs=True)}={url_quote(v, for_qs=True)}" for k, v in items -    ) - - -@pass_eval_context -def do_replace( -    eval_ctx: "EvalContext", s: str, old: str, new: str, count: t.Optional[int] = None -) -> str: -    """Return a copy of the value with all occurrences of a substring -    replaced with a new one. The first argument is the substring -    that should be replaced, the second is the replacement string. -    If the optional third argument ``count`` is given, only the first -    ``count`` occurrences are replaced: - -    .. sourcecode:: jinja - -        {{ "Hello World"|replace("Hello", "Goodbye") }} -            -> Goodbye World - -        {{ "aaaaargh"|replace("a", "d'oh, ", 2) }} -            -> d'oh, d'oh, aaargh -    """ -    if count is None: -        count = -1 - -    if not eval_ctx.autoescape: -        return str(s).replace(str(old), str(new), count) - -    if ( -        hasattr(old, "__html__") -        or hasattr(new, "__html__") -        and not hasattr(s, "__html__") -    ): -        s = escape(s) -    else: -        s = soft_str(s) - -    return s.replace(soft_str(old), soft_str(new), count) - - -def do_upper(s: str) -> str: -    """Convert a value to uppercase.""" -    return soft_str(s).upper() - - -def do_lower(s: str) -> str: -    """Convert a value to lowercase.""" -    return soft_str(s).lower() - - -def do_items(value: t.Union[t.Mapping[K, V], Undefined]) -> t.Iterator[t.Tuple[K, V]]: -    """Return an iterator over the ``(key, value)`` items of a mapping. - -    ``x|items`` is the same as ``x.items()``, except if ``x`` is -    undefined an empty iterator is returned. - -    This filter is useful if you expect the template to be rendered with -    an implementation of Jinja in another programming language that does -    not have a ``.items()`` method on its mapping type. - -    .. code-block:: html+jinja - -        <dl> -        {% for key, value in my_dict|items %} -            <dt>{{ key }} -            <dd>{{ value }} -        {% endfor %} -        </dl> - -    .. 
versionadded:: 3.1 -    """ -    if isinstance(value, Undefined): -        return - -    if not isinstance(value, abc.Mapping): -        raise TypeError("Can only get item pairs from a mapping.") - -    yield from value.items() - - -_space_re = re.compile(r"\s", flags=re.ASCII) - - -@pass_eval_context -def do_xmlattr( -    eval_ctx: "EvalContext", d: t.Mapping[str, t.Any], autospace: bool = True -) -> str: -    """Create an SGML/XML attribute string based on the items in a dict. - -    If any key contains a space, this fails with a ``ValueError``. Values that -    are neither ``none`` nor ``undefined`` are automatically escaped. - -    .. sourcecode:: html+jinja - -        <ul{{ {'class': 'my_list', 'missing': none, -                'id': 'list-%d'|format(variable)}|xmlattr }}> -        ... -        </ul> - -    Results in something like this: - -    .. sourcecode:: html - -        <ul class="my_list" id="list-42"> -        ... -        </ul> - -    As you can see it automatically prepends a space in front of the item -    if the filter returned something unless the second parameter is false. - -    .. versionchanged:: 3.1.3 -        Keys with spaces are not allowed. -    """ -    items = [] - -    for key, value in d.items(): -        if value is None or isinstance(value, Undefined): -            continue - -        if _space_re.search(key) is not None: -            raise ValueError(f"Spaces are not allowed in attributes: '{key}'") - -        items.append(f'{escape(key)}="{escape(value)}"') - -    rv = " ".join(items) - -    if autospace and rv: -        rv = " " + rv - -    if eval_ctx.autoescape: -        rv = Markup(rv) - -    return rv - - -def do_capitalize(s: str) -> str: -    """Capitalize a value. The first character will be uppercase, all others -    lowercase. -    """ -    return soft_str(s).capitalize() - - -_word_beginning_split_re = re.compile(r"([-\s({\[<]+)") - - -def do_title(s: str) -> str: -    """Return a titlecased version of the value. I.e. words will start with -    uppercase letters, all remaining characters are lowercase. -    """ -    return "".join( -        [ -            item[0].upper() + item[1:].lower() -            for item in _word_beginning_split_re.split(soft_str(s)) -            if item -        ] -    ) - - -def do_dictsort( -    value: t.Mapping[K, V], -    case_sensitive: bool = False, -    by: 'te.Literal["key", "value"]' = "key", -    reverse: bool = False, -) -> t.List[t.Tuple[K, V]]: -    """Sort a dict and yield (key, value) pairs. Python dicts may not -    be in the order you want to display them in, so sort them first. - -    .. 
sourcecode:: jinja - -        {% for key, value in mydict|dictsort %} -            sort the dict by key, case insensitive - -        {% for key, value in mydict|dictsort(reverse=true) %} -            sort the dict by key, case insensitive, reverse order - -        {% for key, value in mydict|dictsort(true) %} -            sort the dict by key, case sensitive - -        {% for key, value in mydict|dictsort(false, 'value') %} -            sort the dict by value, case insensitive -    """ -    if by == "key": -        pos = 0 -    elif by == "value": -        pos = 1 -    else: -        raise FilterArgumentError('You can only sort by either "key" or "value"') - -    def sort_func(item: t.Tuple[t.Any, t.Any]) -> t.Any: -        value = item[pos] - -        if not case_sensitive: -            value = ignore_case(value) - -        return value - -    return sorted(value.items(), key=sort_func, reverse=reverse) - - -@pass_environment -def do_sort( -    environment: "Environment", -    value: "t.Iterable[V]", -    reverse: bool = False, -    case_sensitive: bool = False, -    attribute: t.Optional[t.Union[str, int]] = None, -) -> "t.List[V]": -    """Sort an iterable using Python's :func:`sorted`. - -    .. sourcecode:: jinja - -        {% for city in cities|sort %} -            ... -        {% endfor %} - -    :param reverse: Sort descending instead of ascending. -    :param case_sensitive: When sorting strings, sort upper and lower -        case separately. -    :param attribute: When sorting objects or dicts, an attribute or -        key to sort by. Can use dot notation like ``"address.city"``. -        Can be a list of attributes like ``"age,name"``. - -    The sort is stable, it does not change the relative order of -    elements that compare equal. This makes it is possible to chain -    sorts on different attributes and ordering. - -    .. sourcecode:: jinja - -        {% for user in users|sort(attribute="name") -            |sort(reverse=true, attribute="age") %} -            ... -        {% endfor %} - -    As a shortcut to chaining when the direction is the same for all -    attributes, pass a comma separate list of attributes. - -    .. sourcecode:: jinja - -        {% for user in users|sort(attribute="age,name") %} -            ... -        {% endfor %} - -    .. versionchanged:: 2.11.0 -        The ``attribute`` parameter can be a comma separated list of -        attributes, e.g. ``"age,name"``. - -    .. versionchanged:: 2.6 -       The ``attribute`` parameter was added. -    """ -    key_func = make_multi_attrgetter( -        environment, attribute, postprocess=ignore_case if not case_sensitive else None -    ) -    return sorted(value, key=key_func, reverse=reverse) - - -@pass_environment -def do_unique( -    environment: "Environment", -    value: "t.Iterable[V]", -    case_sensitive: bool = False, -    attribute: t.Optional[t.Union[str, int]] = None, -) -> "t.Iterator[V]": -    """Returns a list of unique items from the given iterable. - -    .. sourcecode:: jinja - -        {{ ['foo', 'bar', 'foobar', 'FooBar']|unique|list }} -            -> ['foo', 'bar', 'foobar'] - -    The unique items are yielded in the same order as their first occurrence in -    the iterable passed to the filter. - -    :param case_sensitive: Treat upper and lower case strings as distinct. -    :param attribute: Filter objects with unique values for this attribute. 
-    """ -    getter = make_attrgetter( -        environment, attribute, postprocess=ignore_case if not case_sensitive else None -    ) -    seen = set() - -    for item in value: -        key = getter(item) - -        if key not in seen: -            seen.add(key) -            yield item - - -def _min_or_max( -    environment: "Environment", -    value: "t.Iterable[V]", -    func: "t.Callable[..., V]", -    case_sensitive: bool, -    attribute: t.Optional[t.Union[str, int]], -) -> "t.Union[V, Undefined]": -    it = iter(value) - -    try: -        first = next(it) -    except StopIteration: -        return environment.undefined("No aggregated item, sequence was empty.") - -    key_func = make_attrgetter( -        environment, attribute, postprocess=ignore_case if not case_sensitive else None -    ) -    return func(chain([first], it), key=key_func) - - -@pass_environment -def do_min( -    environment: "Environment", -    value: "t.Iterable[V]", -    case_sensitive: bool = False, -    attribute: t.Optional[t.Union[str, int]] = None, -) -> "t.Union[V, Undefined]": -    """Return the smallest item from the sequence. - -    .. sourcecode:: jinja - -        {{ [1, 2, 3]|min }} -            -> 1 - -    :param case_sensitive: Treat upper and lower case strings as distinct. -    :param attribute: Get the object with the min value of this attribute. -    """ -    return _min_or_max(environment, value, min, case_sensitive, attribute) - - -@pass_environment -def do_max( -    environment: "Environment", -    value: "t.Iterable[V]", -    case_sensitive: bool = False, -    attribute: t.Optional[t.Union[str, int]] = None, -) -> "t.Union[V, Undefined]": -    """Return the largest item from the sequence. - -    .. sourcecode:: jinja - -        {{ [1, 2, 3]|max }} -            -> 3 - -    :param case_sensitive: Treat upper and lower case strings as distinct. -    :param attribute: Get the object with the max value of this attribute. -    """ -    return _min_or_max(environment, value, max, case_sensitive, attribute) - - -def do_default( -    value: V, -    default_value: V = "",  # type: ignore -    boolean: bool = False, -) -> V: -    """If the value is undefined it will return the passed default value, -    otherwise the value of the variable: - -    .. sourcecode:: jinja - -        {{ my_variable|default('my_variable is not defined') }} - -    This will output the value of ``my_variable`` if the variable was -    defined, otherwise ``'my_variable is not defined'``. If you want -    to use default with variables that evaluate to false you have to -    set the second parameter to `true`: - -    .. sourcecode:: jinja - -        {{ ''|default('the string was empty', true) }} - -    .. versionchanged:: 2.11 -       It's now possible to configure the :class:`~jinja2.Environment` with -       :class:`~jinja2.ChainableUndefined` to make the `default` filter work -       on nested elements and attributes that may contain undefined values -       in the chain without getting an :exc:`~jinja2.UndefinedError`. -    """ -    if isinstance(value, Undefined) or (boolean and not value): -        return default_value - -    return value - - -@pass_eval_context -def sync_do_join( -    eval_ctx: "EvalContext", -    value: t.Iterable, -    d: str = "", -    attribute: t.Optional[t.Union[str, int]] = None, -) -> str: -    """Return a string which is the concatenation of the strings in the -    sequence. 
The separator between elements is an empty string per -    default, you can define it with the optional parameter: - -    .. sourcecode:: jinja - -        {{ [1, 2, 3]|join('|') }} -            -> 1|2|3 - -        {{ [1, 2, 3]|join }} -            -> 123 - -    It is also possible to join certain attributes of an object: - -    .. sourcecode:: jinja - -        {{ users|join(', ', attribute='username') }} - -    .. versionadded:: 2.6 -       The `attribute` parameter was added. -    """ -    if attribute is not None: -        value = map(make_attrgetter(eval_ctx.environment, attribute), value) - -    # no automatic escaping?  joining is a lot easier then -    if not eval_ctx.autoescape: -        return str(d).join(map(str, value)) - -    # if the delimiter doesn't have an html representation we check -    # if any of the items has.  If yes we do a coercion to Markup -    if not hasattr(d, "__html__"): -        value = list(value) -        do_escape = False - -        for idx, item in enumerate(value): -            if hasattr(item, "__html__"): -                do_escape = True -            else: -                value[idx] = str(item) - -        if do_escape: -            d = escape(d) -        else: -            d = str(d) - -        return d.join(value) - -    # no html involved, to normal joining -    return soft_str(d).join(map(soft_str, value)) - - -@async_variant(sync_do_join)  # type: ignore -async def do_join( -    eval_ctx: "EvalContext", -    value: t.Union[t.AsyncIterable, t.Iterable], -    d: str = "", -    attribute: t.Optional[t.Union[str, int]] = None, -) -> str: -    return sync_do_join(eval_ctx, await auto_to_list(value), d, attribute) - - -def do_center(value: str, width: int = 80) -> str: -    """Centers the value in a field of a given width.""" -    return soft_str(value).center(width) - - -@pass_environment -def sync_do_first( -    environment: "Environment", seq: "t.Iterable[V]" -) -> "t.Union[V, Undefined]": -    """Return the first item of a sequence.""" -    try: -        return next(iter(seq)) -    except StopIteration: -        return environment.undefined("No first item, sequence was empty.") - - -@async_variant(sync_do_first)  # type: ignore -async def do_first( -    environment: "Environment", seq: "t.Union[t.AsyncIterable[V], t.Iterable[V]]" -) -> "t.Union[V, Undefined]": -    try: -        return await auto_aiter(seq).__anext__() -    except StopAsyncIteration: -        return environment.undefined("No first item, sequence was empty.") - - -@pass_environment -def do_last( -    environment: "Environment", seq: "t.Reversible[V]" -) -> "t.Union[V, Undefined]": -    """Return the last item of a sequence. - -    Note: Does not work with generators. You may want to explicitly -    convert it to a list: - -    .. sourcecode:: jinja - -        {{ data | selectattr('name', '==', 'Jinja') | list | last }} -    """ -    try: -        return next(iter(reversed(seq))) -    except StopIteration: -        return environment.undefined("No last item, sequence was empty.") - - -# No async do_last, it may not be safe in async mode. 
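# A minimal usage sketch (not part of the deleted module) of the join/first/last
# filters documented above; the template string and the ``items`` value are
# illustrative only.
from jinja2 import Environment

env = Environment()
template = env.from_string(
    "{{ items|first }} .. {{ items|last }} ({{ items|join(' + ') }})"
)
print(template.render(items=[1, 2, 3]))  # -> "1 .. 3 (1 + 2 + 3)"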
- - -@pass_context -def do_random(context: "Context", seq: "t.Sequence[V]") -> "t.Union[V, Undefined]": -    """Return a random item from the sequence.""" -    try: -        return random.choice(seq) -    except IndexError: -        return context.environment.undefined("No random item, sequence was empty.") - - -def do_filesizeformat(value: t.Union[str, float, int], binary: bool = False) -> str: -    """Format the value like a 'human-readable' file size (i.e. 13 kB, -    4.1 MB, 102 Bytes, etc).  Per default decimal prefixes are used (Mega, -    Giga, etc.), if the second parameter is set to `True` the binary -    prefixes are used (Mebi, Gibi). -    """ -    bytes = float(value) -    base = 1024 if binary else 1000 -    prefixes = [ -        ("KiB" if binary else "kB"), -        ("MiB" if binary else "MB"), -        ("GiB" if binary else "GB"), -        ("TiB" if binary else "TB"), -        ("PiB" if binary else "PB"), -        ("EiB" if binary else "EB"), -        ("ZiB" if binary else "ZB"), -        ("YiB" if binary else "YB"), -    ] - -    if bytes == 1: -        return "1 Byte" -    elif bytes < base: -        return f"{int(bytes)} Bytes" -    else: -        for i, prefix in enumerate(prefixes): -            unit = base ** (i + 2) - -            if bytes < unit: -                return f"{base * bytes / unit:.1f} {prefix}" - -        return f"{base * bytes / unit:.1f} {prefix}" - - -def do_pprint(value: t.Any) -> str: -    """Pretty print a variable. Useful for debugging.""" -    return pformat(value) - - -_uri_scheme_re = re.compile(r"^([\w.+-]{2,}:(/){0,2})$") - - -@pass_eval_context -def do_urlize( -    eval_ctx: "EvalContext", -    value: str, -    trim_url_limit: t.Optional[int] = None, -    nofollow: bool = False, -    target: t.Optional[str] = None, -    rel: t.Optional[str] = None, -    extra_schemes: t.Optional[t.Iterable[str]] = None, -) -> str: -    """Convert URLs in text into clickable links. - -    This may not recognize links in some situations. Usually, a more -    comprehensive formatter, such as a Markdown library, is a better -    choice. - -    Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email -    addresses. Links with trailing punctuation (periods, commas, closing -    parentheses) and leading punctuation (opening parentheses) are -    recognized excluding the punctuation. Email addresses that include -    header fields are not recognized (for example, -    ``mailto:address@example.com?cc=copy@example.com``). - -    :param value: Original text containing URLs to link. -    :param trim_url_limit: Shorten displayed URL values to this length. -    :param nofollow: Add the ``rel=nofollow`` attribute to links. -    :param target: Add the ``target`` attribute to links. -    :param rel: Add the ``rel`` attribute to links. -    :param extra_schemes: Recognize URLs that start with these schemes -        in addition to the default behavior. Defaults to -        ``env.policies["urlize.extra_schemes"]``, which defaults to no -        extra schemes. - -    .. versionchanged:: 3.0 -        The ``extra_schemes`` parameter was added. - -    .. versionchanged:: 3.0 -        Generate ``https://`` links for URLs without a scheme. - -    .. versionchanged:: 3.0 -        The parsing rules were updated. Recognize email addresses with -        or without the ``mailto:`` scheme. Validate IP addresses. Ignore -        parentheses and brackets in more cases. - -    .. versionchanged:: 2.8 -       The ``target`` parameter was added. 
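    For illustration (assuming ``text`` holds plain text containing URLs):

    .. sourcecode:: jinja

        {{ text|urlize }}
        {{ text|urlize(nofollow=true, target="_blank") }}

    The default ``rel`` and ``target`` values come from the environment's ``policies`` mapping, as the implementation below shows.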
-    """ -    policies = eval_ctx.environment.policies -    rel_parts = set((rel or "").split()) - -    if nofollow: -        rel_parts.add("nofollow") - -    rel_parts.update((policies["urlize.rel"] or "").split()) -    rel = " ".join(sorted(rel_parts)) or None - -    if target is None: -        target = policies["urlize.target"] - -    if extra_schemes is None: -        extra_schemes = policies["urlize.extra_schemes"] or () - -    for scheme in extra_schemes: -        if _uri_scheme_re.fullmatch(scheme) is None: -            raise FilterArgumentError(f"{scheme!r} is not a valid URI scheme prefix.") - -    rv = urlize( -        value, -        trim_url_limit=trim_url_limit, -        rel=rel, -        target=target, -        extra_schemes=extra_schemes, -    ) - -    if eval_ctx.autoescape: -        rv = Markup(rv) - -    return rv - - -def do_indent( -    s: str, width: t.Union[int, str] = 4, first: bool = False, blank: bool = False -) -> str: -    """Return a copy of the string with each line indented by 4 spaces. The -    first line and blank lines are not indented by default. - -    :param width: Number of spaces, or a string, to indent by. -    :param first: Don't skip indenting the first line. -    :param blank: Don't skip indenting empty lines. - -    .. versionchanged:: 3.0 -        ``width`` can be a string. - -    .. versionchanged:: 2.10 -        Blank lines are not indented by default. - -        Rename the ``indentfirst`` argument to ``first``. -    """ -    if isinstance(width, str): -        indention = width -    else: -        indention = " " * width - -    newline = "\n" - -    if isinstance(s, Markup): -        indention = Markup(indention) -        newline = Markup(newline) - -    s += newline  # this quirk is necessary for splitlines method - -    if blank: -        rv = (newline + indention).join(s.splitlines()) -    else: -        lines = s.splitlines() -        rv = lines.pop(0) - -        if lines: -            rv += newline + newline.join( -                indention + line if line else line for line in lines -            ) - -    if first: -        rv = indention + rv - -    return rv - - -@pass_environment -def do_truncate( -    env: "Environment", -    s: str, -    length: int = 255, -    killwords: bool = False, -    end: str = "...", -    leeway: t.Optional[int] = None, -) -> str: -    """Return a truncated copy of the string. The length is specified -    with the first parameter which defaults to ``255``. If the second -    parameter is ``true`` the filter will cut the text at length. Otherwise -    it will discard the last word. If the text was in fact -    truncated it will append an ellipsis sign (``"..."``). If you want a -    different ellipsis sign than ``"..."`` you can specify it using the -    third parameter. Strings that only exceed the length by the tolerance -    margin given in the fourth parameter will not be truncated. - -    .. sourcecode:: jinja - -        {{ "foo bar baz qux"|truncate(9) }} -            -> "foo..." -        {{ "foo bar baz qux"|truncate(9, True) }} -            -> "foo ba..." -        {{ "foo bar baz qux"|truncate(11) }} -            -> "foo bar baz qux" -        {{ "foo bar baz qux"|truncate(11, False, '...', 0) }} -            -> "foo bar..." - -    The default leeway on newer Jinja versions is 5 and was 0 before but -    can be reconfigured globally. 
-    """ -    if leeway is None: -        leeway = env.policies["truncate.leeway"] - -    assert length >= len(end), f"expected length >= {len(end)}, got {length}" -    assert leeway >= 0, f"expected leeway >= 0, got {leeway}" - -    if len(s) <= length + leeway: -        return s - -    if killwords: -        return s[: length - len(end)] + end - -    result = s[: length - len(end)].rsplit(" ", 1)[0] -    return result + end - - -@pass_environment -def do_wordwrap( -    environment: "Environment", -    s: str, -    width: int = 79, -    break_long_words: bool = True, -    wrapstring: t.Optional[str] = None, -    break_on_hyphens: bool = True, -) -> str: -    """Wrap a string to the given width. Existing newlines are treated -    as paragraphs to be wrapped separately. - -    :param s: Original text to wrap. -    :param width: Maximum length of wrapped lines. -    :param break_long_words: If a word is longer than ``width``, break -        it across lines. -    :param break_on_hyphens: If a word contains hyphens, it may be split -        across lines. -    :param wrapstring: String to join each wrapped line. Defaults to -        :attr:`Environment.newline_sequence`. - -    .. versionchanged:: 2.11 -        Existing newlines are treated as paragraphs wrapped separately. - -    .. versionchanged:: 2.11 -        Added the ``break_on_hyphens`` parameter. - -    .. versionchanged:: 2.7 -        Added the ``wrapstring`` parameter. -    """ -    import textwrap - -    if wrapstring is None: -        wrapstring = environment.newline_sequence - -    # textwrap.wrap doesn't consider existing newlines when wrapping. -    # If the string has a newline before width, wrap will still insert -    # a newline at width, resulting in a short line. Instead, split and -    # wrap each paragraph individually. -    return wrapstring.join( -        [ -            wrapstring.join( -                textwrap.wrap( -                    line, -                    width=width, -                    expand_tabs=False, -                    replace_whitespace=False, -                    break_long_words=break_long_words, -                    break_on_hyphens=break_on_hyphens, -                ) -            ) -            for line in s.splitlines() -        ] -    ) - - -_word_re = re.compile(r"\w+") - - -def do_wordcount(s: str) -> int: -    """Count the words in that string.""" -    return len(_word_re.findall(soft_str(s))) - - -def do_int(value: t.Any, default: int = 0, base: int = 10) -> int: -    """Convert the value into an integer. If the -    conversion doesn't work it will return ``0``. You can -    override this default using the first parameter. You -    can also override the default base (10) in the second -    parameter, which handles input with prefixes such as -    0b, 0o and 0x for bases 2, 8 and 16 respectively. -    The base is ignored for decimal numbers and non-string values. -    """ -    try: -        if isinstance(value, str): -            return int(value, base) - -        return int(value) -    except (TypeError, ValueError): -        # this quirk is necessary so that "42.23"|int gives 42. -        try: -            return int(float(value)) -        except (TypeError, ValueError): -            return default - - -def do_float(value: t.Any, default: float = 0.0) -> float: -    """Convert the value into a floating point number. If the -    conversion doesn't work it will return ``0.0``. You can -    override this default using the first parameter. 
-    """ -    try: -        return float(value) -    except (TypeError, ValueError): -        return default - - -def do_format(value: str, *args: t.Any, **kwargs: t.Any) -> str: -    """Apply the given values to a `printf-style`_ format string, like -    ``string % values``. - -    .. sourcecode:: jinja - -        {{ "%s, %s!"|format(greeting, name) }} -        Hello, World! - -    In most cases it should be more convenient and efficient to use the -    ``%`` operator or :meth:`str.format`. - -    .. code-block:: text - -        {{ "%s, %s!" % (greeting, name) }} -        {{ "{}, {}!".format(greeting, name) }} - -    .. _printf-style: https://docs.python.org/library/stdtypes.html -        #printf-style-string-formatting -    """ -    if args and kwargs: -        raise FilterArgumentError( -            "can't handle positional and keyword arguments at the same time" -        ) - -    return soft_str(value) % (kwargs or args) - - -def do_trim(value: str, chars: t.Optional[str] = None) -> str: -    """Strip leading and trailing characters, by default whitespace.""" -    return soft_str(value).strip(chars) - - -def do_striptags(value: "t.Union[str, HasHTML]") -> str: -    """Strip SGML/XML tags and replace adjacent whitespace by one space.""" -    if hasattr(value, "__html__"): -        value = t.cast("HasHTML", value).__html__() - -    return Markup(str(value)).striptags() - - -def sync_do_slice( -    value: "t.Collection[V]", slices: int, fill_with: "t.Optional[V]" = None -) -> "t.Iterator[t.List[V]]": -    """Slice an iterator and return a list of lists containing -    those items. Useful if you want to create a div containing -    three ul tags that represent columns: - -    .. sourcecode:: html+jinja - -        <div class="columnwrapper"> -          {%- for column in items|slice(3) %} -            <ul class="column-{{ loop.index }}"> -            {%- for item in column %} -              <li>{{ item }}</li> -            {%- endfor %} -            </ul> -          {%- endfor %} -        </div> - -    If you pass it a second argument it's used to fill missing -    values on the last iteration. -    """ -    seq = list(value) -    length = len(seq) -    items_per_slice = length // slices -    slices_with_extra = length % slices -    offset = 0 - -    for slice_number in range(slices): -        start = offset + slice_number * items_per_slice - -        if slice_number < slices_with_extra: -            offset += 1 - -        end = offset + (slice_number + 1) * items_per_slice -        tmp = seq[start:end] - -        if fill_with is not None and slice_number >= slices_with_extra: -            tmp.append(fill_with) - -        yield tmp - - -@async_variant(sync_do_slice)  # type: ignore -async def do_slice( -    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -    slices: int, -    fill_with: t.Optional[t.Any] = None, -) -> "t.Iterator[t.List[V]]": -    return sync_do_slice(await auto_to_list(value), slices, fill_with) - - -def do_batch( -    value: "t.Iterable[V]", linecount: int, fill_with: "t.Optional[V]" = None -) -> "t.Iterator[t.List[V]]": -    """ -    A filter that batches items. It works pretty much like `slice` -    just the other way round. It returns a list of lists with the -    given number of items. If you provide a second parameter this -    is used to fill up missing items. See this example: - -    .. 
sourcecode:: html+jinja - -        <table> -        {%- for row in items|batch(3, ' ') %} -          <tr> -          {%- for column in row %} -            <td>{{ column }}</td> -          {%- endfor %} -          </tr> -        {%- endfor %} -        </table> -    """ -    tmp: "t.List[V]" = [] - -    for item in value: -        if len(tmp) == linecount: -            yield tmp -            tmp = [] - -        tmp.append(item) - -    if tmp: -        if fill_with is not None and len(tmp) < linecount: -            tmp += [fill_with] * (linecount - len(tmp)) - -        yield tmp - - -def do_round( -    value: float, -    precision: int = 0, -    method: 'te.Literal["common", "ceil", "floor"]' = "common", -) -> float: -    """Round the number to a given precision. The first -    parameter specifies the precision (default is ``0``), the -    second the rounding method: - -    - ``'common'`` rounds either up or down -    - ``'ceil'`` always rounds up -    - ``'floor'`` always rounds down - -    If you don't specify a method ``'common'`` is used. - -    .. sourcecode:: jinja - -        {{ 42.55|round }} -            -> 43.0 -        {{ 42.55|round(1, 'floor') }} -            -> 42.5 - -    Note that even if rounded to 0 precision, a float is returned.  If -    you need a real integer, pipe it through `int`: - -    .. sourcecode:: jinja - -        {{ 42.55|round|int }} -            -> 43 -    """ -    if method not in {"common", "ceil", "floor"}: -        raise FilterArgumentError("method must be common, ceil or floor") - -    if method == "common": -        return round(value, precision) - -    func = getattr(math, method) -    return t.cast(float, func(value * (10**precision)) / (10**precision)) - - -class _GroupTuple(t.NamedTuple): -    grouper: t.Any -    list: t.List - -    # Use the regular tuple repr to hide this subclass if users print -    # out the value during debugging. -    def __repr__(self) -> str: -        return tuple.__repr__(self) - -    def __str__(self) -> str: -        return tuple.__str__(self) - - -@pass_environment -def sync_do_groupby( -    environment: "Environment", -    value: "t.Iterable[V]", -    attribute: t.Union[str, int], -    default: t.Optional[t.Any] = None, -    case_sensitive: bool = False, -) -> "t.List[_GroupTuple]": -    """Group a sequence of objects by an attribute using Python's -    :func:`itertools.groupby`. The attribute can use dot notation for -    nested access, like ``"address.city"``. Unlike Python's ``groupby``, -    the values are sorted first so only one group is returned for each -    unique value. - -    For example, a list of ``User`` objects with a ``city`` attribute -    can be rendered in groups. In this example, ``grouper`` refers to -    the ``city`` value of the group. - -    .. sourcecode:: html+jinja - -        <ul>{% for city, items in users|groupby("city") %} -          <li>{{ city }} -            <ul>{% for user in items %} -              <li>{{ user.name }} -            {% endfor %}</ul> -          </li> -        {% endfor %}</ul> - -    ``groupby`` yields namedtuples of ``(grouper, list)``, which -    can be used instead of the tuple unpacking above. ``grouper`` is the -    value of the attribute, and ``list`` is the items with that value. - -    .. 
sourcecode:: html+jinja - -        <ul>{% for group in users|groupby("city") %} -          <li>{{ group.grouper }}: {{ group.list|join(", ") }} -        {% endfor %}</ul> - -    You can specify a ``default`` value to use if an object in the list -    does not have the given attribute. - -    .. sourcecode:: jinja - -        <ul>{% for city, items in users|groupby("city", default="NY") %} -          <li>{{ city }}: {{ items|map(attribute="name")|join(", ") }}</li> -        {% endfor %}</ul> - -    Like the :func:`~jinja-filters.sort` filter, sorting and grouping is -    case-insensitive by default. The ``key`` for each group will have -    the case of the first item in that group of values. For example, if -    a list of users has cities ``["CA", "NY", "ca"]``, the "CA" group -    will have two values. This can be disabled by passing -    ``case_sensitive=True``. - -    .. versionchanged:: 3.1 -        Added the ``case_sensitive`` parameter. Sorting and grouping is -        case-insensitive by default, matching other filters that do -        comparisons. - -    .. versionchanged:: 3.0 -        Added the ``default`` parameter. - -    .. versionchanged:: 2.6 -        The attribute supports dot notation for nested access. -    """ -    expr = make_attrgetter( -        environment, -        attribute, -        postprocess=ignore_case if not case_sensitive else None, -        default=default, -    ) -    out = [ -        _GroupTuple(key, list(values)) -        for key, values in groupby(sorted(value, key=expr), expr) -    ] - -    if not case_sensitive: -        # Return the real key from the first value instead of the lowercase key. -        output_expr = make_attrgetter(environment, attribute, default=default) -        out = [_GroupTuple(output_expr(values[0]), values) for _, values in out] - -    return out - - -@async_variant(sync_do_groupby)  # type: ignore -async def do_groupby( -    environment: "Environment", -    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -    attribute: t.Union[str, int], -    default: t.Optional[t.Any] = None, -    case_sensitive: bool = False, -) -> "t.List[_GroupTuple]": -    expr = make_attrgetter( -        environment, -        attribute, -        postprocess=ignore_case if not case_sensitive else None, -        default=default, -    ) -    out = [ -        _GroupTuple(key, await auto_to_list(values)) -        for key, values in groupby(sorted(await auto_to_list(value), key=expr), expr) -    ] - -    if not case_sensitive: -        # Return the real key from the first value instead of the lowercase key. -        output_expr = make_attrgetter(environment, attribute, default=default) -        out = [_GroupTuple(output_expr(values[0]), values) for _, values in out] - -    return out - - -@pass_environment -def sync_do_sum( -    environment: "Environment", -    iterable: "t.Iterable[V]", -    attribute: t.Optional[t.Union[str, int]] = None, -    start: V = 0,  # type: ignore -) -> V: -    """Returns the sum of a sequence of numbers plus the value of parameter -    'start' (which defaults to 0).  When the sequence is empty it returns -    start. - -    It is also possible to sum up only certain attributes: - -    .. sourcecode:: jinja - -        Total: {{ items|sum(attribute='price') }} - -    .. versionchanged:: 2.6 -       The ``attribute`` parameter was added to allow summing up over -       attributes.  Also the ``start`` parameter was moved on to the right. 
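    For instance (values illustrative):

    .. sourcecode:: jinja

        {{ [1, 2, 3]|sum }}
            -> 6
        {{ [1, 2, 3]|sum(start=10) }}
            -> 16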
-    """ -    if attribute is not None: -        iterable = map(make_attrgetter(environment, attribute), iterable) - -    return sum(iterable, start)  # type: ignore[no-any-return, call-overload] - - -@async_variant(sync_do_sum)  # type: ignore -async def do_sum( -    environment: "Environment", -    iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -    attribute: t.Optional[t.Union[str, int]] = None, -    start: V = 0,  # type: ignore -) -> V: -    rv = start - -    if attribute is not None: -        func = make_attrgetter(environment, attribute) -    else: - -        def func(x: V) -> V: -            return x - -    async for item in auto_aiter(iterable): -        rv += func(item) - -    return rv - - -def sync_do_list(value: "t.Iterable[V]") -> "t.List[V]": -    """Convert the value into a list.  If it was a string the returned list -    will be a list of characters. -    """ -    return list(value) - - -@async_variant(sync_do_list)  # type: ignore -async def do_list(value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]") -> "t.List[V]": -    return await auto_to_list(value) - - -def do_mark_safe(value: str) -> Markup: -    """Mark the value as safe which means that in an environment with automatic -    escaping enabled this variable will not be escaped. -    """ -    return Markup(value) - - -def do_mark_unsafe(value: str) -> str: -    """Mark a value as unsafe.  This is the reverse operation for :func:`safe`.""" -    return str(value) - - -@typing.overload -def do_reverse(value: str) -> str: -    ... - - -@typing.overload -def do_reverse(value: "t.Iterable[V]") -> "t.Iterable[V]": -    ... - - -def do_reverse(value: t.Union[str, t.Iterable[V]]) -> t.Union[str, t.Iterable[V]]: -    """Reverse the object or return an iterator that iterates over it the other -    way round. -    """ -    if isinstance(value, str): -        return value[::-1] - -    try: -        return reversed(value)  # type: ignore -    except TypeError: -        try: -            rv = list(value) -            rv.reverse() -            return rv -        except TypeError as e: -            raise FilterArgumentError("argument must be iterable") from e - - -@pass_environment -def do_attr( -    environment: "Environment", obj: t.Any, name: str -) -> t.Union[Undefined, t.Any]: -    """Get an attribute of an object.  ``foo|attr("bar")`` works like -    ``foo.bar`` just that always an attribute is returned and items are not -    looked up. - -    See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details. -    """ -    try: -        name = str(name) -    except UnicodeError: -        pass -    else: -        try: -            value = getattr(obj, name) -        except AttributeError: -            pass -        else: -            if environment.sandboxed: -                environment = t.cast("SandboxedEnvironment", environment) - -                if not environment.is_safe_attribute(obj, name, value): -                    return environment.unsafe_undefined(obj, name) - -            return value - -    return environment.undefined(obj=obj, name=name) - - -@typing.overload -def sync_do_map( -    context: "Context", value: t.Iterable, name: str, *args: t.Any, **kwargs: t.Any -) -> t.Iterable: -    ... - - -@typing.overload -def sync_do_map( -    context: "Context", -    value: t.Iterable, -    *, -    attribute: str = ..., -    default: t.Optional[t.Any] = None, -) -> t.Iterable: -    ... 
- - -@pass_context -def sync_do_map( -    context: "Context", value: t.Iterable, *args: t.Any, **kwargs: t.Any -) -> t.Iterable: -    """Applies a filter on a sequence of objects or looks up an attribute. -    This is useful when dealing with lists of objects but you are really -    only interested in a certain value of it. - -    The basic usage is mapping on an attribute.  Imagine you have a list -    of users but you are only interested in a list of usernames: - -    .. sourcecode:: jinja - -        Users on this page: {{ users|map(attribute='username')|join(', ') }} - -    You can specify a ``default`` value to use if an object in the list -    does not have the given attribute. - -    .. sourcecode:: jinja - -        {{ users|map(attribute="username", default="Anonymous")|join(", ") }} - -    Alternatively you can let it invoke a filter by passing the name of the -    filter and the arguments afterwards.  A good example would be applying a -    text conversion filter on a sequence: - -    .. sourcecode:: jinja - -        Users on this page: {{ titles|map('lower')|join(', ') }} - -    Similar to a generator comprehension such as: - -    .. code-block:: python - -        (u.username for u in users) -        (getattr(u, "username", "Anonymous") for u in users) -        (do_lower(x) for x in titles) - -    .. versionchanged:: 2.11.0 -        Added the ``default`` parameter. - -    .. versionadded:: 2.7 -    """ -    if value: -        func = prepare_map(context, args, kwargs) - -        for item in value: -            yield func(item) - - -@typing.overload -def do_map( -    context: "Context", -    value: t.Union[t.AsyncIterable, t.Iterable], -    name: str, -    *args: t.Any, -    **kwargs: t.Any, -) -> t.Iterable: -    ... - - -@typing.overload -def do_map( -    context: "Context", -    value: t.Union[t.AsyncIterable, t.Iterable], -    *, -    attribute: str = ..., -    default: t.Optional[t.Any] = None, -) -> t.Iterable: -    ... - - -@async_variant(sync_do_map)  # type: ignore -async def do_map( -    context: "Context", -    value: t.Union[t.AsyncIterable, t.Iterable], -    *args: t.Any, -    **kwargs: t.Any, -) -> t.AsyncIterable: -    if value: -        func = prepare_map(context, args, kwargs) - -        async for item in auto_aiter(value): -            yield await auto_await(func(item)) - - -@pass_context -def sync_do_select( -    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any -) -> "t.Iterator[V]": -    """Filters a sequence of objects by applying a test to each object, -    and only selecting the objects with the test succeeding. - -    If no test is specified, each object will be evaluated as a boolean. - -    Example usage: - -    .. sourcecode:: jinja - -        {{ numbers|select("odd") }} -        {{ numbers|select("odd") }} -        {{ numbers|select("divisibleby", 3) }} -        {{ numbers|select("lessthan", 42) }} -        {{ strings|select("equalto", "mystring") }} - -    Similar to a generator comprehension such as: - -    .. code-block:: python - -        (n for n in numbers if test_odd(n)) -        (n for n in numbers if test_divisibleby(n, 3)) - -    .. 
versionadded:: 2.7 -    """ -    return select_or_reject(context, value, args, kwargs, lambda x: x, False) - - -@async_variant(sync_do_select)  # type: ignore -async def do_select( -    context: "Context", -    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -    *args: t.Any, -    **kwargs: t.Any, -) -> "t.AsyncIterator[V]": -    return async_select_or_reject(context, value, args, kwargs, lambda x: x, False) - - -@pass_context -def sync_do_reject( -    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any -) -> "t.Iterator[V]": -    """Filters a sequence of objects by applying a test to each object, -    and rejecting the objects with the test succeeding. - -    If no test is specified, each object will be evaluated as a boolean. - -    Example usage: - -    .. sourcecode:: jinja - -        {{ numbers|reject("odd") }} - -    Similar to a generator comprehension such as: - -    .. code-block:: python - -        (n for n in numbers if not test_odd(n)) - -    .. versionadded:: 2.7 -    """ -    return select_or_reject(context, value, args, kwargs, lambda x: not x, False) - - -@async_variant(sync_do_reject)  # type: ignore -async def do_reject( -    context: "Context", -    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -    *args: t.Any, -    **kwargs: t.Any, -) -> "t.AsyncIterator[V]": -    return async_select_or_reject(context, value, args, kwargs, lambda x: not x, False) - - -@pass_context -def sync_do_selectattr( -    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any -) -> "t.Iterator[V]": -    """Filters a sequence of objects by applying a test to the specified -    attribute of each object, and only selecting the objects with the -    test succeeding. - -    If no test is specified, the attribute's value will be evaluated as -    a boolean. - -    Example usage: - -    .. sourcecode:: jinja - -        {{ users|selectattr("is_active") }} -        {{ users|selectattr("email", "none") }} - -    Similar to a generator comprehension such as: - -    .. code-block:: python - -        (u for user in users if user.is_active) -        (u for user in users if test_none(user.email)) - -    .. versionadded:: 2.7 -    """ -    return select_or_reject(context, value, args, kwargs, lambda x: x, True) - - -@async_variant(sync_do_selectattr)  # type: ignore -async def do_selectattr( -    context: "Context", -    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -    *args: t.Any, -    **kwargs: t.Any, -) -> "t.AsyncIterator[V]": -    return async_select_or_reject(context, value, args, kwargs, lambda x: x, True) - - -@pass_context -def sync_do_rejectattr( -    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any -) -> "t.Iterator[V]": -    """Filters a sequence of objects by applying a test to the specified -    attribute of each object, and rejecting the objects with the test -    succeeding. - -    If no test is specified, the attribute's value will be evaluated as -    a boolean. - -    .. sourcecode:: jinja - -        {{ users|rejectattr("is_active") }} -        {{ users|rejectattr("email", "none") }} - -    Similar to a generator comprehension such as: - -    .. code-block:: python - -        (u for user in users if not user.is_active) -        (u for user in users if not test_none(user.email)) - -    .. 
versionadded:: 2.7 -    """ -    return select_or_reject(context, value, args, kwargs, lambda x: not x, True) - - -@async_variant(sync_do_rejectattr)  # type: ignore -async def do_rejectattr( -    context: "Context", -    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -    *args: t.Any, -    **kwargs: t.Any, -) -> "t.AsyncIterator[V]": -    return async_select_or_reject(context, value, args, kwargs, lambda x: not x, True) - - -@pass_eval_context -def do_tojson( -    eval_ctx: "EvalContext", value: t.Any, indent: t.Optional[int] = None -) -> Markup: -    """Serialize an object to a string of JSON, and mark it safe to -    render in HTML. This filter is only for use in HTML documents. - -    The returned string is safe to render in HTML documents and -    ``<script>`` tags. The exception is in HTML attributes that are -    double quoted; either use single quotes or the ``|forceescape`` -    filter. - -    :param value: The object to serialize to JSON. -    :param indent: The ``indent`` parameter passed to ``dumps``, for -        pretty-printing the value. - -    .. versionadded:: 2.9 -    """ -    policies = eval_ctx.environment.policies -    dumps = policies["json.dumps_function"] -    kwargs = policies["json.dumps_kwargs"] - -    if indent is not None: -        kwargs = kwargs.copy() -        kwargs["indent"] = indent - -    return htmlsafe_json_dumps(value, dumps=dumps, **kwargs) - - -def prepare_map( -    context: "Context", args: t.Tuple, kwargs: t.Dict[str, t.Any] -) -> t.Callable[[t.Any], t.Any]: -    if not args and "attribute" in kwargs: -        attribute = kwargs.pop("attribute") -        default = kwargs.pop("default", None) - -        if kwargs: -            raise FilterArgumentError( -                f"Unexpected keyword argument {next(iter(kwargs))!r}" -            ) - -        func = make_attrgetter(context.environment, attribute, default=default) -    else: -        try: -            name = args[0] -            args = args[1:] -        except LookupError: -            raise FilterArgumentError("map requires a filter argument") from None - -        def func(item: t.Any) -> t.Any: -            return context.environment.call_filter( -                name, item, args, kwargs, context=context -            ) - -    return func - - -def prepare_select_or_reject( -    context: "Context", -    args: t.Tuple, -    kwargs: t.Dict[str, t.Any], -    modfunc: t.Callable[[t.Any], t.Any], -    lookup_attr: bool, -) -> t.Callable[[t.Any], t.Any]: -    if lookup_attr: -        try: -            attr = args[0] -        except LookupError: -            raise FilterArgumentError("Missing parameter for attribute name") from None - -        transfunc = make_attrgetter(context.environment, attr) -        off = 1 -    else: -        off = 0 - -        def transfunc(x: V) -> V: -            return x - -    try: -        name = args[off] -        args = args[1 + off :] - -        def func(item: t.Any) -> t.Any: -            return context.environment.call_test(name, item, args, kwargs) - -    except LookupError: -        func = bool  # type: ignore - -    return lambda item: modfunc(func(transfunc(item))) - - -def select_or_reject( -    context: "Context", -    value: "t.Iterable[V]", -    args: t.Tuple, -    kwargs: t.Dict[str, t.Any], -    modfunc: t.Callable[[t.Any], t.Any], -    lookup_attr: bool, -) -> "t.Iterator[V]": -    if value: -        func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr) - -        for item in value: -            if func(item): -              
  yield item - - -async def async_select_or_reject( -    context: "Context", -    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -    args: t.Tuple, -    kwargs: t.Dict[str, t.Any], -    modfunc: t.Callable[[t.Any], t.Any], -    lookup_attr: bool, -) -> "t.AsyncIterator[V]": -    if value: -        func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr) - -        async for item in auto_aiter(value): -            if func(item): -                yield item - - -FILTERS = { -    "abs": abs, -    "attr": do_attr, -    "batch": do_batch, -    "capitalize": do_capitalize, -    "center": do_center, -    "count": len, -    "d": do_default, -    "default": do_default, -    "dictsort": do_dictsort, -    "e": escape, -    "escape": escape, -    "filesizeformat": do_filesizeformat, -    "first": do_first, -    "float": do_float, -    "forceescape": do_forceescape, -    "format": do_format, -    "groupby": do_groupby, -    "indent": do_indent, -    "int": do_int, -    "join": do_join, -    "last": do_last, -    "length": len, -    "list": do_list, -    "lower": do_lower, -    "items": do_items, -    "map": do_map, -    "min": do_min, -    "max": do_max, -    "pprint": do_pprint, -    "random": do_random, -    "reject": do_reject, -    "rejectattr": do_rejectattr, -    "replace": do_replace, -    "reverse": do_reverse, -    "round": do_round, -    "safe": do_mark_safe, -    "select": do_select, -    "selectattr": do_selectattr, -    "slice": do_slice, -    "sort": do_sort, -    "string": soft_str, -    "striptags": do_striptags, -    "sum": do_sum, -    "title": do_title, -    "trim": do_trim, -    "truncate": do_truncate, -    "unique": do_unique, -    "upper": do_upper, -    "urlencode": do_urlencode, -    "urlize": do_urlize, -    "wordcount": do_wordcount, -    "wordwrap": do_wordwrap, -    "xmlattr": do_xmlattr, -    "tojson": do_tojson, -} diff --git a/venv/lib/python3.11/site-packages/jinja2/idtracking.py b/venv/lib/python3.11/site-packages/jinja2/idtracking.py deleted file mode 100644 index 995ebaa..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/idtracking.py +++ /dev/null @@ -1,318 +0,0 @@ -import typing as t - -from . 
import nodes -from .visitor import NodeVisitor - -VAR_LOAD_PARAMETER = "param" -VAR_LOAD_RESOLVE = "resolve" -VAR_LOAD_ALIAS = "alias" -VAR_LOAD_UNDEFINED = "undefined" - - -def find_symbols( -    nodes: t.Iterable[nodes.Node], parent_symbols: t.Optional["Symbols"] = None -) -> "Symbols": -    sym = Symbols(parent=parent_symbols) -    visitor = FrameSymbolVisitor(sym) -    for node in nodes: -        visitor.visit(node) -    return sym - - -def symbols_for_node( -    node: nodes.Node, parent_symbols: t.Optional["Symbols"] = None -) -> "Symbols": -    sym = Symbols(parent=parent_symbols) -    sym.analyze_node(node) -    return sym - - -class Symbols: -    def __init__( -        self, parent: t.Optional["Symbols"] = None, level: t.Optional[int] = None -    ) -> None: -        if level is None: -            if parent is None: -                level = 0 -            else: -                level = parent.level + 1 - -        self.level: int = level -        self.parent = parent -        self.refs: t.Dict[str, str] = {} -        self.loads: t.Dict[str, t.Any] = {} -        self.stores: t.Set[str] = set() - -    def analyze_node(self, node: nodes.Node, **kwargs: t.Any) -> None: -        visitor = RootVisitor(self) -        visitor.visit(node, **kwargs) - -    def _define_ref( -        self, name: str, load: t.Optional[t.Tuple[str, t.Optional[str]]] = None -    ) -> str: -        ident = f"l_{self.level}_{name}" -        self.refs[name] = ident -        if load is not None: -            self.loads[ident] = load -        return ident - -    def find_load(self, target: str) -> t.Optional[t.Any]: -        if target in self.loads: -            return self.loads[target] - -        if self.parent is not None: -            return self.parent.find_load(target) - -        return None - -    def find_ref(self, name: str) -> t.Optional[str]: -        if name in self.refs: -            return self.refs[name] - -        if self.parent is not None: -            return self.parent.find_ref(name) - -        return None - -    def ref(self, name: str) -> str: -        rv = self.find_ref(name) -        if rv is None: -            raise AssertionError( -                "Tried to resolve a name to a reference that was" -                f" unknown to the frame ({name!r})" -            ) -        return rv - -    def copy(self) -> "Symbols": -        rv = object.__new__(self.__class__) -        rv.__dict__.update(self.__dict__) -        rv.refs = self.refs.copy() -        rv.loads = self.loads.copy() -        rv.stores = self.stores.copy() -        return rv - -    def store(self, name: str) -> None: -        self.stores.add(name) - -        # If we have not see the name referenced yet, we need to figure -        # out what to set it to. -        if name not in self.refs: -            # If there is a parent scope we check if the name has a -            # reference there.  If it does it means we might have to alias -            # to a variable there. -            if self.parent is not None: -                outer_ref = self.parent.find_ref(name) -                if outer_ref is not None: -                    self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref)) -                    return - -            # Otherwise we can just set it to undefined. 
-            self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None)) - -    def declare_parameter(self, name: str) -> str: -        self.stores.add(name) -        return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None)) - -    def load(self, name: str) -> None: -        if self.find_ref(name) is None: -            self._define_ref(name, load=(VAR_LOAD_RESOLVE, name)) - -    def branch_update(self, branch_symbols: t.Sequence["Symbols"]) -> None: -        stores: t.Dict[str, int] = {} -        for branch in branch_symbols: -            for target in branch.stores: -                if target in self.stores: -                    continue -                stores[target] = stores.get(target, 0) + 1 - -        for sym in branch_symbols: -            self.refs.update(sym.refs) -            self.loads.update(sym.loads) -            self.stores.update(sym.stores) - -        for name, branch_count in stores.items(): -            if branch_count == len(branch_symbols): -                continue - -            target = self.find_ref(name)  # type: ignore -            assert target is not None, "should not happen" - -            if self.parent is not None: -                outer_target = self.parent.find_ref(name) -                if outer_target is not None: -                    self.loads[target] = (VAR_LOAD_ALIAS, outer_target) -                    continue -            self.loads[target] = (VAR_LOAD_RESOLVE, name) - -    def dump_stores(self) -> t.Dict[str, str]: -        rv: t.Dict[str, str] = {} -        node: t.Optional["Symbols"] = self - -        while node is not None: -            for name in sorted(node.stores): -                if name not in rv: -                    rv[name] = self.find_ref(name)  # type: ignore - -            node = node.parent - -        return rv - -    def dump_param_targets(self) -> t.Set[str]: -        rv = set() -        node: t.Optional["Symbols"] = self - -        while node is not None: -            for target, (instr, _) in self.loads.items(): -                if instr == VAR_LOAD_PARAMETER: -                    rv.add(target) - -            node = node.parent - -        return rv - - -class RootVisitor(NodeVisitor): -    def __init__(self, symbols: "Symbols") -> None: -        self.sym_visitor = FrameSymbolVisitor(symbols) - -    def _simple_visit(self, node: nodes.Node, **kwargs: t.Any) -> None: -        for child in node.iter_child_nodes(): -            self.sym_visitor.visit(child) - -    visit_Template = _simple_visit -    visit_Block = _simple_visit -    visit_Macro = _simple_visit -    visit_FilterBlock = _simple_visit -    visit_Scope = _simple_visit -    visit_If = _simple_visit -    visit_ScopedEvalContextModifier = _simple_visit - -    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None: -        for child in node.body: -            self.sym_visitor.visit(child) - -    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None: -        for child in node.iter_child_nodes(exclude=("call",)): -            self.sym_visitor.visit(child) - -    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None: -        for child in node.body: -            self.sym_visitor.visit(child) - -    def visit_For( -        self, node: nodes.For, for_branch: str = "body", **kwargs: t.Any -    ) -> None: -        if for_branch == "body": -            self.sym_visitor.visit(node.target, store_as_param=True) -            branch = node.body -        elif for_branch == "else": -            branch = node.else_ 
-        elif for_branch == "test": -            self.sym_visitor.visit(node.target, store_as_param=True) -            if node.test is not None: -                self.sym_visitor.visit(node.test) -            return -        else: -            raise RuntimeError("Unknown for branch") - -        if branch: -            for item in branch: -                self.sym_visitor.visit(item) - -    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None: -        for target in node.targets: -            self.sym_visitor.visit(target) -        for child in node.body: -            self.sym_visitor.visit(child) - -    def generic_visit(self, node: nodes.Node, *args: t.Any, **kwargs: t.Any) -> None: -        raise NotImplementedError(f"Cannot find symbols for {type(node).__name__!r}") - - -class FrameSymbolVisitor(NodeVisitor): -    """A visitor for `Frame.inspect`.""" - -    def __init__(self, symbols: "Symbols") -> None: -        self.symbols = symbols - -    def visit_Name( -        self, node: nodes.Name, store_as_param: bool = False, **kwargs: t.Any -    ) -> None: -        """All assignments to names go through this function.""" -        if store_as_param or node.ctx == "param": -            self.symbols.declare_parameter(node.name) -        elif node.ctx == "store": -            self.symbols.store(node.name) -        elif node.ctx == "load": -            self.symbols.load(node.name) - -    def visit_NSRef(self, node: nodes.NSRef, **kwargs: t.Any) -> None: -        self.symbols.load(node.name) - -    def visit_If(self, node: nodes.If, **kwargs: t.Any) -> None: -        self.visit(node.test, **kwargs) -        original_symbols = self.symbols - -        def inner_visit(nodes: t.Iterable[nodes.Node]) -> "Symbols": -            self.symbols = rv = original_symbols.copy() - -            for subnode in nodes: -                self.visit(subnode, **kwargs) - -            self.symbols = original_symbols -            return rv - -        body_symbols = inner_visit(node.body) -        elif_symbols = inner_visit(node.elif_) -        else_symbols = inner_visit(node.else_ or ()) -        self.symbols.branch_update([body_symbols, elif_symbols, else_symbols]) - -    def visit_Macro(self, node: nodes.Macro, **kwargs: t.Any) -> None: -        self.symbols.store(node.name) - -    def visit_Import(self, node: nodes.Import, **kwargs: t.Any) -> None: -        self.generic_visit(node, **kwargs) -        self.symbols.store(node.target) - -    def visit_FromImport(self, node: nodes.FromImport, **kwargs: t.Any) -> None: -        self.generic_visit(node, **kwargs) - -        for name in node.names: -            if isinstance(name, tuple): -                self.symbols.store(name[1]) -            else: -                self.symbols.store(name) - -    def visit_Assign(self, node: nodes.Assign, **kwargs: t.Any) -> None: -        """Visit assignments in the correct order.""" -        self.visit(node.node, **kwargs) -        self.visit(node.target, **kwargs) - -    def visit_For(self, node: nodes.For, **kwargs: t.Any) -> None: -        """Visiting stops at for blocks.  However the block sequence -        is visited as part of the outer scope. 
-        """ -        self.visit(node.iter, **kwargs) - -    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None: -        self.visit(node.call, **kwargs) - -    def visit_FilterBlock(self, node: nodes.FilterBlock, **kwargs: t.Any) -> None: -        self.visit(node.filter, **kwargs) - -    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None: -        for target in node.values: -            self.visit(target) - -    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None: -        """Stop visiting at block assigns.""" -        self.visit(node.target, **kwargs) - -    def visit_Scope(self, node: nodes.Scope, **kwargs: t.Any) -> None: -        """Stop visiting at scopes.""" - -    def visit_Block(self, node: nodes.Block, **kwargs: t.Any) -> None: -        """Stop visiting at blocks.""" - -    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None: -        """Do not visit into overlay scopes.""" diff --git a/venv/lib/python3.11/site-packages/jinja2/lexer.py b/venv/lib/python3.11/site-packages/jinja2/lexer.py deleted file mode 100644 index aff7e9f..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/lexer.py +++ /dev/null @@ -1,866 +0,0 @@ -"""Implements a Jinja / Python combination lexer. The ``Lexer`` class -is used to do some preprocessing. It filters out invalid operators like -the bitshift operators we don't allow in templates. It separates -template code and python code in expressions. -""" -import re -import typing as t -from ast import literal_eval -from collections import deque -from sys import intern - -from ._identifier import pattern as name_re -from .exceptions import TemplateSyntaxError -from .utils import LRUCache - -if t.TYPE_CHECKING: -    import typing_extensions as te -    from .environment import Environment - -# cache for the lexers. Exists in order to be able to have multiple -# environments with the same lexer -_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50)  # type: ignore - -# static regular expressions -whitespace_re = re.compile(r"\s+") -newline_re = re.compile(r"(\r\n|\r|\n)") -string_re = re.compile( -    r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S -) -integer_re = re.compile( -    r""" -    ( -        0b(_?[0-1])+ # binary -    | -        0o(_?[0-7])+ # octal -    | -        0x(_?[\da-f])+ # hex -    | -        [1-9](_?\d)* # decimal -    | -        0(_?0)* # decimal zero -    ) -    """, -    re.IGNORECASE | re.VERBOSE, -) -float_re = re.compile( -    r""" -    (?<!\.)  # doesn't start with a . -    (\d+_)*\d+  # digits, possibly _ separated -    ( -        (\.(\d+_)*\d+)?  
# optional fractional part -        e[+\-]?(\d+_)*\d+  # exponent part -    | -        \.(\d+_)*\d+  # required fractional part -    ) -    """, -    re.IGNORECASE | re.VERBOSE, -) - -# internal the tokens and keep references to them -TOKEN_ADD = intern("add") -TOKEN_ASSIGN = intern("assign") -TOKEN_COLON = intern("colon") -TOKEN_COMMA = intern("comma") -TOKEN_DIV = intern("div") -TOKEN_DOT = intern("dot") -TOKEN_EQ = intern("eq") -TOKEN_FLOORDIV = intern("floordiv") -TOKEN_GT = intern("gt") -TOKEN_GTEQ = intern("gteq") -TOKEN_LBRACE = intern("lbrace") -TOKEN_LBRACKET = intern("lbracket") -TOKEN_LPAREN = intern("lparen") -TOKEN_LT = intern("lt") -TOKEN_LTEQ = intern("lteq") -TOKEN_MOD = intern("mod") -TOKEN_MUL = intern("mul") -TOKEN_NE = intern("ne") -TOKEN_PIPE = intern("pipe") -TOKEN_POW = intern("pow") -TOKEN_RBRACE = intern("rbrace") -TOKEN_RBRACKET = intern("rbracket") -TOKEN_RPAREN = intern("rparen") -TOKEN_SEMICOLON = intern("semicolon") -TOKEN_SUB = intern("sub") -TOKEN_TILDE = intern("tilde") -TOKEN_WHITESPACE = intern("whitespace") -TOKEN_FLOAT = intern("float") -TOKEN_INTEGER = intern("integer") -TOKEN_NAME = intern("name") -TOKEN_STRING = intern("string") -TOKEN_OPERATOR = intern("operator") -TOKEN_BLOCK_BEGIN = intern("block_begin") -TOKEN_BLOCK_END = intern("block_end") -TOKEN_VARIABLE_BEGIN = intern("variable_begin") -TOKEN_VARIABLE_END = intern("variable_end") -TOKEN_RAW_BEGIN = intern("raw_begin") -TOKEN_RAW_END = intern("raw_end") -TOKEN_COMMENT_BEGIN = intern("comment_begin") -TOKEN_COMMENT_END = intern("comment_end") -TOKEN_COMMENT = intern("comment") -TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin") -TOKEN_LINESTATEMENT_END = intern("linestatement_end") -TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin") -TOKEN_LINECOMMENT_END = intern("linecomment_end") -TOKEN_LINECOMMENT = intern("linecomment") -TOKEN_DATA = intern("data") -TOKEN_INITIAL = intern("initial") -TOKEN_EOF = intern("eof") - -# bind operators to token types -operators = { -    "+": TOKEN_ADD, -    "-": TOKEN_SUB, -    "/": TOKEN_DIV, -    "//": TOKEN_FLOORDIV, -    "*": TOKEN_MUL, -    "%": TOKEN_MOD, -    "**": TOKEN_POW, -    "~": TOKEN_TILDE, -    "[": TOKEN_LBRACKET, -    "]": TOKEN_RBRACKET, -    "(": TOKEN_LPAREN, -    ")": TOKEN_RPAREN, -    "{": TOKEN_LBRACE, -    "}": TOKEN_RBRACE, -    "==": TOKEN_EQ, -    "!=": TOKEN_NE, -    ">": TOKEN_GT, -    ">=": TOKEN_GTEQ, -    "<": TOKEN_LT, -    "<=": TOKEN_LTEQ, -    "=": TOKEN_ASSIGN, -    ".": TOKEN_DOT, -    ":": TOKEN_COLON, -    "|": TOKEN_PIPE, -    ",": TOKEN_COMMA, -    ";": TOKEN_SEMICOLON, -} - -reverse_operators = {v: k for k, v in operators.items()} -assert len(operators) == len(reverse_operators), "operators dropped" -operator_re = re.compile( -    f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})" -) - -ignored_tokens = frozenset( -    [ -        TOKEN_COMMENT_BEGIN, -        TOKEN_COMMENT, -        TOKEN_COMMENT_END, -        TOKEN_WHITESPACE, -        TOKEN_LINECOMMENT_BEGIN, -        TOKEN_LINECOMMENT_END, -        TOKEN_LINECOMMENT, -    ] -) -ignore_if_empty = frozenset( -    [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT] -) - - -def _describe_token_type(token_type: str) -> str: -    if token_type in reverse_operators: -        return reverse_operators[token_type] - -    return { -        TOKEN_COMMENT_BEGIN: "begin of comment", -        TOKEN_COMMENT_END: "end of comment", -        TOKEN_COMMENT: "comment", -        TOKEN_LINECOMMENT: "comment", -        TOKEN_BLOCK_BEGIN: 
"begin of statement block", -        TOKEN_BLOCK_END: "end of statement block", -        TOKEN_VARIABLE_BEGIN: "begin of print statement", -        TOKEN_VARIABLE_END: "end of print statement", -        TOKEN_LINESTATEMENT_BEGIN: "begin of line statement", -        TOKEN_LINESTATEMENT_END: "end of line statement", -        TOKEN_DATA: "template data / text", -        TOKEN_EOF: "end of template", -    }.get(token_type, token_type) - - -def describe_token(token: "Token") -> str: -    """Returns a description of the token.""" -    if token.type == TOKEN_NAME: -        return token.value - -    return _describe_token_type(token.type) - - -def describe_token_expr(expr: str) -> str: -    """Like `describe_token` but for token expressions.""" -    if ":" in expr: -        type, value = expr.split(":", 1) - -        if type == TOKEN_NAME: -            return value -    else: -        type = expr - -    return _describe_token_type(type) - - -def count_newlines(value: str) -> int: -    """Count the number of newline characters in the string.  This is -    useful for extensions that filter a stream. -    """ -    return len(newline_re.findall(value)) - - -def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]: -    """Compiles all the rules from the environment into a list of rules.""" -    e = re.escape -    rules = [ -        ( -            len(environment.comment_start_string), -            TOKEN_COMMENT_BEGIN, -            e(environment.comment_start_string), -        ), -        ( -            len(environment.block_start_string), -            TOKEN_BLOCK_BEGIN, -            e(environment.block_start_string), -        ), -        ( -            len(environment.variable_start_string), -            TOKEN_VARIABLE_BEGIN, -            e(environment.variable_start_string), -        ), -    ] - -    if environment.line_statement_prefix is not None: -        rules.append( -            ( -                len(environment.line_statement_prefix), -                TOKEN_LINESTATEMENT_BEGIN, -                r"^[ \t\v]*" + e(environment.line_statement_prefix), -            ) -        ) -    if environment.line_comment_prefix is not None: -        rules.append( -            ( -                len(environment.line_comment_prefix), -                TOKEN_LINECOMMENT_BEGIN, -                r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix), -            ) -        ) - -    return [x[1:] for x in sorted(rules, reverse=True)] - - -class Failure: -    """Class that raises a `TemplateSyntaxError` if called. -    Used by the `Lexer` to specify known errors. -    """ - -    def __init__( -        self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError -    ) -> None: -        self.message = message -        self.error_class = cls - -    def __call__(self, lineno: int, filename: str) -> "te.NoReturn": -        raise self.error_class(self.message, lineno, filename) - - -class Token(t.NamedTuple): -    lineno: int -    type: str -    value: str - -    def __str__(self) -> str: -        return describe_token(self) - -    def test(self, expr: str) -> bool: -        """Test a token against a token expression.  This can either be a -        token type or ``'token_type:token_value'``.  This can only test -        against string values and types. -        """ -        # here we do a regular string equality check as test_any is usually -        # passed an iterable of not interned strings. 
-        if self.type == expr: -            return True - -        if ":" in expr: -            return expr.split(":", 1) == [self.type, self.value] - -        return False - -    def test_any(self, *iterable: str) -> bool: -        """Test against multiple token expressions.""" -        return any(self.test(expr) for expr in iterable) - - -class TokenStreamIterator: -    """The iterator for tokenstreams.  Iterate over the stream -    until the eof token is reached. -    """ - -    def __init__(self, stream: "TokenStream") -> None: -        self.stream = stream - -    def __iter__(self) -> "TokenStreamIterator": -        return self - -    def __next__(self) -> Token: -        token = self.stream.current - -        if token.type is TOKEN_EOF: -            self.stream.close() -            raise StopIteration - -        next(self.stream) -        return token - - -class TokenStream: -    """A token stream is an iterable that yields :class:`Token`\\s.  The -    parser however does not iterate over it but calls :meth:`next` to go -    one token ahead.  The current active token is stored as :attr:`current`. -    """ - -    def __init__( -        self, -        generator: t.Iterable[Token], -        name: t.Optional[str], -        filename: t.Optional[str], -    ): -        self._iter = iter(generator) -        self._pushed: "te.Deque[Token]" = deque() -        self.name = name -        self.filename = filename -        self.closed = False -        self.current = Token(1, TOKEN_INITIAL, "") -        next(self) - -    def __iter__(self) -> TokenStreamIterator: -        return TokenStreamIterator(self) - -    def __bool__(self) -> bool: -        return bool(self._pushed) or self.current.type is not TOKEN_EOF - -    @property -    def eos(self) -> bool: -        """Are we at the end of the stream?""" -        return not self - -    def push(self, token: Token) -> None: -        """Push a token back to the stream.""" -        self._pushed.append(token) - -    def look(self) -> Token: -        """Look at the next token.""" -        old_token = next(self) -        result = self.current -        self.push(result) -        self.current = old_token -        return result - -    def skip(self, n: int = 1) -> None: -        """Got n tokens ahead.""" -        for _ in range(n): -            next(self) - -    def next_if(self, expr: str) -> t.Optional[Token]: -        """Perform the token test and return the token if it matched. -        Otherwise the return value is `None`. -        """ -        if self.current.test(expr): -            return next(self) - -        return None - -    def skip_if(self, expr: str) -> bool: -        """Like :meth:`next_if` but only returns `True` or `False`.""" -        return self.next_if(expr) is not None - -    def __next__(self) -> Token: -        """Go one token ahead and return the old one. - -        Use the built-in :func:`next` instead of calling this directly. -        """ -        rv = self.current - -        if self._pushed: -            self.current = self._pushed.popleft() -        elif self.current.type is not TOKEN_EOF: -            try: -                self.current = next(self._iter) -            except StopIteration: -                self.close() - -        return rv - -    def close(self) -> None: -        """Close the stream.""" -        self.current = Token(self.current.lineno, TOKEN_EOF, "") -        self._iter = iter(()) -        self.closed = True - -    def expect(self, expr: str) -> Token: -        """Expect a given token type and return it.  
This accepts the same -        argument as :meth:`jinja2.lexer.Token.test`. -        """ -        if not self.current.test(expr): -            expr = describe_token_expr(expr) - -            if self.current.type is TOKEN_EOF: -                raise TemplateSyntaxError( -                    f"unexpected end of template, expected {expr!r}.", -                    self.current.lineno, -                    self.name, -                    self.filename, -                ) - -            raise TemplateSyntaxError( -                f"expected token {expr!r}, got {describe_token(self.current)!r}", -                self.current.lineno, -                self.name, -                self.filename, -            ) - -        return next(self) - - -def get_lexer(environment: "Environment") -> "Lexer": -    """Return a lexer which is probably cached.""" -    key = ( -        environment.block_start_string, -        environment.block_end_string, -        environment.variable_start_string, -        environment.variable_end_string, -        environment.comment_start_string, -        environment.comment_end_string, -        environment.line_statement_prefix, -        environment.line_comment_prefix, -        environment.trim_blocks, -        environment.lstrip_blocks, -        environment.newline_sequence, -        environment.keep_trailing_newline, -    ) -    lexer = _lexer_cache.get(key) - -    if lexer is None: -        _lexer_cache[key] = lexer = Lexer(environment) - -    return lexer - - -class OptionalLStrip(tuple): -    """A special tuple for marking a point in the state that can have -    lstrip applied. -    """ - -    __slots__ = () - -    # Even though it looks like a no-op, creating instances fails -    # without this. -    def __new__(cls, *members, **kwargs):  # type: ignore -        return super().__new__(cls, members) - - -class _Rule(t.NamedTuple): -    pattern: t.Pattern[str] -    tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]] -    command: t.Optional[str] - - -class Lexer: -    """Class that implements a lexer for a given environment. Automatically -    created by the environment class, usually you don't have to do that. - -    Note that the lexer is not automatically bound to an environment. -    Multiple environments can share the same lexer. -    """ - -    def __init__(self, environment: "Environment") -> None: -        # shortcuts -        e = re.escape - -        def c(x: str) -> t.Pattern[str]: -            return re.compile(x, re.M | re.S) - -        # lexing rules for tags -        tag_rules: t.List[_Rule] = [ -            _Rule(whitespace_re, TOKEN_WHITESPACE, None), -            _Rule(float_re, TOKEN_FLOAT, None), -            _Rule(integer_re, TOKEN_INTEGER, None), -            _Rule(name_re, TOKEN_NAME, None), -            _Rule(string_re, TOKEN_STRING, None), -            _Rule(operator_re, TOKEN_OPERATOR, None), -        ] - -        # assemble the root lexing rule. because "|" is ungreedy -        # we have to sort by length so that the lexer continues working -        # as expected when we have parsing rules like <% for block and -        # <%= for variables. (if someone wants asp like syntax) -        # variables are just part of the rules if variable processing -        # is required. 
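# Sketch: the lexer is rarely driven by hand; the usual entry point is the
# public Environment.lex(), which calls Lexer.tokeniter() and yields
# (lineno, token_type, value) tuples.  The template string is an example.
from jinja2 import Environment

env = Environment()
for lineno, token_type, value in env.lex("Hello {{ name }}!"):
    print(lineno, token_type, repr(value))
# 1 data 'Hello '
# 1 variable_begin '{{'
# 1 whitespace ' '
# 1 name 'name'
# ... and so on up to the trailing data token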
-        root_tag_rules = compile_rules(environment) - -        block_start_re = e(environment.block_start_string) -        block_end_re = e(environment.block_end_string) -        comment_end_re = e(environment.comment_end_string) -        variable_end_re = e(environment.variable_end_string) - -        # block suffix if trimming is enabled -        block_suffix_re = "\\n?" if environment.trim_blocks else "" - -        self.lstrip_blocks = environment.lstrip_blocks - -        self.newline_sequence = environment.newline_sequence -        self.keep_trailing_newline = environment.keep_trailing_newline - -        root_raw_re = ( -            rf"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*" -            rf"(?:\-{block_end_re}\s*|{block_end_re}))" -        ) -        root_parts_re = "|".join( -            [root_raw_re] + [rf"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules] -        ) - -        # global lexing rules -        self.rules: t.Dict[str, t.List[_Rule]] = { -            "root": [ -                # directives -                _Rule( -                    c(rf"(.*?)(?:{root_parts_re})"), -                    OptionalLStrip(TOKEN_DATA, "#bygroup"),  # type: ignore -                    "#bygroup", -                ), -                # data -                _Rule(c(".+"), TOKEN_DATA, None), -            ], -            # comments -            TOKEN_COMMENT_BEGIN: [ -                _Rule( -                    c( -                        rf"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*" -                        rf"|{comment_end_re}{block_suffix_re}))" -                    ), -                    (TOKEN_COMMENT, TOKEN_COMMENT_END), -                    "#pop", -                ), -                _Rule(c(r"(.)"), (Failure("Missing end of comment tag"),), None), -            ], -            # blocks -            TOKEN_BLOCK_BEGIN: [ -                _Rule( -                    c( -                        rf"(?:\+{block_end_re}|\-{block_end_re}\s*" -                        rf"|{block_end_re}{block_suffix_re})" -                    ), -                    TOKEN_BLOCK_END, -                    "#pop", -                ), -            ] -            + tag_rules, -            # variables -            TOKEN_VARIABLE_BEGIN: [ -                _Rule( -                    c(rf"\-{variable_end_re}\s*|{variable_end_re}"), -                    TOKEN_VARIABLE_END, -                    "#pop", -                ) -            ] -            + tag_rules, -            # raw block -            TOKEN_RAW_BEGIN: [ -                _Rule( -                    c( -                        rf"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*" -                        rf"(?:\+{block_end_re}|\-{block_end_re}\s*" -                        rf"|{block_end_re}{block_suffix_re}))" -                    ), -                    OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),  # type: ignore -                    "#pop", -                ), -                _Rule(c(r"(.)"), (Failure("Missing end of raw directive"),), None), -            ], -            # line statements -            TOKEN_LINESTATEMENT_BEGIN: [ -                _Rule(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop") -            ] -            + tag_rules, -            # line comments -            TOKEN_LINECOMMENT_BEGIN: [ -                _Rule( -                    c(r"(.*?)()(?=\n|$)"), -                    (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END), -                    "#pop", -                ) -            ], -        } - -    def 
_normalize_newlines(self, value: str) -> str: -        """Replace all newlines with the configured sequence in strings -        and template data. -        """ -        return newline_re.sub(self.newline_sequence, value) - -    def tokenize( -        self, -        source: str, -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -        state: t.Optional[str] = None, -    ) -> TokenStream: -        """Calls tokeniter + tokenize and wraps it in a token stream.""" -        stream = self.tokeniter(source, name, filename, state) -        return TokenStream(self.wrap(stream, name, filename), name, filename) - -    def wrap( -        self, -        stream: t.Iterable[t.Tuple[int, str, str]], -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -    ) -> t.Iterator[Token]: -        """This is called with the stream as returned by `tokenize` and wraps -        every token in a :class:`Token` and converts the value. -        """ -        for lineno, token, value_str in stream: -            if token in ignored_tokens: -                continue - -            value: t.Any = value_str - -            if token == TOKEN_LINESTATEMENT_BEGIN: -                token = TOKEN_BLOCK_BEGIN -            elif token == TOKEN_LINESTATEMENT_END: -                token = TOKEN_BLOCK_END -            # we are not interested in those tokens in the parser -            elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END): -                continue -            elif token == TOKEN_DATA: -                value = self._normalize_newlines(value_str) -            elif token == "keyword": -                token = value_str -            elif token == TOKEN_NAME: -                value = value_str - -                if not value.isidentifier(): -                    raise TemplateSyntaxError( -                        "Invalid character in identifier", lineno, name, filename -                    ) -            elif token == TOKEN_STRING: -                # try to unescape string -                try: -                    value = ( -                        self._normalize_newlines(value_str[1:-1]) -                        .encode("ascii", "backslashreplace") -                        .decode("unicode-escape") -                    ) -                except Exception as e: -                    msg = str(e).split(":")[-1].strip() -                    raise TemplateSyntaxError(msg, lineno, name, filename) from e -            elif token == TOKEN_INTEGER: -                value = int(value_str.replace("_", ""), 0) -            elif token == TOKEN_FLOAT: -                # remove all "_" first to support more Python versions -                value = literal_eval(value_str.replace("_", "")) -            elif token == TOKEN_OPERATOR: -                token = operators[value_str] - -            yield Token(lineno, token, value) - -    def tokeniter( -        self, -        source: str, -        name: t.Optional[str], -        filename: t.Optional[str] = None, -        state: t.Optional[str] = None, -    ) -> t.Iterator[t.Tuple[int, str, str]]: -        """This method tokenizes the text and returns the tokens in a -        generator. Use this method if you just want to tokenize a template. - -        .. versionchanged:: 3.0 -            Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line -            breaks. 
-        """ -        lines = newline_re.split(source)[::2] - -        if not self.keep_trailing_newline and lines[-1] == "": -            del lines[-1] - -        source = "\n".join(lines) -        pos = 0 -        lineno = 1 -        stack = ["root"] - -        if state is not None and state != "root": -            assert state in ("variable", "block"), "invalid state" -            stack.append(state + "_begin") - -        statetokens = self.rules[stack[-1]] -        source_length = len(source) -        balancing_stack: t.List[str] = [] -        newlines_stripped = 0 -        line_starting = True - -        while True: -            # tokenizer loop -            for regex, tokens, new_state in statetokens: -                m = regex.match(source, pos) - -                # if no match we try again with the next rule -                if m is None: -                    continue - -                # we only match blocks and variables if braces / parentheses -                # are balanced. continue parsing with the lower rule which -                # is the operator rule. do this only if the end tags look -                # like operators -                if balancing_stack and tokens in ( -                    TOKEN_VARIABLE_END, -                    TOKEN_BLOCK_END, -                    TOKEN_LINESTATEMENT_END, -                ): -                    continue - -                # tuples support more options -                if isinstance(tokens, tuple): -                    groups: t.Sequence[str] = m.groups() - -                    if isinstance(tokens, OptionalLStrip): -                        # Rule supports lstrip. Match will look like -                        # text, block type, whitespace control, type, control, ... -                        text = groups[0] -                        # Skipping the text and first type, every other group is the -                        # whitespace control for each type. One of the groups will be -                        # -, +, or empty string instead of None. -                        strip_sign = next(g for g in groups[2::2] if g is not None) - -                        if strip_sign == "-": -                            # Strip all whitespace between the text and the tag. -                            stripped = text.rstrip() -                            newlines_stripped = text[len(stripped) :].count("\n") -                            groups = [stripped, *groups[1:]] -                        elif ( -                            # Not marked for preserving whitespace. -                            strip_sign != "+" -                            # lstrip is enabled. -                            and self.lstrip_blocks -                            # Not a variable expression. -                            and not m.groupdict().get(TOKEN_VARIABLE_BEGIN) -                        ): -                            # The start of text between the last newline and the tag. -                            l_pos = text.rfind("\n") + 1 - -                            if l_pos > 0 or line_starting: -                                # If there's only whitespace between the newline and the -                                # tag, strip it. 
-                                if whitespace_re.fullmatch(text, l_pos): -                                    groups = [text[:l_pos], *groups[1:]] - -                    for idx, token in enumerate(tokens): -                        # failure group -                        if token.__class__ is Failure: -                            raise token(lineno, filename) -                        # bygroup is a bit more complex, in that case we -                        # yield for the current token the first named -                        # group that matched -                        elif token == "#bygroup": -                            for key, value in m.groupdict().items(): -                                if value is not None: -                                    yield lineno, key, value -                                    lineno += value.count("\n") -                                    break -                            else: -                                raise RuntimeError( -                                    f"{regex!r} wanted to resolve the token dynamically" -                                    " but no group matched" -                                ) -                        # normal group -                        else: -                            data = groups[idx] - -                            if data or token not in ignore_if_empty: -                                yield lineno, token, data - -                            lineno += data.count("\n") + newlines_stripped -                            newlines_stripped = 0 - -                # strings as token just are yielded as it. -                else: -                    data = m.group() - -                    # update brace/parentheses balance -                    if tokens == TOKEN_OPERATOR: -                        if data == "{": -                            balancing_stack.append("}") -                        elif data == "(": -                            balancing_stack.append(")") -                        elif data == "[": -                            balancing_stack.append("]") -                        elif data in ("}", ")", "]"): -                            if not balancing_stack: -                                raise TemplateSyntaxError( -                                    f"unexpected '{data}'", lineno, name, filename -                                ) - -                            expected_op = balancing_stack.pop() - -                            if expected_op != data: -                                raise TemplateSyntaxError( -                                    f"unexpected '{data}', expected '{expected_op}'", -                                    lineno, -                                    name, -                                    filename, -                                ) - -                    # yield items -                    if data or tokens not in ignore_if_empty: -                        yield lineno, tokens, data - -                    lineno += data.count("\n") - -                line_starting = m.group()[-1:] == "\n" -                # fetch new position into new variable so that we can check -                # if there is a internal parsing error which would result -                # in an infinite loop -                pos2 = m.end() - -                # handle state changes -                if new_state is not None: -                    # remove the uppermost state -                    if new_state == "#pop": -                        stack.pop() -                    # resolve the new state 
by group checking -                    elif new_state == "#bygroup": -                        for key, value in m.groupdict().items(): -                            if value is not None: -                                stack.append(key) -                                break -                        else: -                            raise RuntimeError( -                                f"{regex!r} wanted to resolve the new state dynamically" -                                f" but no group matched" -                            ) -                    # direct state name given -                    else: -                        stack.append(new_state) - -                    statetokens = self.rules[stack[-1]] -                # we are still at the same position and no stack change. -                # this means a loop without break condition, avoid that and -                # raise error -                elif pos2 == pos: -                    raise RuntimeError( -                        f"{regex!r} yielded empty string without stack change" -                    ) - -                # publish new function and start again -                pos = pos2 -                break -            # if loop terminated without break we haven't found a single match -            # either we are at the end of the file or we have a problem -            else: -                # end of text -                if pos >= source_length: -                    return - -                # something went wrong -                raise TemplateSyntaxError( -                    f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename -                ) diff --git a/venv/lib/python3.11/site-packages/jinja2/loaders.py b/venv/lib/python3.11/site-packages/jinja2/loaders.py deleted file mode 100644 index 32f3a74..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/loaders.py +++ /dev/null @@ -1,661 +0,0 @@ -"""API and implementations for loading templates from different data -sources. -""" -import importlib.util -import os -import posixpath -import sys -import typing as t -import weakref -import zipimport -from collections import abc -from hashlib import sha1 -from importlib import import_module -from types import ModuleType - -from .exceptions import TemplateNotFound -from .utils import internalcode - -if t.TYPE_CHECKING: -    from .environment import Environment -    from .environment import Template - - -def split_template_path(template: str) -> t.List[str]: -    """Split a path into segments and perform a sanity check.  If it detects -    '..' in the path it will raise a `TemplateNotFound` error. -    """ -    pieces = [] -    for piece in template.split("/"): -        if ( -            os.path.sep in piece -            or (os.path.altsep and os.path.altsep in piece) -            or piece == os.path.pardir -        ): -            raise TemplateNotFound(template) -        elif piece and piece != ".": -            pieces.append(piece) -    return pieces - - -class BaseLoader: -    """Baseclass for all loaders.  Subclass this and override `get_source` to -    implement a custom loading mechanism.  The environment provides a -    `get_template` method that calls the loader's `load` method to get the -    :class:`Template` object. 
- -    A very basic example for a loader that looks up templates on the file -    system could look like this:: - -        from jinja2 import BaseLoader, TemplateNotFound -        from os.path import join, exists, getmtime - -        class MyLoader(BaseLoader): - -            def __init__(self, path): -                self.path = path - -            def get_source(self, environment, template): -                path = join(self.path, template) -                if not exists(path): -                    raise TemplateNotFound(template) -                mtime = getmtime(path) -                with open(path) as f: -                    source = f.read() -                return source, path, lambda: mtime == getmtime(path) -    """ - -    #: if set to `False` it indicates that the loader cannot provide access -    #: to the source of templates. -    #: -    #: .. versionadded:: 2.4 -    has_source_access = True - -    def get_source( -        self, environment: "Environment", template: str -    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: -        """Get the template source, filename and reload helper for a template. -        It's passed the environment and template name and has to return a -        tuple in the form ``(source, filename, uptodate)`` or raise a -        `TemplateNotFound` error if it can't locate the template. - -        The source part of the returned tuple must be the source of the -        template as a string. The filename should be the name of the -        file on the filesystem if it was loaded from there, otherwise -        ``None``. The filename is used by Python for the tracebacks -        if no loader extension is used. - -        The last item in the tuple is the `uptodate` function.  If auto -        reloading is enabled it's always called to check if the template -        changed.  No arguments are passed so the function must store the -        old state somewhere (for example in a closure).  If it returns `False` -        the template will be reloaded. -        """ -        if not self.has_source_access: -            raise RuntimeError( -                f"{type(self).__name__} cannot provide access to the source" -            ) -        raise TemplateNotFound(template) - -    def list_templates(self) -> t.List[str]: -        """Iterates over all templates.  If the loader does not support that -        it should raise a :exc:`TypeError` which is the default behavior. -        """ -        raise TypeError("this loader cannot iterate over all templates") - -    @internalcode -    def load( -        self, -        environment: "Environment", -        name: str, -        globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -    ) -> "Template": -        """Loads a template.  This method looks up the template in the cache -        or loads one by calling :meth:`get_source`.  Subclasses should not -        override this method as loaders working on collections of other -        loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`) -        will not call this method but `get_source` directly. -        """ -        code = None -        if globals is None: -            globals = {} - -        # first we try to get the source for this template together -        # with the filename and the uptodate function. -        source, filename, uptodate = self.get_source(environment, name) - -        # try to load the code from the bytecode cache if there is a -        # bytecode cache configured. 
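# Sketch of wiring up the bytecode cache consulted by load() below; the cache
# directory and template folder are example paths.
from jinja2 import Environment, FileSystemBytecodeCache, FileSystemLoader

env = Environment(
    loader=FileSystemLoader("templates"),
    bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache"),
)
# Compiled template code is written to and re-read from the cache directory
# across processes, skipping recompilation of unchanged templates.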
-        bcc = environment.bytecode_cache -        if bcc is not None: -            bucket = bcc.get_bucket(environment, name, filename, source) -            code = bucket.code - -        # if we don't have code so far (not cached, no longer up to -        # date) etc. we compile the template -        if code is None: -            code = environment.compile(source, name, filename) - -        # if the bytecode cache is available and the bucket doesn't -        # have a code so far, we give the bucket the new code and put -        # it back to the bytecode cache. -        if bcc is not None and bucket.code is None: -            bucket.code = code -            bcc.set_bucket(bucket) - -        return environment.template_class.from_code( -            environment, code, globals, uptodate -        ) - - -class FileSystemLoader(BaseLoader): -    """Load templates from a directory in the file system. - -    The path can be relative or absolute. Relative paths are relative to -    the current working directory. - -    .. code-block:: python - -        loader = FileSystemLoader("templates") - -    A list of paths can be given. The directories will be searched in -    order, stopping at the first matching template. - -    .. code-block:: python - -        loader = FileSystemLoader(["/override/templates", "/default/templates"]) - -    :param searchpath: A path, or list of paths, to the directory that -        contains the templates. -    :param encoding: Use this encoding to read the text from template -        files. -    :param followlinks: Follow symbolic links in the path. - -    .. versionchanged:: 2.8 -        Added the ``followlinks`` parameter. -    """ - -    def __init__( -        self, -        searchpath: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]], -        encoding: str = "utf-8", -        followlinks: bool = False, -    ) -> None: -        if not isinstance(searchpath, abc.Iterable) or isinstance(searchpath, str): -            searchpath = [searchpath] - -        self.searchpath = [os.fspath(p) for p in searchpath] -        self.encoding = encoding -        self.followlinks = followlinks - -    def get_source( -        self, environment: "Environment", template: str -    ) -> t.Tuple[str, str, t.Callable[[], bool]]: -        pieces = split_template_path(template) - -        for searchpath in self.searchpath: -            # Use posixpath even on Windows to avoid "drive:" or UNC -            # segments breaking out of the search directory. -            filename = posixpath.join(searchpath, *pieces) - -            if os.path.isfile(filename): -                break -        else: -            raise TemplateNotFound(template) - -        with open(filename, encoding=self.encoding) as f: -            contents = f.read() - -        mtime = os.path.getmtime(filename) - -        def uptodate() -> bool: -            try: -                return os.path.getmtime(filename) == mtime -            except OSError: -                return False - -        # Use normpath to convert Windows altsep to sep. 
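# Sketch of the loader in use with auto-reload, which relies on the
# ``uptodate`` closure returned below; "templates" and "index.html" are
# example names.
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("templates"), auto_reload=True)
page = env.get_template("index.html")
print(page.render(title="Hello"))
# Editing templates/index.html on disk makes uptodate() return False, so the
# template is recompiled on the next get_template() call.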
-        return contents, os.path.normpath(filename), uptodate - -    def list_templates(self) -> t.List[str]: -        found = set() -        for searchpath in self.searchpath: -            walk_dir = os.walk(searchpath, followlinks=self.followlinks) -            for dirpath, _, filenames in walk_dir: -                for filename in filenames: -                    template = ( -                        os.path.join(dirpath, filename)[len(searchpath) :] -                        .strip(os.path.sep) -                        .replace(os.path.sep, "/") -                    ) -                    if template[:2] == "./": -                        template = template[2:] -                    if template not in found: -                        found.add(template) -        return sorted(found) - - -class PackageLoader(BaseLoader): -    """Load templates from a directory in a Python package. - -    :param package_name: Import name of the package that contains the -        template directory. -    :param package_path: Directory within the imported package that -        contains the templates. -    :param encoding: Encoding of template files. - -    The following example looks up templates in the ``pages`` directory -    within the ``project.ui`` package. - -    .. code-block:: python - -        loader = PackageLoader("project.ui", "pages") - -    Only packages installed as directories (standard pip behavior) or -    zip/egg files (less common) are supported. The Python API for -    introspecting data in packages is too limited to support other -    installation methods the way this loader requires. - -    There is limited support for :pep:`420` namespace packages. The -    template directory is assumed to only be in one namespace -    contributor. Zip files contributing to a namespace are not -    supported. - -    .. versionchanged:: 3.0 -        No longer uses ``setuptools`` as a dependency. - -    .. versionchanged:: 3.0 -        Limited PEP 420 namespace package support. -    """ - -    def __init__( -        self, -        package_name: str, -        package_path: "str" = "templates", -        encoding: str = "utf-8", -    ) -> None: -        package_path = os.path.normpath(package_path).rstrip(os.path.sep) - -        # normpath preserves ".", which isn't valid in zip paths. -        if package_path == os.path.curdir: -            package_path = "" -        elif package_path[:2] == os.path.curdir + os.path.sep: -            package_path = package_path[2:] - -        self.package_path = package_path -        self.package_name = package_name -        self.encoding = encoding - -        # Make sure the package exists. This also makes namespace -        # packages work, otherwise get_loader returns None. -        import_module(package_name) -        spec = importlib.util.find_spec(package_name) -        assert spec is not None, "An import spec was not found for the package." -        loader = spec.loader -        assert loader is not None, "A loader was not found for the package." 
-        self._loader = loader -        self._archive = None -        template_root = None - -        if isinstance(loader, zipimport.zipimporter): -            self._archive = loader.archive -            pkgdir = next(iter(spec.submodule_search_locations))  # type: ignore -            template_root = os.path.join(pkgdir, package_path).rstrip(os.path.sep) -        else: -            roots: t.List[str] = [] - -            # One element for regular packages, multiple for namespace -            # packages, or None for single module file. -            if spec.submodule_search_locations: -                roots.extend(spec.submodule_search_locations) -            # A single module file, use the parent directory instead. -            elif spec.origin is not None: -                roots.append(os.path.dirname(spec.origin)) - -            for root in roots: -                root = os.path.join(root, package_path) - -                if os.path.isdir(root): -                    template_root = root -                    break - -        if template_root is None: -            raise ValueError( -                f"The {package_name!r} package was not installed in a" -                " way that PackageLoader understands." -            ) - -        self._template_root = template_root - -    def get_source( -        self, environment: "Environment", template: str -    ) -> t.Tuple[str, str, t.Optional[t.Callable[[], bool]]]: -        # Use posixpath even on Windows to avoid "drive:" or UNC -        # segments breaking out of the search directory. Use normpath to -        # convert Windows altsep to sep. -        p = os.path.normpath( -            posixpath.join(self._template_root, *split_template_path(template)) -        ) -        up_to_date: t.Optional[t.Callable[[], bool]] - -        if self._archive is None: -            # Package is a directory. -            if not os.path.isfile(p): -                raise TemplateNotFound(template) - -            with open(p, "rb") as f: -                source = f.read() - -            mtime = os.path.getmtime(p) - -            def up_to_date() -> bool: -                return os.path.isfile(p) and os.path.getmtime(p) == mtime - -        else: -            # Package is a zip file. -            try: -                source = self._loader.get_data(p)  # type: ignore -            except OSError as e: -                raise TemplateNotFound(template) from e - -            # Could use the zip's mtime for all template mtimes, but -            # would need to safely reload the module if it's out of -            # date, so just report it as always current. -            up_to_date = None - -        return source.decode(self.encoding), p, up_to_date - -    def list_templates(self) -> t.List[str]: -        results: t.List[str] = [] - -        if self._archive is None: -            # Package is a directory. -            offset = len(self._template_root) - -            for dirpath, _, filenames in os.walk(self._template_root): -                dirpath = dirpath[offset:].lstrip(os.path.sep) -                results.extend( -                    os.path.join(dirpath, name).replace(os.path.sep, "/") -                    for name in filenames -                ) -        else: -            if not hasattr(self._loader, "_files"): -                raise TypeError( -                    "This zip import does not have the required" -                    " metadata to list templates." -                ) - -            # Package is a zip file. 
-            prefix = ( -                self._template_root[len(self._archive) :].lstrip(os.path.sep) -                + os.path.sep -            ) -            offset = len(prefix) - -            for name in self._loader._files.keys(): -                # Find names under the templates directory that aren't directories. -                if name.startswith(prefix) and name[-1] != os.path.sep: -                    results.append(name[offset:].replace(os.path.sep, "/")) - -        results.sort() -        return results - - -class DictLoader(BaseLoader): -    """Loads a template from a Python dict mapping template names to -    template source.  This loader is useful for unittesting: - -    >>> loader = DictLoader({'index.html': 'source here'}) - -    Because auto reloading is rarely useful this is disabled per default. -    """ - -    def __init__(self, mapping: t.Mapping[str, str]) -> None: -        self.mapping = mapping - -    def get_source( -        self, environment: "Environment", template: str -    ) -> t.Tuple[str, None, t.Callable[[], bool]]: -        if template in self.mapping: -            source = self.mapping[template] -            return source, None, lambda: source == self.mapping.get(template) -        raise TemplateNotFound(template) - -    def list_templates(self) -> t.List[str]: -        return sorted(self.mapping) - - -class FunctionLoader(BaseLoader): -    """A loader that is passed a function which does the loading.  The -    function receives the name of the template and has to return either -    a string with the template source, a tuple in the form ``(source, -    filename, uptodatefunc)`` or `None` if the template does not exist. - -    >>> def load_template(name): -    ...     if name == 'index.html': -    ...         return '...' -    ... -    >>> loader = FunctionLoader(load_template) - -    The `uptodatefunc` is a function that is called if autoreload is enabled -    and has to return `True` if the template is still up to date.  For more -    details have a look at :meth:`BaseLoader.get_source` which has the same -    return value. -    """ - -    def __init__( -        self, -        load_func: t.Callable[ -            [str], -            t.Optional[ -                t.Union[ -                    str, t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]] -                ] -            ], -        ], -    ) -> None: -        self.load_func = load_func - -    def get_source( -        self, environment: "Environment", template: str -    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: -        rv = self.load_func(template) - -        if rv is None: -            raise TemplateNotFound(template) - -        if isinstance(rv, str): -            return rv, None, None - -        return rv - - -class PrefixLoader(BaseLoader): -    """A loader that is passed a dict of loaders where each loader is bound -    to a prefix.  The prefix is delimited from the template by a slash per -    default, which can be changed by setting the `delimiter` argument to -    something else:: - -        loader = PrefixLoader({ -            'app1':     PackageLoader('mypackage.app1'), -            'app2':     PackageLoader('mypackage.app2') -        }) - -    By loading ``'app1/index.html'`` the file from the app1 package is loaded, -    by loading ``'app2/index.html'`` the file from the second. 
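# A self-contained sketch of the prefix routing described above, using
# DictLoader so it runs without packages on disk; the names are examples.
from jinja2 import DictLoader, Environment, PrefixLoader

loader = PrefixLoader({
    "app1": DictLoader({"index.html": "app1 index"}),
    "app2": DictLoader({"index.html": "app2 index"}),
})
env = Environment(loader=loader)
print(env.get_template("app1/index.html").render())  # -> app1 index
print(env.get_template("app2/index.html").render())  # -> app2 index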
-    """ - -    def __init__( -        self, mapping: t.Mapping[str, BaseLoader], delimiter: str = "/" -    ) -> None: -        self.mapping = mapping -        self.delimiter = delimiter - -    def get_loader(self, template: str) -> t.Tuple[BaseLoader, str]: -        try: -            prefix, name = template.split(self.delimiter, 1) -            loader = self.mapping[prefix] -        except (ValueError, KeyError) as e: -            raise TemplateNotFound(template) from e -        return loader, name - -    def get_source( -        self, environment: "Environment", template: str -    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: -        loader, name = self.get_loader(template) -        try: -            return loader.get_source(environment, name) -        except TemplateNotFound as e: -            # re-raise the exception with the correct filename here. -            # (the one that includes the prefix) -            raise TemplateNotFound(template) from e - -    @internalcode -    def load( -        self, -        environment: "Environment", -        name: str, -        globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -    ) -> "Template": -        loader, local_name = self.get_loader(name) -        try: -            return loader.load(environment, local_name, globals) -        except TemplateNotFound as e: -            # re-raise the exception with the correct filename here. -            # (the one that includes the prefix) -            raise TemplateNotFound(name) from e - -    def list_templates(self) -> t.List[str]: -        result = [] -        for prefix, loader in self.mapping.items(): -            for template in loader.list_templates(): -                result.append(prefix + self.delimiter + template) -        return result - - -class ChoiceLoader(BaseLoader): -    """This loader works like the `PrefixLoader` just that no prefix is -    specified.  If a template could not be found by one loader the next one -    is tried. - -    >>> loader = ChoiceLoader([ -    ...     FileSystemLoader('/path/to/user/templates'), -    ...     FileSystemLoader('/path/to/system/templates') -    ... ]) - -    This is useful if you want to allow users to override builtin templates -    from a different location. 
-    """ - -    def __init__(self, loaders: t.Sequence[BaseLoader]) -> None: -        self.loaders = loaders - -    def get_source( -        self, environment: "Environment", template: str -    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: -        for loader in self.loaders: -            try: -                return loader.get_source(environment, template) -            except TemplateNotFound: -                pass -        raise TemplateNotFound(template) - -    @internalcode -    def load( -        self, -        environment: "Environment", -        name: str, -        globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -    ) -> "Template": -        for loader in self.loaders: -            try: -                return loader.load(environment, name, globals) -            except TemplateNotFound: -                pass -        raise TemplateNotFound(name) - -    def list_templates(self) -> t.List[str]: -        found = set() -        for loader in self.loaders: -            found.update(loader.list_templates()) -        return sorted(found) - - -class _TemplateModule(ModuleType): -    """Like a normal module but with support for weak references""" - - -class ModuleLoader(BaseLoader): -    """This loader loads templates from precompiled templates. - -    Example usage: - -    >>> loader = ChoiceLoader([ -    ...     ModuleLoader('/path/to/compiled/templates'), -    ...     FileSystemLoader('/path/to/templates') -    ... ]) - -    Templates can be precompiled with :meth:`Environment.compile_templates`. -    """ - -    has_source_access = False - -    def __init__( -        self, path: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]] -    ) -> None: -        package_name = f"_jinja2_module_templates_{id(self):x}" - -        # create a fake module that looks for the templates in the -        # path given. -        mod = _TemplateModule(package_name) - -        if not isinstance(path, abc.Iterable) or isinstance(path, str): -            path = [path] - -        mod.__path__ = [os.fspath(p) for p in path] - -        sys.modules[package_name] = weakref.proxy( -            mod, lambda x: sys.modules.pop(package_name, None) -        ) - -        # the only strong reference, the sys.modules entry is weak -        # so that the garbage collector can remove it once the -        # loader that created it goes out of business. -        self.module = mod -        self.package_name = package_name - -    @staticmethod -    def get_template_key(name: str) -> str: -        return "tmpl_" + sha1(name.encode("utf-8")).hexdigest() - -    @staticmethod -    def get_module_filename(name: str) -> str: -        return ModuleLoader.get_template_key(name) + ".py" - -    @internalcode -    def load( -        self, -        environment: "Environment", -        name: str, -        globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -    ) -> "Template": -        key = self.get_template_key(name) -        module = f"{self.package_name}.{key}" -        mod = getattr(self.module, module, None) - -        if mod is None: -            try: -                mod = __import__(module, None, None, ["root"]) -            except ImportError as e: -                raise TemplateNotFound(name) from e - -            # remove the entry from sys.modules, we only want the attribute -            # on the module object we have stored on the loader. 
-            sys.modules.pop(module, None) - -        if globals is None: -            globals = {} - -        return environment.template_class.from_module_dict( -            environment, mod.__dict__, globals -        ) diff --git a/venv/lib/python3.11/site-packages/jinja2/meta.py b/venv/lib/python3.11/site-packages/jinja2/meta.py deleted file mode 100644 index 0057d6e..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/meta.py +++ /dev/null @@ -1,111 +0,0 @@ -"""Functions that expose information about templates that might be -interesting for introspection. -""" -import typing as t - -from . import nodes -from .compiler import CodeGenerator -from .compiler import Frame - -if t.TYPE_CHECKING: -    from .environment import Environment - - -class TrackingCodeGenerator(CodeGenerator): -    """We abuse the code generator for introspection.""" - -    def __init__(self, environment: "Environment") -> None: -        super().__init__(environment, "<introspection>", "<introspection>") -        self.undeclared_identifiers: t.Set[str] = set() - -    def write(self, x: str) -> None: -        """Don't write.""" - -    def enter_frame(self, frame: Frame) -> None: -        """Remember all undeclared identifiers.""" -        super().enter_frame(frame) - -        for _, (action, param) in frame.symbols.loads.items(): -            if action == "resolve" and param not in self.environment.globals: -                self.undeclared_identifiers.add(param) - - -def find_undeclared_variables(ast: nodes.Template) -> t.Set[str]: -    """Returns a set of all variables in the AST that will be looked up from -    the context at runtime.  Because at compile time it's not known which -    variables will be used depending on the path the execution takes at -    runtime, all variables are returned. - -    >>> from jinja2 import Environment, meta -    >>> env = Environment() -    >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}') -    >>> meta.find_undeclared_variables(ast) == {'bar'} -    True - -    .. admonition:: Implementation - -       Internally the code generator is used for finding undeclared variables. -       This is good to know because the code generator might raise a -       :exc:`TemplateAssertionError` during compilation and as a matter of -       fact this function can currently raise that exception as well. -    """ -    codegen = TrackingCodeGenerator(ast.environment)  # type: ignore -    codegen.visit(ast) -    return codegen.undeclared_identifiers - - -_ref_types = (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include) -_RefType = t.Union[nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include] - - -def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]]: -    """Finds all the referenced templates from the AST.  This will return an -    iterator over all the hardcoded template extensions, inclusions and -    imports.  If dynamic inheritance or inclusion is used, `None` will be -    yielded. - -    >>> from jinja2 import Environment, meta -    >>> env = Environment() -    >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}') -    >>> list(meta.find_referenced_templates(ast)) -    ['layout.html', None] - -    This function is useful for dependency tracking.  For example if you want -    to rebuild parts of the website after a layout template has changed. 
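# Sketch of the dependency-tracking idea mentioned above; the loader contents
# and template names are illustrative.
from jinja2 import DictLoader, Environment, meta

env = Environment(loader=DictLoader({
    "page.html": '{% extends "layout.html" %}{% include "sidebar.html" %}',
    "layout.html": "...",
    "sidebar.html": "...",
}))

deps = {}
for name in env.list_templates():
    source = env.loader.get_source(env, name)[0]
    deps[name] = {t for t in meta.find_referenced_templates(env.parse(source)) if t}

print(deps["page.html"])  # -> {'layout.html', 'sidebar.html'}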
-    """ -    template_name: t.Any - -    for node in ast.find_all(_ref_types): -        template: nodes.Expr = node.template  # type: ignore - -        if not isinstance(template, nodes.Const): -            # a tuple with some non consts in there -            if isinstance(template, (nodes.Tuple, nodes.List)): -                for template_name in template.items: -                    # something const, only yield the strings and ignore -                    # non-string consts that really just make no sense -                    if isinstance(template_name, nodes.Const): -                        if isinstance(template_name.value, str): -                            yield template_name.value -                    # something dynamic in there -                    else: -                        yield None -            # something dynamic we don't know about here -            else: -                yield None -            continue -        # constant is a basestring, direct template name -        if isinstance(template.value, str): -            yield template.value -        # a tuple or list (latter *should* not happen) made of consts, -        # yield the consts that are strings.  We could warn here for -        # non string values -        elif isinstance(node, nodes.Include) and isinstance( -            template.value, (tuple, list) -        ): -            for template_name in template.value: -                if isinstance(template_name, str): -                    yield template_name -        # something else we don't care about, we could warn here -        else: -            yield None diff --git a/venv/lib/python3.11/site-packages/jinja2/nativetypes.py b/venv/lib/python3.11/site-packages/jinja2/nativetypes.py deleted file mode 100644 index 71db8cc..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/nativetypes.py +++ /dev/null @@ -1,130 +0,0 @@ -import typing as t -from ast import literal_eval -from ast import parse -from itertools import chain -from itertools import islice -from types import GeneratorType - -from . import nodes -from .compiler import CodeGenerator -from .compiler import Frame -from .compiler import has_safe_repr -from .environment import Environment -from .environment import Template - - -def native_concat(values: t.Iterable[t.Any]) -> t.Optional[t.Any]: -    """Return a native Python type from the list of compiled nodes. If -    the result is a single node, its value is returned. Otherwise, the -    nodes are concatenated as strings. If the result can be parsed with -    :func:`ast.literal_eval`, the parsed value is returned. Otherwise, -    the string is returned. - -    :param values: Iterable of outputs to concatenate. -    """ -    head = list(islice(values, 2)) - -    if not head: -        return None - -    if len(head) == 1: -        raw = head[0] -        if not isinstance(raw, str): -            return raw -    else: -        if isinstance(values, GeneratorType): -            values = chain(head, values) -        raw = "".join([str(v) for v in values]) - -    try: -        return literal_eval( -            # In Python 3.10+ ast.literal_eval removes leading spaces/tabs -            # from the given string. For backwards compatibility we need to -            # parse the string ourselves without removing leading spaces/tabs. 
-            parse(raw, mode="eval") -        ) -    except (ValueError, SyntaxError, MemoryError): -        return raw - - -class NativeCodeGenerator(CodeGenerator): -    """A code generator which renders Python types by not adding -    ``str()`` around output nodes. -    """ - -    @staticmethod -    def _default_finalize(value: t.Any) -> t.Any: -        return value - -    def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: -        return repr("".join([str(v) for v in group])) - -    def _output_child_to_const( -        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo -    ) -> t.Any: -        const = node.as_const(frame.eval_ctx) - -        if not has_safe_repr(const): -            raise nodes.Impossible() - -        if isinstance(node, nodes.TemplateData): -            return const - -        return finalize.const(const)  # type: ignore - -    def _output_child_pre( -        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo -    ) -> None: -        if finalize.src is not None: -            self.write(finalize.src) - -    def _output_child_post( -        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo -    ) -> None: -        if finalize.src is not None: -            self.write(")") - - -class NativeEnvironment(Environment): -    """An environment that renders templates to native Python types.""" - -    code_generator_class = NativeCodeGenerator -    concat = staticmethod(native_concat)  # type: ignore - - -class NativeTemplate(Template): -    environment_class = NativeEnvironment - -    def render(self, *args: t.Any, **kwargs: t.Any) -> t.Any: -        """Render the template to produce a native Python type. If the -        result is a single node, its value is returned. Otherwise, the -        nodes are concatenated as strings. If the result can be parsed -        with :func:`ast.literal_eval`, the parsed value is returned. -        Otherwise, the string is returned. -        """ -        ctx = self.new_context(dict(*args, **kwargs)) - -        try: -            return self.environment_class.concat(  # type: ignore -                self.root_render_func(ctx) -            ) -        except Exception: -            return self.environment.handle_exception() - -    async def render_async(self, *args: t.Any, **kwargs: t.Any) -> t.Any: -        if not self.environment.is_async: -            raise RuntimeError( -                "The environment was not created with async mode enabled." -            ) - -        ctx = self.new_context(dict(*args, **kwargs)) - -        try: -            return self.environment_class.concat(  # type: ignore -                [n async for n in self.root_render_func(ctx)]  # type: ignore -            ) -        except Exception: -            return self.environment.handle_exception() - - -NativeEnvironment.template_class = NativeTemplate diff --git a/venv/lib/python3.11/site-packages/jinja2/nodes.py b/venv/lib/python3.11/site-packages/jinja2/nodes.py deleted file mode 100644 index b2f88d9..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/nodes.py +++ /dev/null @@ -1,1204 +0,0 @@ -"""AST nodes generated by the parser for the compiler. Also provides -some node tree helper functions used by the parser and compiler in order -to normalize nodes. 
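# Sketch of the NativeEnvironment/NativeTemplate behaviour defined in
# nativetypes.py above; the expression and values are examples.
from jinja2.nativetypes import NativeEnvironment

env = NativeEnvironment()
result = env.from_string("{{ x + y }}").render(x=4, y=2)
print(result, type(result))   # -> 6 <class 'int'>, not the string "6"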
-""" -import inspect -import operator -import typing as t -from collections import deque - -from markupsafe import Markup - -from .utils import _PassArg - -if t.TYPE_CHECKING: -    import typing_extensions as te -    from .environment import Environment - -_NodeBound = t.TypeVar("_NodeBound", bound="Node") - -_binop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = { -    "*": operator.mul, -    "/": operator.truediv, -    "//": operator.floordiv, -    "**": operator.pow, -    "%": operator.mod, -    "+": operator.add, -    "-": operator.sub, -} - -_uaop_to_func: t.Dict[str, t.Callable[[t.Any], t.Any]] = { -    "not": operator.not_, -    "+": operator.pos, -    "-": operator.neg, -} - -_cmpop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = { -    "eq": operator.eq, -    "ne": operator.ne, -    "gt": operator.gt, -    "gteq": operator.ge, -    "lt": operator.lt, -    "lteq": operator.le, -    "in": lambda a, b: a in b, -    "notin": lambda a, b: a not in b, -} - - -class Impossible(Exception): -    """Raised if the node could not perform a requested action.""" - - -class NodeType(type): -    """A metaclass for nodes that handles the field and attribute -    inheritance.  fields and attributes from the parent class are -    automatically forwarded to the child.""" - -    def __new__(mcs, name, bases, d):  # type: ignore -        for attr in "fields", "attributes": -            storage = [] -            storage.extend(getattr(bases[0] if bases else object, attr, ())) -            storage.extend(d.get(attr, ())) -            assert len(bases) <= 1, "multiple inheritance not allowed" -            assert len(storage) == len(set(storage)), "layout conflict" -            d[attr] = tuple(storage) -        d.setdefault("abstract", False) -        return type.__new__(mcs, name, bases, d) - - -class EvalContext: -    """Holds evaluation time information.  Custom attributes can be attached -    to it in extensions. -    """ - -    def __init__( -        self, environment: "Environment", template_name: t.Optional[str] = None -    ) -> None: -        self.environment = environment -        if callable(environment.autoescape): -            self.autoescape = environment.autoescape(template_name) -        else: -            self.autoescape = environment.autoescape -        self.volatile = False - -    def save(self) -> t.Mapping[str, t.Any]: -        return self.__dict__.copy() - -    def revert(self, old: t.Mapping[str, t.Any]) -> None: -        self.__dict__.clear() -        self.__dict__.update(old) - - -def get_eval_context(node: "Node", ctx: t.Optional[EvalContext]) -> EvalContext: -    if ctx is None: -        if node.environment is None: -            raise RuntimeError( -                "if no eval context is passed, the node must have an" -                " attached environment." -            ) -        return EvalContext(node.environment) -    return ctx - - -class Node(metaclass=NodeType): -    """Baseclass for all Jinja nodes.  There are a number of nodes available -    of different types.  There are four major types: - -    -   :class:`Stmt`: statements -    -   :class:`Expr`: expressions -    -   :class:`Helper`: helper nodes -    -   :class:`Template`: the outermost wrapper node - -    All nodes have fields and attributes.  Fields may be other nodes, lists, -    or arbitrary values.  Fields are passed to the constructor as regular -    positional arguments, attributes as keyword arguments.  
Each node has -    two attributes: `lineno` (the line number of the node) and `environment`. -    The `environment` attribute is set at the end of the parsing process for -    all nodes automatically. -    """ - -    fields: t.Tuple[str, ...] = () -    attributes: t.Tuple[str, ...] = ("lineno", "environment") -    abstract = True - -    lineno: int -    environment: t.Optional["Environment"] - -    def __init__(self, *fields: t.Any, **attributes: t.Any) -> None: -        if self.abstract: -            raise TypeError("abstract nodes are not instantiable") -        if fields: -            if len(fields) != len(self.fields): -                if not self.fields: -                    raise TypeError(f"{type(self).__name__!r} takes 0 arguments") -                raise TypeError( -                    f"{type(self).__name__!r} takes 0 or {len(self.fields)}" -                    f" argument{'s' if len(self.fields) != 1 else ''}" -                ) -            for name, arg in zip(self.fields, fields): -                setattr(self, name, arg) -        for attr in self.attributes: -            setattr(self, attr, attributes.pop(attr, None)) -        if attributes: -            raise TypeError(f"unknown attribute {next(iter(attributes))!r}") - -    def iter_fields( -        self, -        exclude: t.Optional[t.Container[str]] = None, -        only: t.Optional[t.Container[str]] = None, -    ) -> t.Iterator[t.Tuple[str, t.Any]]: -        """This method iterates over all fields that are defined and yields -        ``(key, value)`` tuples.  Per default all fields are returned, but -        it's possible to limit that to some fields by providing the `only` -        parameter or to exclude some using the `exclude` parameter.  Both -        should be sets or tuples of field names. -        """ -        for name in self.fields: -            if ( -                (exclude is None and only is None) -                or (exclude is not None and name not in exclude) -                or (only is not None and name in only) -            ): -                try: -                    yield name, getattr(self, name) -                except AttributeError: -                    pass - -    def iter_child_nodes( -        self, -        exclude: t.Optional[t.Container[str]] = None, -        only: t.Optional[t.Container[str]] = None, -    ) -> t.Iterator["Node"]: -        """Iterates over all direct child nodes of the node.  This iterates -        over all fields and yields the values of they are nodes.  If the value -        of a field is a list all the nodes in that list are returned. -        """ -        for _, item in self.iter_fields(exclude, only): -            if isinstance(item, list): -                for n in item: -                    if isinstance(n, Node): -                        yield n -            elif isinstance(item, Node): -                yield item - -    def find(self, node_type: t.Type[_NodeBound]) -> t.Optional[_NodeBound]: -        """Find the first node of a given type.  If no such node exists the -        return value is `None`. -        """ -        for result in self.find_all(node_type): -            return result - -        return None - -    def find_all( -        self, node_type: t.Union[t.Type[_NodeBound], t.Tuple[t.Type[_NodeBound], ...]] -    ) -> t.Iterator[_NodeBound]: -        """Find all the nodes of a given type.  If the type is a tuple, -        the check is performed for any of the tuple items. 
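The traversal helpers above (``iter_child_nodes``, ``find``, ``find_all``) can be used directly on a parsed template; a minimal sketch::

    from jinja2 import Environment
    from jinja2 import nodes

    env = Environment()
    ast = env.parse("{% for item in items %}{{ item.name }}{% endfor %}")
    # find_all walks the whole tree; collect every Name node used in "load" context.
    loaded = [n.name for n in ast.find_all(nodes.Name) if n.ctx == "load"]
    print(loaded)  # ['items', 'item']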
-        """ -        for child in self.iter_child_nodes(): -            if isinstance(child, node_type): -                yield child  # type: ignore -            yield from child.find_all(node_type) - -    def set_ctx(self, ctx: str) -> "Node": -        """Reset the context of a node and all child nodes.  Per default the -        parser will all generate nodes that have a 'load' context as it's the -        most common one.  This method is used in the parser to set assignment -        targets and other nodes to a store context. -        """ -        todo = deque([self]) -        while todo: -            node = todo.popleft() -            if "ctx" in node.fields: -                node.ctx = ctx  # type: ignore -            todo.extend(node.iter_child_nodes()) -        return self - -    def set_lineno(self, lineno: int, override: bool = False) -> "Node": -        """Set the line numbers of the node and children.""" -        todo = deque([self]) -        while todo: -            node = todo.popleft() -            if "lineno" in node.attributes: -                if node.lineno is None or override: -                    node.lineno = lineno -            todo.extend(node.iter_child_nodes()) -        return self - -    def set_environment(self, environment: "Environment") -> "Node": -        """Set the environment for all nodes.""" -        todo = deque([self]) -        while todo: -            node = todo.popleft() -            node.environment = environment -            todo.extend(node.iter_child_nodes()) -        return self - -    def __eq__(self, other: t.Any) -> bool: -        if type(self) is not type(other): -            return NotImplemented - -        return tuple(self.iter_fields()) == tuple(other.iter_fields()) - -    __hash__ = object.__hash__ - -    def __repr__(self) -> str: -        args_str = ", ".join(f"{a}={getattr(self, a, None)!r}" for a in self.fields) -        return f"{type(self).__name__}({args_str})" - -    def dump(self) -> str: -        def _dump(node: t.Union[Node, t.Any]) -> None: -            if not isinstance(node, Node): -                buf.append(repr(node)) -                return - -            buf.append(f"nodes.{type(node).__name__}(") -            if not node.fields: -                buf.append(")") -                return -            for idx, field in enumerate(node.fields): -                if idx: -                    buf.append(", ") -                value = getattr(node, field) -                if isinstance(value, list): -                    buf.append("[") -                    for idx, item in enumerate(value): -                        if idx: -                            buf.append(", ") -                        _dump(item) -                    buf.append("]") -                else: -                    _dump(value) -            buf.append(")") - -        buf: t.List[str] = [] -        _dump(self) -        return "".join(buf) - - -class Stmt(Node): -    """Base node for all statements.""" - -    abstract = True - - -class Helper(Node): -    """Nodes that exist in a specific context only.""" - -    abstract = True - - -class Template(Node): -    """Node that represents a template.  This must be the outermost node that -    is passed to the compiler. -    """ - -    fields = ("body",) -    body: t.List[Node] - - -class Output(Stmt): -    """A node that holds multiple expressions which are then printed out. -    This is used both for the `print` statement and the regular template data. 
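``dump`` above produces a constructor-style representation of the tree, which is handy when debugging extensions; a quick sketch::

    from jinja2 import Environment

    env = Environment()
    print(env.parse("{{ 1 + 2 }}").dump())
    # prints something like:
    # nodes.Template([nodes.Output([nodes.Add(nodes.Const(1), nodes.Const(2))])])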
-    """ - -    fields = ("nodes",) -    nodes: t.List["Expr"] - - -class Extends(Stmt): -    """Represents an extends statement.""" - -    fields = ("template",) -    template: "Expr" - - -class For(Stmt): -    """The for loop.  `target` is the target for the iteration (usually a -    :class:`Name` or :class:`Tuple`), `iter` the iterable.  `body` is a list -    of nodes that are used as loop-body, and `else_` a list of nodes for the -    `else` block.  If no else node exists it has to be an empty list. - -    For filtered nodes an expression can be stored as `test`, otherwise `None`. -    """ - -    fields = ("target", "iter", "body", "else_", "test", "recursive") -    target: Node -    iter: Node -    body: t.List[Node] -    else_: t.List[Node] -    test: t.Optional[Node] -    recursive: bool - - -class If(Stmt): -    """If `test` is true, `body` is rendered, else `else_`.""" - -    fields = ("test", "body", "elif_", "else_") -    test: Node -    body: t.List[Node] -    elif_: t.List["If"] -    else_: t.List[Node] - - -class Macro(Stmt): -    """A macro definition.  `name` is the name of the macro, `args` a list of -    arguments and `defaults` a list of defaults if there are any.  `body` is -    a list of nodes for the macro body. -    """ - -    fields = ("name", "args", "defaults", "body") -    name: str -    args: t.List["Name"] -    defaults: t.List["Expr"] -    body: t.List[Node] - - -class CallBlock(Stmt): -    """Like a macro without a name but a call instead.  `call` is called with -    the unnamed macro as `caller` argument this node holds. -    """ - -    fields = ("call", "args", "defaults", "body") -    call: "Call" -    args: t.List["Name"] -    defaults: t.List["Expr"] -    body: t.List[Node] - - -class FilterBlock(Stmt): -    """Node for filter sections.""" - -    fields = ("body", "filter") -    body: t.List[Node] -    filter: "Filter" - - -class With(Stmt): -    """Specific node for with statements.  In older versions of Jinja the -    with statement was implemented on the base of the `Scope` node instead. - -    .. versionadded:: 2.9.3 -    """ - -    fields = ("targets", "values", "body") -    targets: t.List["Expr"] -    values: t.List["Expr"] -    body: t.List[Node] - - -class Block(Stmt): -    """A node that represents a block. - -    .. versionchanged:: 3.0.0 -        the `required` field was added. -    """ - -    fields = ("name", "body", "scoped", "required") -    name: str -    body: t.List[Node] -    scoped: bool -    required: bool - - -class Include(Stmt): -    """A node that represents the include tag.""" - -    fields = ("template", "with_context", "ignore_missing") -    template: "Expr" -    with_context: bool -    ignore_missing: bool - - -class Import(Stmt): -    """A node that represents the import tag.""" - -    fields = ("template", "target", "with_context") -    template: "Expr" -    target: str -    with_context: bool - - -class FromImport(Stmt): -    """A node that represents the from import tag.  It's important to not -    pass unsafe names to the name attribute.  The compiler translates the -    attribute lookups directly into getattr calls and does *not* use the -    subscript callback of the interface.  As exported variables may not -    start with double underscores (which the parser asserts) this is not a -    problem for regular Jinja code, but if this node is used in an extension -    extra care must be taken. - -    The list of names may contain tuples if aliases are wanted. 
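The Macro, Import and FromImport statements above correspond to the template syntax in this small sketch, using an in-memory loader::

    from jinja2 import DictLoader, Environment

    env = Environment(loader=DictLoader({
        "helpers.html": "{% macro hello(name) %}Hello {{ name }}!{% endmacro %}",
        "page.html": "{% from 'helpers.html' import hello %}{{ hello('World') }}",
    }))
    print(env.get_template("page.html").render())  # Hello World!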
-    """ - -    fields = ("template", "names", "with_context") -    template: "Expr" -    names: t.List[t.Union[str, t.Tuple[str, str]]] -    with_context: bool - - -class ExprStmt(Stmt): -    """A statement that evaluates an expression and discards the result.""" - -    fields = ("node",) -    node: Node - - -class Assign(Stmt): -    """Assigns an expression to a target.""" - -    fields = ("target", "node") -    target: "Expr" -    node: Node - - -class AssignBlock(Stmt): -    """Assigns a block to a target.""" - -    fields = ("target", "filter", "body") -    target: "Expr" -    filter: t.Optional["Filter"] -    body: t.List[Node] - - -class Expr(Node): -    """Baseclass for all expressions.""" - -    abstract = True - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        """Return the value of the expression as constant or raise -        :exc:`Impossible` if this was not possible. - -        An :class:`EvalContext` can be provided, if none is given -        a default context is created which requires the nodes to have -        an attached environment. - -        .. versionchanged:: 2.4 -           the `eval_ctx` parameter was added. -        """ -        raise Impossible() - -    def can_assign(self) -> bool: -        """Check if it's possible to assign something to this node.""" -        return False - - -class BinExpr(Expr): -    """Baseclass for all binary expressions.""" - -    fields = ("left", "right") -    left: Expr -    right: Expr -    operator: str -    abstract = True - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        eval_ctx = get_eval_context(self, eval_ctx) - -        # intercepted operators cannot be folded at compile time -        if ( -            eval_ctx.environment.sandboxed -            and self.operator in eval_ctx.environment.intercepted_binops  # type: ignore -        ): -            raise Impossible() -        f = _binop_to_func[self.operator] -        try: -            return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx)) -        except Exception as e: -            raise Impossible() from e - - -class UnaryExpr(Expr): -    """Baseclass for all unary expressions.""" - -    fields = ("node",) -    node: Expr -    operator: str -    abstract = True - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        eval_ctx = get_eval_context(self, eval_ctx) - -        # intercepted operators cannot be folded at compile time -        if ( -            eval_ctx.environment.sandboxed -            and self.operator in eval_ctx.environment.intercepted_unops  # type: ignore -        ): -            raise Impossible() -        f = _uaop_to_func[self.operator] -        try: -            return f(self.node.as_const(eval_ctx)) -        except Exception as e: -            raise Impossible() from e - - -class Name(Expr): -    """Looks up a name or stores a value in a name. -    The `ctx` of the node can be one of the following values: - -    -   `store`: store a value in the name -    -   `load`: load that name -    -   `param`: like `store` but if the name was defined as function parameter. 
-    """ - -    fields = ("name", "ctx") -    name: str -    ctx: str - -    def can_assign(self) -> bool: -        return self.name not in {"true", "false", "none", "True", "False", "None"} - - -class NSRef(Expr): -    """Reference to a namespace value assignment""" - -    fields = ("name", "attr") -    name: str -    attr: str - -    def can_assign(self) -> bool: -        # We don't need any special checks here; NSRef assignments have a -        # runtime check to ensure the target is a namespace object which will -        # have been checked already as it is created using a normal assignment -        # which goes through a `Name` node. -        return True - - -class Literal(Expr): -    """Baseclass for literals.""" - -    abstract = True - - -class Const(Literal): -    """All constant values.  The parser will return this node for simple -    constants such as ``42`` or ``"foo"`` but it can be used to store more -    complex values such as lists too.  Only constants with a safe -    representation (objects where ``eval(repr(x)) == x`` is true). -    """ - -    fields = ("value",) -    value: t.Any - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        return self.value - -    @classmethod -    def from_untrusted( -        cls, -        value: t.Any, -        lineno: t.Optional[int] = None, -        environment: "t.Optional[Environment]" = None, -    ) -> "Const": -        """Return a const object if the value is representable as -        constant value in the generated code, otherwise it will raise -        an `Impossible` exception. -        """ -        from .compiler import has_safe_repr - -        if not has_safe_repr(value): -            raise Impossible() -        return cls(value, lineno=lineno, environment=environment) - - -class TemplateData(Literal): -    """A constant template string.""" - -    fields = ("data",) -    data: str - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str: -        eval_ctx = get_eval_context(self, eval_ctx) -        if eval_ctx.volatile: -            raise Impossible() -        if eval_ctx.autoescape: -            return Markup(self.data) -        return self.data - - -class Tuple(Literal): -    """For loop unpacking and some other things like multiple arguments -    for subscripts.  Like for :class:`Name` `ctx` specifies if the tuple -    is used for loading the names or storing. -    """ - -    fields = ("items", "ctx") -    items: t.List[Expr] -    ctx: str - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[t.Any, ...]: -        eval_ctx = get_eval_context(self, eval_ctx) -        return tuple(x.as_const(eval_ctx) for x in self.items) - -    def can_assign(self) -> bool: -        for item in self.items: -            if not item.can_assign(): -                return False -        return True - - -class List(Literal): -    """Any list literal such as ``[1, 2, 3]``""" - -    fields = ("items",) -    items: t.List[Expr] - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.List[t.Any]: -        eval_ctx = get_eval_context(self, eval_ctx) -        return [x.as_const(eval_ctx) for x in self.items] - - -class Dict(Literal): -    """Any dict literal such as ``{1: 2, 3: 4}``.  The items must be a list of -    :class:`Pair` nodes. 
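Constant folding via ``as_const`` also works on hand-built nodes, provided an environment is attached so a default eval context can be created; an illustrative sketch::

    from jinja2 import Environment
    from jinja2 import nodes

    env = Environment()
    node = nodes.Add(nodes.Const(1), nodes.Const(2))
    # as_const needs an attached environment when no EvalContext is passed in.
    node.set_environment(env)
    print(node.as_const())  # 3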
-    """ - -    fields = ("items",) -    items: t.List["Pair"] - -    def as_const( -        self, eval_ctx: t.Optional[EvalContext] = None -    ) -> t.Dict[t.Any, t.Any]: -        eval_ctx = get_eval_context(self, eval_ctx) -        return dict(x.as_const(eval_ctx) for x in self.items) - - -class Pair(Helper): -    """A key, value pair for dicts.""" - -    fields = ("key", "value") -    key: Expr -    value: Expr - -    def as_const( -        self, eval_ctx: t.Optional[EvalContext] = None -    ) -> t.Tuple[t.Any, t.Any]: -        eval_ctx = get_eval_context(self, eval_ctx) -        return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx) - - -class Keyword(Helper): -    """A key, value pair for keyword arguments where key is a string.""" - -    fields = ("key", "value") -    key: str -    value: Expr - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[str, t.Any]: -        eval_ctx = get_eval_context(self, eval_ctx) -        return self.key, self.value.as_const(eval_ctx) - - -class CondExpr(Expr): -    """A conditional expression (inline if expression).  (``{{ -    foo if bar else baz }}``) -    """ - -    fields = ("test", "expr1", "expr2") -    test: Expr -    expr1: Expr -    expr2: t.Optional[Expr] - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        eval_ctx = get_eval_context(self, eval_ctx) -        if self.test.as_const(eval_ctx): -            return self.expr1.as_const(eval_ctx) - -        # if we evaluate to an undefined object, we better do that at runtime -        if self.expr2 is None: -            raise Impossible() - -        return self.expr2.as_const(eval_ctx) - - -def args_as_const( -    node: t.Union["_FilterTestCommon", "Call"], eval_ctx: t.Optional[EvalContext] -) -> t.Tuple[t.List[t.Any], t.Dict[t.Any, t.Any]]: -    args = [x.as_const(eval_ctx) for x in node.args] -    kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs) - -    if node.dyn_args is not None: -        try: -            args.extend(node.dyn_args.as_const(eval_ctx)) -        except Exception as e: -            raise Impossible() from e - -    if node.dyn_kwargs is not None: -        try: -            kwargs.update(node.dyn_kwargs.as_const(eval_ctx)) -        except Exception as e: -            raise Impossible() from e - -    return args, kwargs - - -class _FilterTestCommon(Expr): -    fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs") -    node: Expr -    name: str -    args: t.List[Expr] -    kwargs: t.List[Pair] -    dyn_args: t.Optional[Expr] -    dyn_kwargs: t.Optional[Expr] -    abstract = True -    _is_filter = True - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        eval_ctx = get_eval_context(self, eval_ctx) - -        if eval_ctx.volatile: -            raise Impossible() - -        if self._is_filter: -            env_map = eval_ctx.environment.filters -        else: -            env_map = eval_ctx.environment.tests - -        func = env_map.get(self.name) -        pass_arg = _PassArg.from_obj(func)  # type: ignore - -        if func is None or pass_arg is _PassArg.context: -            raise Impossible() - -        if eval_ctx.environment.is_async and ( -            getattr(func, "jinja_async_variant", False) is True -            or inspect.iscoroutinefunction(func) -        ): -            raise Impossible() - -        args, kwargs = args_as_const(self, eval_ctx) -        args.insert(0, self.node.as_const(eval_ctx)) - -        if pass_arg is _PassArg.eval_context: -  
          args.insert(0, eval_ctx) -        elif pass_arg is _PassArg.environment: -            args.insert(0, eval_ctx.environment) - -        try: -            return func(*args, **kwargs) -        except Exception as e: -            raise Impossible() from e - - -class Filter(_FilterTestCommon): -    """Apply a filter to an expression. ``name`` is the name of the -    filter, the other fields are the same as :class:`Call`. - -    If ``node`` is ``None``, the filter is being used in a filter block -    and is applied to the content of the block. -    """ - -    node: t.Optional[Expr]  # type: ignore - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        if self.node is None: -            raise Impossible() - -        return super().as_const(eval_ctx=eval_ctx) - - -class Test(_FilterTestCommon): -    """Apply a test to an expression. ``name`` is the name of the test, -    the other field are the same as :class:`Call`. - -    .. versionchanged:: 3.0 -        ``as_const`` shares the same logic for filters and tests. Tests -        check for volatile, async, and ``@pass_context`` etc. -        decorators. -    """ - -    _is_filter = False - - -class Call(Expr): -    """Calls an expression.  `args` is a list of arguments, `kwargs` a list -    of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args` -    and `dyn_kwargs` has to be either `None` or a node that is used as -    node for dynamic positional (``*args``) or keyword (``**kwargs``) -    arguments. -    """ - -    fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs") -    node: Expr -    args: t.List[Expr] -    kwargs: t.List[Keyword] -    dyn_args: t.Optional[Expr] -    dyn_kwargs: t.Optional[Expr] - - -class Getitem(Expr): -    """Get an attribute or item from an expression and prefer the item.""" - -    fields = ("node", "arg", "ctx") -    node: Expr -    arg: Expr -    ctx: str - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        if self.ctx != "load": -            raise Impossible() - -        eval_ctx = get_eval_context(self, eval_ctx) - -        try: -            return eval_ctx.environment.getitem( -                self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx) -            ) -        except Exception as e: -            raise Impossible() from e - - -class Getattr(Expr): -    """Get an attribute or item from an expression that is a ascii-only -    bytestring and prefer the attribute. -    """ - -    fields = ("node", "attr", "ctx") -    node: Expr -    attr: str -    ctx: str - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        if self.ctx != "load": -            raise Impossible() - -        eval_ctx = get_eval_context(self, eval_ctx) - -        try: -            return eval_ctx.environment.getattr(self.node.as_const(eval_ctx), self.attr) -        except Exception as e: -            raise Impossible() from e - - -class Slice(Expr): -    """Represents a slice object.  This must only be used as argument for -    :class:`Subscript`. 
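``Filter.as_const`` above looks the filter up on the environment and calls it with constant-folded arguments; a minimal sketch against a parsed template::

    from jinja2 import Environment
    from jinja2 import nodes

    env = Environment()
    ast = env.parse("{{ 'hi'|upper }}")
    filt = ast.find(nodes.Filter)
    # The built-in "upper" filter needs no pass_* argument and is not async,
    # so the expression folds to a constant.
    print(filt.as_const())  # 'HI'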
-    """ - -    fields = ("start", "stop", "step") -    start: t.Optional[Expr] -    stop: t.Optional[Expr] -    step: t.Optional[Expr] - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> slice: -        eval_ctx = get_eval_context(self, eval_ctx) - -        def const(obj: t.Optional[Expr]) -> t.Optional[t.Any]: -            if obj is None: -                return None -            return obj.as_const(eval_ctx) - -        return slice(const(self.start), const(self.stop), const(self.step)) - - -class Concat(Expr): -    """Concatenates the list of expressions provided after converting -    them to strings. -    """ - -    fields = ("nodes",) -    nodes: t.List[Expr] - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str: -        eval_ctx = get_eval_context(self, eval_ctx) -        return "".join(str(x.as_const(eval_ctx)) for x in self.nodes) - - -class Compare(Expr): -    """Compares an expression with some other expressions.  `ops` must be a -    list of :class:`Operand`\\s. -    """ - -    fields = ("expr", "ops") -    expr: Expr -    ops: t.List["Operand"] - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        eval_ctx = get_eval_context(self, eval_ctx) -        result = value = self.expr.as_const(eval_ctx) - -        try: -            for op in self.ops: -                new_value = op.expr.as_const(eval_ctx) -                result = _cmpop_to_func[op.op](value, new_value) - -                if not result: -                    return False - -                value = new_value -        except Exception as e: -            raise Impossible() from e - -        return result - - -class Operand(Helper): -    """Holds an operator and an expression.""" - -    fields = ("op", "expr") -    op: str -    expr: Expr - - -class Mul(BinExpr): -    """Multiplies the left with the right node.""" - -    operator = "*" - - -class Div(BinExpr): -    """Divides the left by the right node.""" - -    operator = "/" - - -class FloorDiv(BinExpr): -    """Divides the left by the right node and converts the -    result into an integer by truncating. -    """ - -    operator = "//" - - -class Add(BinExpr): -    """Add the left to the right node.""" - -    operator = "+" - - -class Sub(BinExpr): -    """Subtract the right from the left node.""" - -    operator = "-" - - -class Mod(BinExpr): -    """Left modulo right.""" - -    operator = "%" - - -class Pow(BinExpr): -    """Left to the power of right.""" - -    operator = "**" - - -class And(BinExpr): -    """Short circuited AND.""" - -    operator = "and" - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        eval_ctx = get_eval_context(self, eval_ctx) -        return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx) - - -class Or(BinExpr): -    """Short circuited OR.""" - -    operator = "or" - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: -        eval_ctx = get_eval_context(self, eval_ctx) -        return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx) - - -class Not(UnaryExpr): -    """Negate the expression.""" - -    operator = "not" - - -class Neg(UnaryExpr): -    """Make the expression negative.""" - -    operator = "-" - - -class Pos(UnaryExpr): -    """Make the expression positive (noop for most expressions)""" - -    operator = "+" - - -# Helpers for extensions - - -class EnvironmentAttribute(Expr): -    """Loads an attribute from the environment object.  
This is useful for -    extensions that want to call a callback stored on the environment. -    """ - -    fields = ("name",) -    name: str - - -class ExtensionAttribute(Expr): -    """Returns the attribute of an extension bound to the environment. -    The identifier is the identifier of the :class:`Extension`. - -    This node is usually constructed by calling the -    :meth:`~jinja2.ext.Extension.attr` method on an extension. -    """ - -    fields = ("identifier", "name") -    identifier: str -    name: str - - -class ImportedName(Expr): -    """If created with an import name the import name is returned on node -    access.  For example ``ImportedName('cgi.escape')`` returns the `escape` -    function from the cgi module on evaluation.  Imports are optimized by the -    compiler so there is no need to assign them to local variables. -    """ - -    fields = ("importname",) -    importname: str - - -class InternalName(Expr): -    """An internal name in the compiler.  You cannot create these nodes -    yourself but the parser provides a -    :meth:`~jinja2.parser.Parser.free_identifier` method that creates -    a new identifier for you.  This identifier is not available from the -    template and is not treated specially by the compiler. -    """ - -    fields = ("name",) -    name: str - -    def __init__(self) -> None: -        raise TypeError( -            "Can't create internal names.  Use the " -            "`free_identifier` method on a parser." -        ) - - -class MarkSafe(Expr): -    """Mark the wrapped expression as safe (wrap it as `Markup`).""" - -    fields = ("expr",) -    expr: Expr - -    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> Markup: -        eval_ctx = get_eval_context(self, eval_ctx) -        return Markup(self.expr.as_const(eval_ctx)) - - -class MarkSafeIfAutoescape(Expr): -    """Mark the wrapped expression as safe (wrap it as `Markup`) but -    only if autoescaping is active. - -    .. versionadded:: 2.5 -    """ - -    fields = ("expr",) -    expr: Expr - -    def as_const( -        self, eval_ctx: t.Optional[EvalContext] = None -    ) -> t.Union[Markup, t.Any]: -        eval_ctx = get_eval_context(self, eval_ctx) -        if eval_ctx.volatile: -            raise Impossible() -        expr = self.expr.as_const(eval_ctx) -        if eval_ctx.autoescape: -            return Markup(expr) -        return expr - - -class ContextReference(Expr): -    """Returns the current template context.  It can be used like a -    :class:`Name` node, with a ``'load'`` ctx and will return the -    current :class:`~jinja2.runtime.Context` object. - -    Here an example that assigns the current template name to a -    variable named `foo`:: - -        Assign(Name('foo', ctx='store'), -               Getattr(ContextReference(), 'name')) - -    This is basically equivalent to using the -    :func:`~jinja2.pass_context` decorator when using the high-level -    API, which causes a reference to the context to be passed as the -    first argument to a function. -    """ - - -class DerivedContextReference(Expr): -    """Return the current template context including locals. Behaves -    exactly like :class:`ContextReference`, but includes local -    variables, such as from a ``for`` loop. - -    .. 
versionadded:: 2.11 -    """ - - -class Continue(Stmt): -    """Continue a loop.""" - - -class Break(Stmt): -    """Break a loop.""" - - -class Scope(Stmt): -    """An artificial scope.""" - -    fields = ("body",) -    body: t.List[Node] - - -class OverlayScope(Stmt): -    """An overlay scope for extensions.  This is a largely unoptimized scope -    that however can be used to introduce completely arbitrary variables into -    a sub scope from a dictionary or dictionary like object.  The `context` -    field has to evaluate to a dictionary object. - -    Example usage:: - -        OverlayScope(context=self.call_method('get_context'), -                     body=[...]) - -    .. versionadded:: 2.10 -    """ - -    fields = ("context", "body") -    context: Expr -    body: t.List[Node] - - -class EvalContextModifier(Stmt): -    """Modifies the eval context.  For each option that should be modified, -    a :class:`Keyword` has to be added to the :attr:`options` list. - -    Example to change the `autoescape` setting:: - -        EvalContextModifier(options=[Keyword('autoescape', Const(True))]) -    """ - -    fields = ("options",) -    options: t.List[Keyword] - - -class ScopedEvalContextModifier(EvalContextModifier): -    """Modifies the eval context and reverts it later.  Works exactly like -    :class:`EvalContextModifier` but will only modify the -    :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`. -    """ - -    fields = ("body",) -    body: t.List[Node] - - -# make sure nobody creates custom nodes -def _failing_new(*args: t.Any, **kwargs: t.Any) -> "te.NoReturn": -    raise TypeError("can't create custom node types") - - -NodeType.__new__ = staticmethod(_failing_new)  # type: ignore -del _failing_new diff --git a/venv/lib/python3.11/site-packages/jinja2/optimizer.py b/venv/lib/python3.11/site-packages/jinja2/optimizer.py deleted file mode 100644 index fe10107..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/optimizer.py +++ /dev/null @@ -1,47 +0,0 @@ -"""The optimizer tries to constant fold expressions and modify the AST -in place so that it should be faster to evaluate. - -Because the AST does not contain all the scoping information and the -compiler has to find that out, we cannot do all the optimizations we -want. For example, loop unrolling doesn't work because unrolled loops -would have a different scope. The solution would be a second syntax tree -that stored the scoping rules. -""" -import typing as t - -from . import nodes -from .visitor import NodeTransformer - -if t.TYPE_CHECKING: -    from .environment import Environment - - -def optimize(node: nodes.Node, environment: "Environment") -> nodes.Node: -    """The context hint can be used to perform an static optimization -    based on the context given.""" -    optimizer = Optimizer(environment) -    return t.cast(nodes.Node, optimizer.visit(node)) - - -class Optimizer(NodeTransformer): -    def __init__(self, environment: "t.Optional[Environment]") -> None: -        self.environment = environment - -    def generic_visit( -        self, node: nodes.Node, *args: t.Any, **kwargs: t.Any -    ) -> nodes.Node: -        node = super().generic_visit(node, *args, **kwargs) - -        # Do constant folding. Some other nodes besides Expr have -        # as_const, but folding them causes errors later on. 
-        if isinstance(node, nodes.Expr): -            try: -                return nodes.Const.from_untrusted( -                    node.as_const(args[0] if args else None), -                    lineno=node.lineno, -                    environment=self.environment, -                ) -            except nodes.Impossible: -                pass - -        return node diff --git a/venv/lib/python3.11/site-packages/jinja2/parser.py b/venv/lib/python3.11/site-packages/jinja2/parser.py deleted file mode 100644 index 3354bc9..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/parser.py +++ /dev/null @@ -1,1034 +0,0 @@ -"""Parse tokens from the lexer into nodes for the compiler.""" -import typing -import typing as t - -from . import nodes -from .exceptions import TemplateAssertionError -from .exceptions import TemplateSyntaxError -from .lexer import describe_token -from .lexer import describe_token_expr - -if t.TYPE_CHECKING: -    import typing_extensions as te -    from .environment import Environment - -_ImportInclude = t.TypeVar("_ImportInclude", nodes.Import, nodes.Include) -_MacroCall = t.TypeVar("_MacroCall", nodes.Macro, nodes.CallBlock) - -_statement_keywords = frozenset( -    [ -        "for", -        "if", -        "block", -        "extends", -        "print", -        "macro", -        "include", -        "from", -        "import", -        "set", -        "with", -        "autoescape", -    ] -) -_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"]) - -_math_nodes: t.Dict[str, t.Type[nodes.Expr]] = { -    "add": nodes.Add, -    "sub": nodes.Sub, -    "mul": nodes.Mul, -    "div": nodes.Div, -    "floordiv": nodes.FloorDiv, -    "mod": nodes.Mod, -} - - -class Parser: -    """This is the central parsing class Jinja uses.  It's passed to -    extensions and can be used to parse expressions or statements. -    """ - -    def __init__( -        self, -        environment: "Environment", -        source: str, -        name: t.Optional[str] = None, -        filename: t.Optional[str] = None, -        state: t.Optional[str] = None, -    ) -> None: -        self.environment = environment -        self.stream = environment._tokenize(source, name, filename, state) -        self.name = name -        self.filename = filename -        self.closed = False -        self.extensions: t.Dict[ -            str, t.Callable[["Parser"], t.Union[nodes.Node, t.List[nodes.Node]]] -        ] = {} -        for extension in environment.iter_extensions(): -            for tag in extension.tags: -                self.extensions[tag] = extension.parse -        self._last_identifier = 0 -        self._tag_stack: t.List[str] = [] -        self._end_token_stack: t.List[t.Tuple[str, ...]] = [] - -    def fail( -        self, -        msg: str, -        lineno: t.Optional[int] = None, -        exc: t.Type[TemplateSyntaxError] = TemplateSyntaxError, -    ) -> "te.NoReturn": -        """Convenience method that raises `exc` with the message, passed -        line number or last line number as well as the current name and -        filename. 
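The constant-folding pass in ``optimizer.py`` above can be exercised directly against a parsed template; an illustrative sketch (``jinja2.optimizer`` is an internal module)::

    from jinja2 import Environment
    from jinja2 import nodes
    from jinja2.optimizer import optimize

    env = Environment()
    ast = env.parse("{{ 1 + 2 * 3 }}")
    folded = optimize(ast, env)
    # The whole arithmetic expression collapses to a single Const node.
    print(folded.find(nodes.Const).value)  # 7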
-        """ -        if lineno is None: -            lineno = self.stream.current.lineno -        raise exc(msg, lineno, self.name, self.filename) - -    def _fail_ut_eof( -        self, -        name: t.Optional[str], -        end_token_stack: t.List[t.Tuple[str, ...]], -        lineno: t.Optional[int], -    ) -> "te.NoReturn": -        expected: t.Set[str] = set() -        for exprs in end_token_stack: -            expected.update(map(describe_token_expr, exprs)) -        if end_token_stack: -            currently_looking: t.Optional[str] = " or ".join( -                map(repr, map(describe_token_expr, end_token_stack[-1])) -            ) -        else: -            currently_looking = None - -        if name is None: -            message = ["Unexpected end of template."] -        else: -            message = [f"Encountered unknown tag {name!r}."] - -        if currently_looking: -            if name is not None and name in expected: -                message.append( -                    "You probably made a nesting mistake. Jinja is expecting this tag," -                    f" but currently looking for {currently_looking}." -                ) -            else: -                message.append( -                    f"Jinja was looking for the following tags: {currently_looking}." -                ) - -        if self._tag_stack: -            message.append( -                "The innermost block that needs to be closed is" -                f" {self._tag_stack[-1]!r}." -            ) - -        self.fail(" ".join(message), lineno) - -    def fail_unknown_tag( -        self, name: str, lineno: t.Optional[int] = None -    ) -> "te.NoReturn": -        """Called if the parser encounters an unknown tag.  Tries to fail -        with a human readable error message that could help to identify -        the problem. 
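The error-message helpers above are what produce the familiar "nesting mistake" hints; a quick illustration::

    from jinja2 import Environment, TemplateSyntaxError

    env = Environment()
    try:
        env.parse("{% for x in items %}{% endif %}")
    except TemplateSyntaxError as exc:
        # The message mentions the unknown 'endif' tag and that the innermost
        # block still open is 'for', as assembled by _fail_ut_eof above.
        print(exc.message)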
-        """ -        self._fail_ut_eof(name, self._end_token_stack, lineno) - -    def fail_eof( -        self, -        end_tokens: t.Optional[t.Tuple[str, ...]] = None, -        lineno: t.Optional[int] = None, -    ) -> "te.NoReturn": -        """Like fail_unknown_tag but for end of template situations.""" -        stack = list(self._end_token_stack) -        if end_tokens is not None: -            stack.append(end_tokens) -        self._fail_ut_eof(None, stack, lineno) - -    def is_tuple_end( -        self, extra_end_rules: t.Optional[t.Tuple[str, ...]] = None -    ) -> bool: -        """Are we at the end of a tuple?""" -        if self.stream.current.type in ("variable_end", "block_end", "rparen"): -            return True -        elif extra_end_rules is not None: -            return self.stream.current.test_any(extra_end_rules)  # type: ignore -        return False - -    def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName: -        """Return a new free identifier as :class:`~jinja2.nodes.InternalName`.""" -        self._last_identifier += 1 -        rv = object.__new__(nodes.InternalName) -        nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno) -        return rv - -    def parse_statement(self) -> t.Union[nodes.Node, t.List[nodes.Node]]: -        """Parse a single statement.""" -        token = self.stream.current -        if token.type != "name": -            self.fail("tag name expected", token.lineno) -        self._tag_stack.append(token.value) -        pop_tag = True -        try: -            if token.value in _statement_keywords: -                f = getattr(self, f"parse_{self.stream.current.value}") -                return f()  # type: ignore -            if token.value == "call": -                return self.parse_call_block() -            if token.value == "filter": -                return self.parse_filter_block() -            ext = self.extensions.get(token.value) -            if ext is not None: -                return ext(self) - -            # did not work out, remove the token we pushed by accident -            # from the stack so that the unknown tag fail function can -            # produce a proper error message. -            self._tag_stack.pop() -            pop_tag = False -            self.fail_unknown_tag(token.value, token.lineno) -        finally: -            if pop_tag: -                self._tag_stack.pop() - -    def parse_statements( -        self, end_tokens: t.Tuple[str, ...], drop_needle: bool = False -    ) -> t.List[nodes.Node]: -        """Parse multiple statements into a list until one of the end tokens -        is reached.  This is used to parse the body of statements as it also -        parses template data if appropriate.  The parser checks first if the -        current token is a colon and skips it if there is one.  Then it checks -        for the block end and parses until if one of the `end_tokens` is -        reached.  Per default the active token in the stream at the end of -        the call is the matched end token.  If this is not wanted `drop_needle` -        can be set to `True` and the end token is removed. -        """ -        # the first token may be a colon for python compatibility -        self.stream.skip_if("colon") - -        # in the future it would be possible to add whole code sections -        # by adding some sort of end of statement token and parsing those here. 
-        self.stream.expect("block_end") -        result = self.subparse(end_tokens) - -        # we reached the end of the template too early, the subparser -        # does not check for this, so we do that now -        if self.stream.current.type == "eof": -            self.fail_eof(end_tokens) - -        if drop_needle: -            next(self.stream) -        return result - -    def parse_set(self) -> t.Union[nodes.Assign, nodes.AssignBlock]: -        """Parse an assign statement.""" -        lineno = next(self.stream).lineno -        target = self.parse_assign_target(with_namespace=True) -        if self.stream.skip_if("assign"): -            expr = self.parse_tuple() -            return nodes.Assign(target, expr, lineno=lineno) -        filter_node = self.parse_filter(None) -        body = self.parse_statements(("name:endset",), drop_needle=True) -        return nodes.AssignBlock(target, filter_node, body, lineno=lineno) - -    def parse_for(self) -> nodes.For: -        """Parse a for loop.""" -        lineno = self.stream.expect("name:for").lineno -        target = self.parse_assign_target(extra_end_rules=("name:in",)) -        self.stream.expect("name:in") -        iter = self.parse_tuple( -            with_condexpr=False, extra_end_rules=("name:recursive",) -        ) -        test = None -        if self.stream.skip_if("name:if"): -            test = self.parse_expression() -        recursive = self.stream.skip_if("name:recursive") -        body = self.parse_statements(("name:endfor", "name:else")) -        if next(self.stream).value == "endfor": -            else_ = [] -        else: -            else_ = self.parse_statements(("name:endfor",), drop_needle=True) -        return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno) - -    def parse_if(self) -> nodes.If: -        """Parse an if construct.""" -        node = result = nodes.If(lineno=self.stream.expect("name:if").lineno) -        while True: -            node.test = self.parse_tuple(with_condexpr=False) -            node.body = self.parse_statements(("name:elif", "name:else", "name:endif")) -            node.elif_ = [] -            node.else_ = [] -            token = next(self.stream) -            if token.test("name:elif"): -                node = nodes.If(lineno=self.stream.current.lineno) -                result.elif_.append(node) -                continue -            elif token.test("name:else"): -                result.else_ = self.parse_statements(("name:endif",), drop_needle=True) -            break -        return result - -    def parse_with(self) -> nodes.With: -        node = nodes.With(lineno=next(self.stream).lineno) -        targets: t.List[nodes.Expr] = [] -        values: t.List[nodes.Expr] = [] -        while self.stream.current.type != "block_end": -            if targets: -                self.stream.expect("comma") -            target = self.parse_assign_target() -            target.set_ctx("param") -            targets.append(target) -            self.stream.expect("assign") -            values.append(self.parse_expression()) -        node.targets = targets -        node.values = values -        node.body = self.parse_statements(("name:endwith",), drop_needle=True) -        return node - -    def parse_autoescape(self) -> nodes.Scope: -        node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno) -        node.options = [nodes.Keyword("autoescape", self.parse_expression())] -        node.body = self.parse_statements(("name:endautoescape",), drop_needle=True) -   
     return nodes.Scope([node]) - -    def parse_block(self) -> nodes.Block: -        node = nodes.Block(lineno=next(self.stream).lineno) -        node.name = self.stream.expect("name").value -        node.scoped = self.stream.skip_if("name:scoped") -        node.required = self.stream.skip_if("name:required") - -        # common problem people encounter when switching from django -        # to jinja.  we do not support hyphens in block names, so let's -        # raise a nicer error message in that case. -        if self.stream.current.type == "sub": -            self.fail( -                "Block names in Jinja have to be valid Python identifiers and may not" -                " contain hyphens, use an underscore instead." -            ) - -        node.body = self.parse_statements(("name:endblock",), drop_needle=True) - -        # enforce that required blocks only contain whitespace or comments -        # by asserting that the body, if not empty, is just TemplateData nodes -        # with whitespace data -        if node.required: -            for body_node in node.body: -                if not isinstance(body_node, nodes.Output) or any( -                    not isinstance(output_node, nodes.TemplateData) -                    or not output_node.data.isspace() -                    for output_node in body_node.nodes -                ): -                    self.fail("Required blocks can only contain comments or whitespace") - -        self.stream.skip_if("name:" + node.name) -        return node - -    def parse_extends(self) -> nodes.Extends: -        node = nodes.Extends(lineno=next(self.stream).lineno) -        node.template = self.parse_expression() -        return node - -    def parse_import_context( -        self, node: _ImportInclude, default: bool -    ) -> _ImportInclude: -        if self.stream.current.test_any( -            "name:with", "name:without" -        ) and self.stream.look().test("name:context"): -            node.with_context = next(self.stream).value == "with" -            self.stream.skip() -        else: -            node.with_context = default -        return node - -    def parse_include(self) -> nodes.Include: -        node = nodes.Include(lineno=next(self.stream).lineno) -        node.template = self.parse_expression() -        if self.stream.current.test("name:ignore") and self.stream.look().test( -            "name:missing" -        ): -            node.ignore_missing = True -            self.stream.skip(2) -        else: -            node.ignore_missing = False -        return self.parse_import_context(node, True) - -    def parse_import(self) -> nodes.Import: -        node = nodes.Import(lineno=next(self.stream).lineno) -        node.template = self.parse_expression() -        self.stream.expect("name:as") -        node.target = self.parse_assign_target(name_only=True).name -        return self.parse_import_context(node, False) - -    def parse_from(self) -> nodes.FromImport: -        node = nodes.FromImport(lineno=next(self.stream).lineno) -        node.template = self.parse_expression() -        self.stream.expect("name:import") -        node.names = [] - -        def parse_context() -> bool: -            if self.stream.current.value in { -                "with", -                "without", -            } and self.stream.look().test("name:context"): -                node.with_context = next(self.stream).value == "with" -                self.stream.skip() -                return True -            return False - -        while True: -            if 
node.names: -                self.stream.expect("comma") -            if self.stream.current.type == "name": -                if parse_context(): -                    break -                target = self.parse_assign_target(name_only=True) -                if target.name.startswith("_"): -                    self.fail( -                        "names starting with an underline can not be imported", -                        target.lineno, -                        exc=TemplateAssertionError, -                    ) -                if self.stream.skip_if("name:as"): -                    alias = self.parse_assign_target(name_only=True) -                    node.names.append((target.name, alias.name)) -                else: -                    node.names.append(target.name) -                if parse_context() or self.stream.current.type != "comma": -                    break -            else: -                self.stream.expect("name") -        if not hasattr(node, "with_context"): -            node.with_context = False -        return node - -    def parse_signature(self, node: _MacroCall) -> None: -        args = node.args = [] -        defaults = node.defaults = [] -        self.stream.expect("lparen") -        while self.stream.current.type != "rparen": -            if args: -                self.stream.expect("comma") -            arg = self.parse_assign_target(name_only=True) -            arg.set_ctx("param") -            if self.stream.skip_if("assign"): -                defaults.append(self.parse_expression()) -            elif defaults: -                self.fail("non-default argument follows default argument") -            args.append(arg) -        self.stream.expect("rparen") - -    def parse_call_block(self) -> nodes.CallBlock: -        node = nodes.CallBlock(lineno=next(self.stream).lineno) -        if self.stream.current.type == "lparen": -            self.parse_signature(node) -        else: -            node.args = [] -            node.defaults = [] - -        call_node = self.parse_expression() -        if not isinstance(call_node, nodes.Call): -            self.fail("expected call", node.lineno) -        node.call = call_node -        node.body = self.parse_statements(("name:endcall",), drop_needle=True) -        return node - -    def parse_filter_block(self) -> nodes.FilterBlock: -        node = nodes.FilterBlock(lineno=next(self.stream).lineno) -        node.filter = self.parse_filter(None, start_inline=True)  # type: ignore -        node.body = self.parse_statements(("name:endfilter",), drop_needle=True) -        return node - -    def parse_macro(self) -> nodes.Macro: -        node = nodes.Macro(lineno=next(self.stream).lineno) -        node.name = self.parse_assign_target(name_only=True).name -        self.parse_signature(node) -        node.body = self.parse_statements(("name:endmacro",), drop_needle=True) -        return node - -    def parse_print(self) -> nodes.Output: -        node = nodes.Output(lineno=next(self.stream).lineno) -        node.nodes = [] -        while self.stream.current.type != "block_end": -            if node.nodes: -                self.stream.expect("comma") -            node.nodes.append(self.parse_expression()) -        return node - -    @typing.overload -    def parse_assign_target( -        self, with_tuple: bool = ..., name_only: "te.Literal[True]" = ... -    ) -> nodes.Name: -        ... 
- -    @typing.overload -    def parse_assign_target( -        self, -        with_tuple: bool = True, -        name_only: bool = False, -        extra_end_rules: t.Optional[t.Tuple[str, ...]] = None, -        with_namespace: bool = False, -    ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]: -        ... - -    def parse_assign_target( -        self, -        with_tuple: bool = True, -        name_only: bool = False, -        extra_end_rules: t.Optional[t.Tuple[str, ...]] = None, -        with_namespace: bool = False, -    ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]: -        """Parse an assignment target.  As Jinja allows assignments to -        tuples, this function can parse all allowed assignment targets.  Per -        default assignments to tuples are parsed, that can be disable however -        by setting `with_tuple` to `False`.  If only assignments to names are -        wanted `name_only` can be set to `True`.  The `extra_end_rules` -        parameter is forwarded to the tuple parsing function.  If -        `with_namespace` is enabled, a namespace assignment may be parsed. -        """ -        target: nodes.Expr - -        if with_namespace and self.stream.look().type == "dot": -            token = self.stream.expect("name") -            next(self.stream)  # dot -            attr = self.stream.expect("name") -            target = nodes.NSRef(token.value, attr.value, lineno=token.lineno) -        elif name_only: -            token = self.stream.expect("name") -            target = nodes.Name(token.value, "store", lineno=token.lineno) -        else: -            if with_tuple: -                target = self.parse_tuple( -                    simplified=True, extra_end_rules=extra_end_rules -                ) -            else: -                target = self.parse_primary() - -            target.set_ctx("store") - -        if not target.can_assign(): -            self.fail( -                f"can't assign to {type(target).__name__.lower()!r}", target.lineno -            ) - -        return target  # type: ignore - -    def parse_expression(self, with_condexpr: bool = True) -> nodes.Expr: -        """Parse an expression.  Per default all expressions are parsed, if -        the optional `with_condexpr` parameter is set to `False` conditional -        expressions are not parsed. 
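The chain of parse_* methods that follows (``parse_or`` down to ``parse_unary``) encodes operator precedence; for instance ``~`` (``parse_concat``) binds tighter than ``+``/``-`` (``parse_math1``) but looser than ``*`` (``parse_math2``). A quick sketch::

    from jinja2 import Environment

    env = Environment()
    # 1 * 2 is grouped inside the concat operand, so this renders "a2".
    print(env.from_string("{{ 'a' ~ 1 * 2 }}").render())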
-        """ -        if with_condexpr: -            return self.parse_condexpr() -        return self.parse_or() - -    def parse_condexpr(self) -> nodes.Expr: -        lineno = self.stream.current.lineno -        expr1 = self.parse_or() -        expr3: t.Optional[nodes.Expr] - -        while self.stream.skip_if("name:if"): -            expr2 = self.parse_or() -            if self.stream.skip_if("name:else"): -                expr3 = self.parse_condexpr() -            else: -                expr3 = None -            expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno) -            lineno = self.stream.current.lineno -        return expr1 - -    def parse_or(self) -> nodes.Expr: -        lineno = self.stream.current.lineno -        left = self.parse_and() -        while self.stream.skip_if("name:or"): -            right = self.parse_and() -            left = nodes.Or(left, right, lineno=lineno) -            lineno = self.stream.current.lineno -        return left - -    def parse_and(self) -> nodes.Expr: -        lineno = self.stream.current.lineno -        left = self.parse_not() -        while self.stream.skip_if("name:and"): -            right = self.parse_not() -            left = nodes.And(left, right, lineno=lineno) -            lineno = self.stream.current.lineno -        return left - -    def parse_not(self) -> nodes.Expr: -        if self.stream.current.test("name:not"): -            lineno = next(self.stream).lineno -            return nodes.Not(self.parse_not(), lineno=lineno) -        return self.parse_compare() - -    def parse_compare(self) -> nodes.Expr: -        lineno = self.stream.current.lineno -        expr = self.parse_math1() -        ops = [] -        while True: -            token_type = self.stream.current.type -            if token_type in _compare_operators: -                next(self.stream) -                ops.append(nodes.Operand(token_type, self.parse_math1())) -            elif self.stream.skip_if("name:in"): -                ops.append(nodes.Operand("in", self.parse_math1())) -            elif self.stream.current.test("name:not") and self.stream.look().test( -                "name:in" -            ): -                self.stream.skip(2) -                ops.append(nodes.Operand("notin", self.parse_math1())) -            else: -                break -            lineno = self.stream.current.lineno -        if not ops: -            return expr -        return nodes.Compare(expr, ops, lineno=lineno) - -    def parse_math1(self) -> nodes.Expr: -        lineno = self.stream.current.lineno -        left = self.parse_concat() -        while self.stream.current.type in ("add", "sub"): -            cls = _math_nodes[self.stream.current.type] -            next(self.stream) -            right = self.parse_concat() -            left = cls(left, right, lineno=lineno) -            lineno = self.stream.current.lineno -        return left - -    def parse_concat(self) -> nodes.Expr: -        lineno = self.stream.current.lineno -        args = [self.parse_math2()] -        while self.stream.current.type == "tilde": -            next(self.stream) -            args.append(self.parse_math2()) -        if len(args) == 1: -            return args[0] -        return nodes.Concat(args, lineno=lineno) - -    def parse_math2(self) -> nodes.Expr: -        lineno = self.stream.current.lineno -        left = self.parse_pow() -        while self.stream.current.type in ("mul", "div", "floordiv", "mod"): -            cls = _math_nodes[self.stream.current.type] -            
next(self.stream) -            right = self.parse_pow() -            left = cls(left, right, lineno=lineno) -            lineno = self.stream.current.lineno -        return left - -    def parse_pow(self) -> nodes.Expr: -        lineno = self.stream.current.lineno -        left = self.parse_unary() -        while self.stream.current.type == "pow": -            next(self.stream) -            right = self.parse_unary() -            left = nodes.Pow(left, right, lineno=lineno) -            lineno = self.stream.current.lineno -        return left - -    def parse_unary(self, with_filter: bool = True) -> nodes.Expr: -        token_type = self.stream.current.type -        lineno = self.stream.current.lineno -        node: nodes.Expr - -        if token_type == "sub": -            next(self.stream) -            node = nodes.Neg(self.parse_unary(False), lineno=lineno) -        elif token_type == "add": -            next(self.stream) -            node = nodes.Pos(self.parse_unary(False), lineno=lineno) -        else: -            node = self.parse_primary() -        node = self.parse_postfix(node) -        if with_filter: -            node = self.parse_filter_expr(node) -        return node - -    def parse_primary(self) -> nodes.Expr: -        token = self.stream.current -        node: nodes.Expr -        if token.type == "name": -            if token.value in ("true", "false", "True", "False"): -                node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno) -            elif token.value in ("none", "None"): -                node = nodes.Const(None, lineno=token.lineno) -            else: -                node = nodes.Name(token.value, "load", lineno=token.lineno) -            next(self.stream) -        elif token.type == "string": -            next(self.stream) -            buf = [token.value] -            lineno = token.lineno -            while self.stream.current.type == "string": -                buf.append(self.stream.current.value) -                next(self.stream) -            node = nodes.Const("".join(buf), lineno=lineno) -        elif token.type in ("integer", "float"): -            next(self.stream) -            node = nodes.Const(token.value, lineno=token.lineno) -        elif token.type == "lparen": -            next(self.stream) -            node = self.parse_tuple(explicit_parentheses=True) -            self.stream.expect("rparen") -        elif token.type == "lbracket": -            node = self.parse_list() -        elif token.type == "lbrace": -            node = self.parse_dict() -        else: -            self.fail(f"unexpected {describe_token(token)!r}", token.lineno) -        return node - -    def parse_tuple( -        self, -        simplified: bool = False, -        with_condexpr: bool = True, -        extra_end_rules: t.Optional[t.Tuple[str, ...]] = None, -        explicit_parentheses: bool = False, -    ) -> t.Union[nodes.Tuple, nodes.Expr]: -        """Works like `parse_expression` but if multiple expressions are -        delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created. -        This method could also return a regular expression instead of a tuple -        if no commas where found. - -        The default parsing mode is a full tuple.  If `simplified` is `True` -        only names and literals are parsed.  The `no_condexpr` parameter is -        forwarded to :meth:`parse_expression`. 
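As a usage note, the expression grammar implemented by the methods above is reachable through Environment.parse, which returns the nodes.Template AST built by parse() at the end of this file. A small sketch (the template string is made up):

    from jinja2 import Environment, nodes

    env = Environment()
    ast = env.parse("{{ (user.name | upper) ~ '!' if user else 'anonymous' }}")

    # Conditional expression, concatenation, filter and attribute access all become nodes.
    print(type(ast))                                     # <class 'jinja2.nodes.Template'>
    print([n.name for n in ast.find_all(nodes.Name)])    # ['user', 'user']
    print([f.name for f in ast.find_all(nodes.Filter)])  # ['upper']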
- -        Because tuples do not require delimiters and may end in a bogus comma -        an extra hint is needed that marks the end of a tuple.  For example -        for loops support tuples between `for` and `in`.  In that case the -        `extra_end_rules` is set to ``['name:in']``. - -        `explicit_parentheses` is true if the parsing was triggered by an -        expression in parentheses.  This is used to figure out if an empty -        tuple is a valid expression or not. -        """ -        lineno = self.stream.current.lineno -        if simplified: -            parse = self.parse_primary -        elif with_condexpr: -            parse = self.parse_expression -        else: - -            def parse() -> nodes.Expr: -                return self.parse_expression(with_condexpr=False) - -        args: t.List[nodes.Expr] = [] -        is_tuple = False - -        while True: -            if args: -                self.stream.expect("comma") -            if self.is_tuple_end(extra_end_rules): -                break -            args.append(parse()) -            if self.stream.current.type == "comma": -                is_tuple = True -            else: -                break -            lineno = self.stream.current.lineno - -        if not is_tuple: -            if args: -                return args[0] - -            # if we don't have explicit parentheses, an empty tuple is -            # not a valid expression.  This would mean nothing (literally -            # nothing) in the spot of an expression would be an empty -            # tuple. -            if not explicit_parentheses: -                self.fail( -                    "Expected an expression," -                    f" got {describe_token(self.stream.current)!r}" -                ) - -        return nodes.Tuple(args, "load", lineno=lineno) - -    def parse_list(self) -> nodes.List: -        token = self.stream.expect("lbracket") -        items: t.List[nodes.Expr] = [] -        while self.stream.current.type != "rbracket": -            if items: -                self.stream.expect("comma") -            if self.stream.current.type == "rbracket": -                break -            items.append(self.parse_expression()) -        self.stream.expect("rbracket") -        return nodes.List(items, lineno=token.lineno) - -    def parse_dict(self) -> nodes.Dict: -        token = self.stream.expect("lbrace") -        items: t.List[nodes.Pair] = [] -        while self.stream.current.type != "rbrace": -            if items: -                self.stream.expect("comma") -            if self.stream.current.type == "rbrace": -                break -            key = self.parse_expression() -            self.stream.expect("colon") -            value = self.parse_expression() -            items.append(nodes.Pair(key, value, lineno=key.lineno)) -        self.stream.expect("rbrace") -        return nodes.Dict(items, lineno=token.lineno) - -    def parse_postfix(self, node: nodes.Expr) -> nodes.Expr: -        while True: -            token_type = self.stream.current.type -            if token_type == "dot" or token_type == "lbracket": -                node = self.parse_subscript(node) -            # calls are valid both after postfix expressions (getattr -            # and getitem) as well as filters and tests -            elif token_type == "lparen": -                node = self.parse_call(node) -            else: -                break -        return node - -    def parse_filter_expr(self, node: nodes.Expr) -> nodes.Expr: -        while True: -  
          token_type = self.stream.current.type -            if token_type == "pipe": -                node = self.parse_filter(node)  # type: ignore -            elif token_type == "name" and self.stream.current.value == "is": -                node = self.parse_test(node) -            # calls are valid both after postfix expressions (getattr -            # and getitem) as well as filters and tests -            elif token_type == "lparen": -                node = self.parse_call(node) -            else: -                break -        return node - -    def parse_subscript( -        self, node: nodes.Expr -    ) -> t.Union[nodes.Getattr, nodes.Getitem]: -        token = next(self.stream) -        arg: nodes.Expr - -        if token.type == "dot": -            attr_token = self.stream.current -            next(self.stream) -            if attr_token.type == "name": -                return nodes.Getattr( -                    node, attr_token.value, "load", lineno=token.lineno -                ) -            elif attr_token.type != "integer": -                self.fail("expected name or number", attr_token.lineno) -            arg = nodes.Const(attr_token.value, lineno=attr_token.lineno) -            return nodes.Getitem(node, arg, "load", lineno=token.lineno) -        if token.type == "lbracket": -            args: t.List[nodes.Expr] = [] -            while self.stream.current.type != "rbracket": -                if args: -                    self.stream.expect("comma") -                args.append(self.parse_subscribed()) -            self.stream.expect("rbracket") -            if len(args) == 1: -                arg = args[0] -            else: -                arg = nodes.Tuple(args, "load", lineno=token.lineno) -            return nodes.Getitem(node, arg, "load", lineno=token.lineno) -        self.fail("expected subscript expression", token.lineno) - -    def parse_subscribed(self) -> nodes.Expr: -        lineno = self.stream.current.lineno -        args: t.List[t.Optional[nodes.Expr]] - -        if self.stream.current.type == "colon": -            next(self.stream) -            args = [None] -        else: -            node = self.parse_expression() -            if self.stream.current.type != "colon": -                return node -            next(self.stream) -            args = [node] - -        if self.stream.current.type == "colon": -            args.append(None) -        elif self.stream.current.type not in ("rbracket", "comma"): -            args.append(self.parse_expression()) -        else: -            args.append(None) - -        if self.stream.current.type == "colon": -            next(self.stream) -            if self.stream.current.type not in ("rbracket", "comma"): -                args.append(self.parse_expression()) -            else: -                args.append(None) -        else: -            args.append(None) - -        return nodes.Slice(lineno=lineno, *args)  # noqa: B026 - -    def parse_call_args(self) -> t.Tuple: -        token = self.stream.expect("lparen") -        args = [] -        kwargs = [] -        dyn_args = None -        dyn_kwargs = None -        require_comma = False - -        def ensure(expr: bool) -> None: -            if not expr: -                self.fail("invalid syntax for function call expression", token.lineno) - -        while self.stream.current.type != "rparen": -            if require_comma: -                self.stream.expect("comma") - -                # support for trailing comma -                if self.stream.current.type == 
"rparen": -                    break - -            if self.stream.current.type == "mul": -                ensure(dyn_args is None and dyn_kwargs is None) -                next(self.stream) -                dyn_args = self.parse_expression() -            elif self.stream.current.type == "pow": -                ensure(dyn_kwargs is None) -                next(self.stream) -                dyn_kwargs = self.parse_expression() -            else: -                if ( -                    self.stream.current.type == "name" -                    and self.stream.look().type == "assign" -                ): -                    # Parsing a kwarg -                    ensure(dyn_kwargs is None) -                    key = self.stream.current.value -                    self.stream.skip(2) -                    value = self.parse_expression() -                    kwargs.append(nodes.Keyword(key, value, lineno=value.lineno)) -                else: -                    # Parsing an arg -                    ensure(dyn_args is None and dyn_kwargs is None and not kwargs) -                    args.append(self.parse_expression()) - -            require_comma = True - -        self.stream.expect("rparen") -        return args, kwargs, dyn_args, dyn_kwargs - -    def parse_call(self, node: nodes.Expr) -> nodes.Call: -        # The lparen will be expected in parse_call_args, but the lineno -        # needs to be recorded before the stream is advanced. -        token = self.stream.current -        args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args() -        return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno) - -    def parse_filter( -        self, node: t.Optional[nodes.Expr], start_inline: bool = False -    ) -> t.Optional[nodes.Expr]: -        while self.stream.current.type == "pipe" or start_inline: -            if not start_inline: -                next(self.stream) -            token = self.stream.expect("name") -            name = token.value -            while self.stream.current.type == "dot": -                next(self.stream) -                name += "." + self.stream.expect("name").value -            if self.stream.current.type == "lparen": -                args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args() -            else: -                args = [] -                kwargs = [] -                dyn_args = dyn_kwargs = None -            node = nodes.Filter( -                node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno -            ) -            start_inline = False -        return node - -    def parse_test(self, node: nodes.Expr) -> nodes.Expr: -        token = next(self.stream) -        if self.stream.current.test("name:not"): -            next(self.stream) -            negated = True -        else: -            negated = False -        name = self.stream.expect("name").value -        while self.stream.current.type == "dot": -            next(self.stream) -            name += "." 
+ self.stream.expect("name").value -        dyn_args = dyn_kwargs = None -        kwargs = [] -        if self.stream.current.type == "lparen": -            args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args() -        elif self.stream.current.type in { -            "name", -            "string", -            "integer", -            "float", -            "lparen", -            "lbracket", -            "lbrace", -        } and not self.stream.current.test_any("name:else", "name:or", "name:and"): -            if self.stream.current.test("name:is"): -                self.fail("You cannot chain multiple tests with is") -            arg_node = self.parse_primary() -            arg_node = self.parse_postfix(arg_node) -            args = [arg_node] -        else: -            args = [] -        node = nodes.Test( -            node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno -        ) -        if negated: -            node = nodes.Not(node, lineno=token.lineno) -        return node - -    def subparse( -        self, end_tokens: t.Optional[t.Tuple[str, ...]] = None -    ) -> t.List[nodes.Node]: -        body: t.List[nodes.Node] = [] -        data_buffer: t.List[nodes.Node] = [] -        add_data = data_buffer.append - -        if end_tokens is not None: -            self._end_token_stack.append(end_tokens) - -        def flush_data() -> None: -            if data_buffer: -                lineno = data_buffer[0].lineno -                body.append(nodes.Output(data_buffer[:], lineno=lineno)) -                del data_buffer[:] - -        try: -            while self.stream: -                token = self.stream.current -                if token.type == "data": -                    if token.value: -                        add_data(nodes.TemplateData(token.value, lineno=token.lineno)) -                    next(self.stream) -                elif token.type == "variable_begin": -                    next(self.stream) -                    add_data(self.parse_tuple(with_condexpr=True)) -                    self.stream.expect("variable_end") -                elif token.type == "block_begin": -                    flush_data() -                    next(self.stream) -                    if end_tokens is not None and self.stream.current.test_any( -                        *end_tokens -                    ): -                        return body -                    rv = self.parse_statement() -                    if isinstance(rv, list): -                        body.extend(rv) -                    else: -                        body.append(rv) -                    self.stream.expect("block_end") -                else: -                    raise AssertionError("internal parsing error") - -            flush_data() -        finally: -            if end_tokens is not None: -                self._end_token_stack.pop() -        return body - -    def parse(self) -> nodes.Template: -        """Parse the whole template into a `Template` node.""" -        result = nodes.Template(self.subparse(), lineno=1) -        result.set_environment(self.environment) -        return result diff --git a/venv/lib/python3.11/site-packages/jinja2/py.typed b/venv/lib/python3.11/site-packages/jinja2/py.typed deleted file mode 100644 index e69de29..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/py.typed +++ /dev/null diff --git a/venv/lib/python3.11/site-packages/jinja2/runtime.py b/venv/lib/python3.11/site-packages/jinja2/runtime.py deleted file mode 100644 index 58a540b..0000000 --- 
a/venv/lib/python3.11/site-packages/jinja2/runtime.py +++ /dev/null @@ -1,1051 +0,0 @@ -"""The runtime functions and state used by compiled templates.""" -import functools -import sys -import typing as t -from collections import abc -from itertools import chain - -from markupsafe import escape  # noqa: F401 -from markupsafe import Markup -from markupsafe import soft_str - -from .async_utils import auto_aiter -from .async_utils import auto_await  # noqa: F401 -from .exceptions import TemplateNotFound  # noqa: F401 -from .exceptions import TemplateRuntimeError  # noqa: F401 -from .exceptions import UndefinedError -from .nodes import EvalContext -from .utils import _PassArg -from .utils import concat -from .utils import internalcode -from .utils import missing -from .utils import Namespace  # noqa: F401 -from .utils import object_type_repr -from .utils import pass_eval_context - -V = t.TypeVar("V") -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -if t.TYPE_CHECKING: -    import logging -    import typing_extensions as te -    from .environment import Environment - -    class LoopRenderFunc(te.Protocol): -        def __call__( -            self, -            reciter: t.Iterable[V], -            loop_render_func: "LoopRenderFunc", -            depth: int = 0, -        ) -> str: -            ... - - -# these variables are exported to the template runtime -exported = [ -    "LoopContext", -    "TemplateReference", -    "Macro", -    "Markup", -    "TemplateRuntimeError", -    "missing", -    "escape", -    "markup_join", -    "str_join", -    "identity", -    "TemplateNotFound", -    "Namespace", -    "Undefined", -    "internalcode", -] -async_exported = [ -    "AsyncLoopContext", -    "auto_aiter", -    "auto_await", -] - - -def identity(x: V) -> V: -    """Returns its argument. Useful for certain things in the -    environment. 
-    """ -    return x - - -def markup_join(seq: t.Iterable[t.Any]) -> str: -    """Concatenation that escapes if necessary and converts to string.""" -    buf = [] -    iterator = map(soft_str, seq) -    for arg in iterator: -        buf.append(arg) -        if hasattr(arg, "__html__"): -            return Markup("").join(chain(buf, iterator)) -    return concat(buf) - - -def str_join(seq: t.Iterable[t.Any]) -> str: -    """Simple args to string conversion and concatenation.""" -    return concat(map(str, seq)) - - -def new_context( -    environment: "Environment", -    template_name: t.Optional[str], -    blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]], -    vars: t.Optional[t.Dict[str, t.Any]] = None, -    shared: bool = False, -    globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -    locals: t.Optional[t.Mapping[str, t.Any]] = None, -) -> "Context": -    """Internal helper for context creation.""" -    if vars is None: -        vars = {} -    if shared: -        parent = vars -    else: -        parent = dict(globals or (), **vars) -    if locals: -        # if the parent is shared a copy should be created because -        # we don't want to modify the dict passed -        if shared: -            parent = dict(parent) -        for key, value in locals.items(): -            if value is not missing: -                parent[key] = value -    return environment.context_class( -        environment, parent, template_name, blocks, globals=globals -    ) - - -class TemplateReference: -    """The `self` in templates.""" - -    def __init__(self, context: "Context") -> None: -        self.__context = context - -    def __getitem__(self, name: str) -> t.Any: -        blocks = self.__context.blocks[name] -        return BlockReference(name, self.__context, blocks, 0) - -    def __repr__(self) -> str: -        return f"<{type(self).__name__} {self.__context.name!r}>" - - -def _dict_method_all(dict_method: F) -> F: -    @functools.wraps(dict_method) -    def f_all(self: "Context") -> t.Any: -        return dict_method(self.get_all()) - -    return t.cast(F, f_all) - - -@abc.Mapping.register -class Context: -    """The template context holds the variables of a template.  It stores the -    values passed to the template and also the names the template exports. -    Creating instances is neither supported nor useful as it's created -    automatically at various stages of the template evaluation and should not -    be created by hand. - -    The context is immutable.  Modifications on :attr:`parent` **must not** -    happen and modifications on :attr:`vars` are allowed from generated -    template code only.  Template filters and global functions marked as -    :func:`pass_context` get the active context passed as first argument -    and are allowed to access the context read-only. - -    The template context supports read only dict operations (`get`, -    `keys`, `values`, `items`, `iterkeys`, `itervalues`, `iteritems`, -    `__getitem__`, `__contains__`).  Additionally there is a :meth:`resolve` -    method that doesn't fail with a `KeyError` but returns an -    :class:`Undefined` object for missing variables. 
-    """ - -    def __init__( -        self, -        environment: "Environment", -        parent: t.Dict[str, t.Any], -        name: t.Optional[str], -        blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]], -        globals: t.Optional[t.MutableMapping[str, t.Any]] = None, -    ): -        self.parent = parent -        self.vars: t.Dict[str, t.Any] = {} -        self.environment: "Environment" = environment -        self.eval_ctx = EvalContext(self.environment, name) -        self.exported_vars: t.Set[str] = set() -        self.name = name -        self.globals_keys = set() if globals is None else set(globals) - -        # create the initial mapping of blocks.  Whenever template inheritance -        # takes place the runtime will update this mapping with the new blocks -        # from the template. -        self.blocks = {k: [v] for k, v in blocks.items()} - -    def super( -        self, name: str, current: t.Callable[["Context"], t.Iterator[str]] -    ) -> t.Union["BlockReference", "Undefined"]: -        """Render a parent block.""" -        try: -            blocks = self.blocks[name] -            index = blocks.index(current) + 1 -            blocks[index] -        except LookupError: -            return self.environment.undefined( -                f"there is no parent block called {name!r}.", name="super" -            ) -        return BlockReference(name, self, blocks, index) - -    def get(self, key: str, default: t.Any = None) -> t.Any: -        """Look up a variable by name, or return a default if the key is -        not found. - -        :param key: The variable name to look up. -        :param default: The value to return if the key is not found. -        """ -        try: -            return self[key] -        except KeyError: -            return default - -    def resolve(self, key: str) -> t.Union[t.Any, "Undefined"]: -        """Look up a variable by name, or return an :class:`Undefined` -        object if the key is not found. - -        If you need to add custom behavior, override -        :meth:`resolve_or_missing`, not this method. The various lookup -        functions use that method, not this one. - -        :param key: The variable name to look up. -        """ -        rv = self.resolve_or_missing(key) - -        if rv is missing: -            return self.environment.undefined(name=key) - -        return rv - -    def resolve_or_missing(self, key: str) -> t.Any: -        """Look up a variable by name, or return a ``missing`` sentinel -        if the key is not found. - -        Override this method to add custom lookup behavior. -        :meth:`resolve`, :meth:`get`, and :meth:`__getitem__` use this -        method. Don't call this method directly. - -        :param key: The variable name to look up. -        """ -        if key in self.vars: -            return self.vars[key] - -        if key in self.parent: -            return self.parent[key] - -        return missing - -    def get_exported(self) -> t.Dict[str, t.Any]: -        """Get a new dict with the exported variables.""" -        return {k: self.vars[k] for k in self.exported_vars} - -    def get_all(self) -> t.Dict[str, t.Any]: -        """Return the complete context as dict including the exported -        variables.  For optimizations reasons this might not return an -        actual copy so be careful with using it. 
-        """ -        if not self.vars: -            return self.parent -        if not self.parent: -            return self.vars -        return dict(self.parent, **self.vars) - -    @internalcode -    def call( -        __self, __obj: t.Callable, *args: t.Any, **kwargs: t.Any  # noqa: B902 -    ) -> t.Union[t.Any, "Undefined"]: -        """Call the callable with the arguments and keyword arguments -        provided but inject the active context or environment as first -        argument if the callable has :func:`pass_context` or -        :func:`pass_environment`. -        """ -        if __debug__: -            __traceback_hide__ = True  # noqa - -        # Allow callable classes to take a context -        if ( -            hasattr(__obj, "__call__")  # noqa: B004 -            and _PassArg.from_obj(__obj.__call__) is not None -        ): -            __obj = __obj.__call__ - -        pass_arg = _PassArg.from_obj(__obj) - -        if pass_arg is _PassArg.context: -            # the active context should have access to variables set in -            # loops and blocks without mutating the context itself -            if kwargs.get("_loop_vars"): -                __self = __self.derived(kwargs["_loop_vars"]) -            if kwargs.get("_block_vars"): -                __self = __self.derived(kwargs["_block_vars"]) -            args = (__self,) + args -        elif pass_arg is _PassArg.eval_context: -            args = (__self.eval_ctx,) + args -        elif pass_arg is _PassArg.environment: -            args = (__self.environment,) + args - -        kwargs.pop("_block_vars", None) -        kwargs.pop("_loop_vars", None) - -        try: -            return __obj(*args, **kwargs) -        except StopIteration: -            return __self.environment.undefined( -                "value was undefined because a callable raised a" -                " StopIteration exception" -            ) - -    def derived(self, locals: t.Optional[t.Dict[str, t.Any]] = None) -> "Context": -        """Internal helper function to create a derived context.  This is -        used in situations where the system needs a new context in the same -        template that is independent. -        """ -        context = new_context( -            self.environment, self.name, {}, self.get_all(), True, None, locals -        ) -        context.eval_ctx = self.eval_ctx -        context.blocks.update((k, list(v)) for k, v in self.blocks.items()) -        return context - -    keys = _dict_method_all(dict.keys) -    values = _dict_method_all(dict.values) -    items = _dict_method_all(dict.items) - -    def __contains__(self, name: str) -> bool: -        return name in self.vars or name in self.parent - -    def __getitem__(self, key: str) -> t.Any: -        """Look up a variable by name with ``[]`` syntax, or raise a -        ``KeyError`` if the key is not found. 
-        """ -        item = self.resolve_or_missing(key) - -        if item is missing: -            raise KeyError(key) - -        return item - -    def __repr__(self) -> str: -        return f"<{type(self).__name__} {self.get_all()!r} of {self.name!r}>" - - -class BlockReference: -    """One block on a template reference.""" - -    def __init__( -        self, -        name: str, -        context: "Context", -        stack: t.List[t.Callable[["Context"], t.Iterator[str]]], -        depth: int, -    ) -> None: -        self.name = name -        self._context = context -        self._stack = stack -        self._depth = depth - -    @property -    def super(self) -> t.Union["BlockReference", "Undefined"]: -        """Super the block.""" -        if self._depth + 1 >= len(self._stack): -            return self._context.environment.undefined( -                f"there is no parent block called {self.name!r}.", name="super" -            ) -        return BlockReference(self.name, self._context, self._stack, self._depth + 1) - -    @internalcode -    async def _async_call(self) -> str: -        rv = concat( -            [x async for x in self._stack[self._depth](self._context)]  # type: ignore -        ) - -        if self._context.eval_ctx.autoescape: -            return Markup(rv) - -        return rv - -    @internalcode -    def __call__(self) -> str: -        if self._context.environment.is_async: -            return self._async_call()  # type: ignore - -        rv = concat(self._stack[self._depth](self._context)) - -        if self._context.eval_ctx.autoescape: -            return Markup(rv) - -        return rv - - -class LoopContext: -    """A wrapper iterable for dynamic ``for`` loops, with information -    about the loop and iteration. -    """ - -    #: Current iteration of the loop, starting at 0. -    index0 = -1 - -    _length: t.Optional[int] = None -    _after: t.Any = missing -    _current: t.Any = missing -    _before: t.Any = missing -    _last_changed_value: t.Any = missing - -    def __init__( -        self, -        iterable: t.Iterable[V], -        undefined: t.Type["Undefined"], -        recurse: t.Optional["LoopRenderFunc"] = None, -        depth0: int = 0, -    ) -> None: -        """ -        :param iterable: Iterable to wrap. -        :param undefined: :class:`Undefined` class to use for next and -            previous items. -        :param recurse: The function to render the loop body when the -            loop is marked recursive. -        :param depth0: Incremented when looping recursively. -        """ -        self._iterable = iterable -        self._iterator = self._to_iterator(iterable) -        self._undefined = undefined -        self._recurse = recurse -        #: How many levels deep a recursive loop currently is, starting at 0. -        self.depth0 = depth0 - -    @staticmethod -    def _to_iterator(iterable: t.Iterable[V]) -> t.Iterator[V]: -        return iter(iterable) - -    @property -    def length(self) -> int: -        """Length of the iterable. - -        If the iterable is a generator or otherwise does not have a -        size, it is eagerly evaluated to get a size. 
-        """ -        if self._length is not None: -            return self._length - -        try: -            self._length = len(self._iterable)  # type: ignore -        except TypeError: -            iterable = list(self._iterator) -            self._iterator = self._to_iterator(iterable) -            self._length = len(iterable) + self.index + (self._after is not missing) - -        return self._length - -    def __len__(self) -> int: -        return self.length - -    @property -    def depth(self) -> int: -        """How many levels deep a recursive loop currently is, starting at 1.""" -        return self.depth0 + 1 - -    @property -    def index(self) -> int: -        """Current iteration of the loop, starting at 1.""" -        return self.index0 + 1 - -    @property -    def revindex0(self) -> int: -        """Number of iterations from the end of the loop, ending at 0. - -        Requires calculating :attr:`length`. -        """ -        return self.length - self.index - -    @property -    def revindex(self) -> int: -        """Number of iterations from the end of the loop, ending at 1. - -        Requires calculating :attr:`length`. -        """ -        return self.length - self.index0 - -    @property -    def first(self) -> bool: -        """Whether this is the first iteration of the loop.""" -        return self.index0 == 0 - -    def _peek_next(self) -> t.Any: -        """Return the next element in the iterable, or :data:`missing` -        if the iterable is exhausted. Only peeks one item ahead, caching -        the result in :attr:`_last` for use in subsequent checks. The -        cache is reset when :meth:`__next__` is called. -        """ -        if self._after is not missing: -            return self._after - -        self._after = next(self._iterator, missing) -        return self._after - -    @property -    def last(self) -> bool: -        """Whether this is the last iteration of the loop. - -        Causes the iterable to advance early. See -        :func:`itertools.groupby` for issues this can cause. -        The :func:`groupby` filter avoids that issue. -        """ -        return self._peek_next() is missing - -    @property -    def previtem(self) -> t.Union[t.Any, "Undefined"]: -        """The item in the previous iteration. Undefined during the -        first iteration. -        """ -        if self.first: -            return self._undefined("there is no previous item") - -        return self._before - -    @property -    def nextitem(self) -> t.Union[t.Any, "Undefined"]: -        """The item in the next iteration. Undefined during the last -        iteration. - -        Causes the iterable to advance early. See -        :func:`itertools.groupby` for issues this can cause. -        The :func:`jinja-filters.groupby` filter avoids that issue. -        """ -        rv = self._peek_next() - -        if rv is missing: -            return self._undefined("there is no next item") - -        return rv - -    def cycle(self, *args: V) -> V: -        """Return a value from the given args, cycling through based on -        the current :attr:`index0`. - -        :param args: One or more values to cycle through. -        """ -        if not args: -            raise TypeError("no items for cycling given") - -        return args[self.index0 % len(args)] - -    def changed(self, *value: t.Any) -> bool: -        """Return ``True`` if previously called with a different value -        (including when called for the first time). 
- -        :param value: One or more values to compare to the last call. -        """ -        if self._last_changed_value != value: -            self._last_changed_value = value -            return True - -        return False - -    def __iter__(self) -> "LoopContext": -        return self - -    def __next__(self) -> t.Tuple[t.Any, "LoopContext"]: -        if self._after is not missing: -            rv = self._after -            self._after = missing -        else: -            rv = next(self._iterator) - -        self.index0 += 1 -        self._before = self._current -        self._current = rv -        return rv, self - -    @internalcode -    def __call__(self, iterable: t.Iterable[V]) -> str: -        """When iterating over nested data, render the body of the loop -        recursively with the given inner iterable data. - -        The loop must have the ``recursive`` marker for this to work. -        """ -        if self._recurse is None: -            raise TypeError( -                "The loop must have the 'recursive' marker to be called recursively." -            ) - -        return self._recurse(iterable, self._recurse, depth=self.depth) - -    def __repr__(self) -> str: -        return f"<{type(self).__name__} {self.index}/{self.length}>" - - -class AsyncLoopContext(LoopContext): -    _iterator: t.AsyncIterator[t.Any]  # type: ignore - -    @staticmethod -    def _to_iterator(  # type: ignore -        iterable: t.Union[t.Iterable[V], t.AsyncIterable[V]] -    ) -> t.AsyncIterator[V]: -        return auto_aiter(iterable) - -    @property -    async def length(self) -> int:  # type: ignore -        if self._length is not None: -            return self._length - -        try: -            self._length = len(self._iterable)  # type: ignore -        except TypeError: -            iterable = [x async for x in self._iterator] -            self._iterator = self._to_iterator(iterable) -            self._length = len(iterable) + self.index + (self._after is not missing) - -        return self._length - -    @property -    async def revindex0(self) -> int:  # type: ignore -        return await self.length - self.index - -    @property -    async def revindex(self) -> int:  # type: ignore -        return await self.length - self.index0 - -    async def _peek_next(self) -> t.Any: -        if self._after is not missing: -            return self._after - -        try: -            self._after = await self._iterator.__anext__() -        except StopAsyncIteration: -            self._after = missing - -        return self._after - -    @property -    async def last(self) -> bool:  # type: ignore -        return await self._peek_next() is missing - -    @property -    async def nextitem(self) -> t.Union[t.Any, "Undefined"]: -        rv = await self._peek_next() - -        if rv is missing: -            return self._undefined("there is no next item") - -        return rv - -    def __aiter__(self) -> "AsyncLoopContext": -        return self - -    async def __anext__(self) -> t.Tuple[t.Any, "AsyncLoopContext"]: -        if self._after is not missing: -            rv = self._after -            self._after = missing -        else: -            rv = await self._iterator.__anext__() - -        self.index0 += 1 -        self._before = self._current -        self._current = rv -        return rv, self - - -class Macro: -    """Wraps a macro function.""" - -    def __init__( -        self, -        environment: "Environment", -        func: t.Callable[..., str], -        name: str, -        arguments: 
t.List[str], -        catch_kwargs: bool, -        catch_varargs: bool, -        caller: bool, -        default_autoescape: t.Optional[bool] = None, -    ): -        self._environment = environment -        self._func = func -        self._argument_count = len(arguments) -        self.name = name -        self.arguments = arguments -        self.catch_kwargs = catch_kwargs -        self.catch_varargs = catch_varargs -        self.caller = caller -        self.explicit_caller = "caller" in arguments - -        if default_autoescape is None: -            if callable(environment.autoescape): -                default_autoescape = environment.autoescape(None) -            else: -                default_autoescape = environment.autoescape - -        self._default_autoescape = default_autoescape - -    @internalcode -    @pass_eval_context -    def __call__(self, *args: t.Any, **kwargs: t.Any) -> str: -        # This requires a bit of explanation,  In the past we used to -        # decide largely based on compile-time information if a macro is -        # safe or unsafe.  While there was a volatile mode it was largely -        # unused for deciding on escaping.  This turns out to be -        # problematic for macros because whether a macro is safe depends not -        # on the escape mode when it was defined, but rather when it was used. -        # -        # Because however we export macros from the module system and -        # there are historic callers that do not pass an eval context (and -        # will continue to not pass one), we need to perform an instance -        # check here. -        # -        # This is considered safe because an eval context is not a valid -        # argument to callables otherwise anyway.  Worst case here is -        # that if no eval context is passed we fall back to the compile -        # time autoescape flag. -        if args and isinstance(args[0], EvalContext): -            autoescape = args[0].autoescape -            args = args[1:] -        else: -            autoescape = self._default_autoescape - -        # try to consume the positional arguments -        arguments = list(args[: self._argument_count]) -        off = len(arguments) - -        # For information why this is necessary refer to the handling -        # of caller in the `macro_body` handler in the compiler. -        found_caller = False - -        # if the number of arguments consumed is not the number of -        # arguments expected we start filling in keyword arguments -        # and defaults. -        if off != self._argument_count: -            for name in self.arguments[len(arguments) :]: -                try: -                    value = kwargs.pop(name) -                except KeyError: -                    value = missing -                if name == "caller": -                    found_caller = True -                arguments.append(value) -        else: -            found_caller = self.explicit_caller - -        # it's important that the order of these arguments does not change -        # if not also changed in the compiler's `function_scoping` method. -        # the order is caller, keyword arguments, positional arguments! 
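As a usage sketch, the caller handling above is what backs call blocks in templates; the macro and argument names below are illustrative:

    from jinja2 import Environment

    env = Environment()
    tmpl = env.from_string(
        "{% macro field(name, value='') %}"
        "<input name='{{ name }}' value='{{ value }}'>{{ caller() }}"
        "{% endmacro %}"
        "{% call field('user', value='bob') %}<!-- required -->{% endcall %}"
    )
    print(tmpl.render())  # <input name='user' value='bob'><!-- required -->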
-        if self.caller and not found_caller: -            caller = kwargs.pop("caller", None) -            if caller is None: -                caller = self._environment.undefined("No caller defined", name="caller") -            arguments.append(caller) - -        if self.catch_kwargs: -            arguments.append(kwargs) -        elif kwargs: -            if "caller" in kwargs: -                raise TypeError( -                    f"macro {self.name!r} was invoked with two values for the special" -                    " caller argument. This is most likely a bug." -                ) -            raise TypeError( -                f"macro {self.name!r} takes no keyword argument {next(iter(kwargs))!r}" -            ) -        if self.catch_varargs: -            arguments.append(args[self._argument_count :]) -        elif len(args) > self._argument_count: -            raise TypeError( -                f"macro {self.name!r} takes not more than" -                f" {len(self.arguments)} argument(s)" -            ) - -        return self._invoke(arguments, autoescape) - -    async def _async_invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str: -        rv = await self._func(*arguments)  # type: ignore - -        if autoescape: -            return Markup(rv) - -        return rv  # type: ignore - -    def _invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str: -        if self._environment.is_async: -            return self._async_invoke(arguments, autoescape)  # type: ignore - -        rv = self._func(*arguments) - -        if autoescape: -            rv = Markup(rv) - -        return rv - -    def __repr__(self) -> str: -        name = "anonymous" if self.name is None else repr(self.name) -        return f"<{type(self).__name__} {name}>" - - -class Undefined: -    """The default undefined type.  This undefined type can be printed and -    iterated over, but every other access will raise an :exc:`UndefinedError`: - -    >>> foo = Undefined(name='foo') -    >>> str(foo) -    '' -    >>> not foo -    True -    >>> foo + 42 -    Traceback (most recent call last): -      ... -    jinja2.exceptions.UndefinedError: 'foo' is undefined -    """ - -    __slots__ = ( -        "_undefined_hint", -        "_undefined_obj", -        "_undefined_name", -        "_undefined_exception", -    ) - -    def __init__( -        self, -        hint: t.Optional[str] = None, -        obj: t.Any = missing, -        name: t.Optional[str] = None, -        exc: t.Type[TemplateRuntimeError] = UndefinedError, -    ) -> None: -        self._undefined_hint = hint -        self._undefined_obj = obj -        self._undefined_name = name -        self._undefined_exception = exc - -    @property -    def _undefined_message(self) -> str: -        """Build a message about the undefined value based on how it was -        accessed. 
-        """ -        if self._undefined_hint: -            return self._undefined_hint - -        if self._undefined_obj is missing: -            return f"{self._undefined_name!r} is undefined" - -        if not isinstance(self._undefined_name, str): -            return ( -                f"{object_type_repr(self._undefined_obj)} has no" -                f" element {self._undefined_name!r}" -            ) - -        return ( -            f"{object_type_repr(self._undefined_obj)!r} has no" -            f" attribute {self._undefined_name!r}" -        ) - -    @internalcode -    def _fail_with_undefined_error( -        self, *args: t.Any, **kwargs: t.Any -    ) -> "te.NoReturn": -        """Raise an :exc:`UndefinedError` when operations are performed -        on the undefined value. -        """ -        raise self._undefined_exception(self._undefined_message) - -    @internalcode -    def __getattr__(self, name: str) -> t.Any: -        if name[:2] == "__": -            raise AttributeError(name) - -        return self._fail_with_undefined_error() - -    __add__ = __radd__ = __sub__ = __rsub__ = _fail_with_undefined_error -    __mul__ = __rmul__ = __div__ = __rdiv__ = _fail_with_undefined_error -    __truediv__ = __rtruediv__ = _fail_with_undefined_error -    __floordiv__ = __rfloordiv__ = _fail_with_undefined_error -    __mod__ = __rmod__ = _fail_with_undefined_error -    __pos__ = __neg__ = _fail_with_undefined_error -    __call__ = __getitem__ = _fail_with_undefined_error -    __lt__ = __le__ = __gt__ = __ge__ = _fail_with_undefined_error -    __int__ = __float__ = __complex__ = _fail_with_undefined_error -    __pow__ = __rpow__ = _fail_with_undefined_error - -    def __eq__(self, other: t.Any) -> bool: -        return type(self) is type(other) - -    def __ne__(self, other: t.Any) -> bool: -        return not self.__eq__(other) - -    def __hash__(self) -> int: -        return id(type(self)) - -    def __str__(self) -> str: -        return "" - -    def __len__(self) -> int: -        return 0 - -    def __iter__(self) -> t.Iterator[t.Any]: -        yield from () - -    async def __aiter__(self) -> t.AsyncIterator[t.Any]: -        for _ in (): -            yield - -    def __bool__(self) -> bool: -        return False - -    def __repr__(self) -> str: -        return "Undefined" - - -def make_logging_undefined( -    logger: t.Optional["logging.Logger"] = None, base: t.Type[Undefined] = Undefined -) -> t.Type[Undefined]: -    """Given a logger object this returns a new undefined class that will -    log certain failures.  It will log iterations and printing.  If no -    logger is given a default logger is created. - -    Example:: - -        logger = logging.getLogger(__name__) -        LoggingUndefined = make_logging_undefined( -            logger=logger, -            base=Undefined -        ) - -    .. versionadded:: 2.8 - -    :param logger: the logger to use.  If not provided, a default logger -                   is created. -    :param base: the base class to add logging functionality to.  This -                 defaults to :class:`Undefined`. 
-    """ -    if logger is None: -        import logging - -        logger = logging.getLogger(__name__) -        logger.addHandler(logging.StreamHandler(sys.stderr)) - -    def _log_message(undef: Undefined) -> None: -        logger.warning("Template variable warning: %s", undef._undefined_message) - -    class LoggingUndefined(base):  # type: ignore -        __slots__ = () - -        def _fail_with_undefined_error(  # type: ignore -            self, *args: t.Any, **kwargs: t.Any -        ) -> "te.NoReturn": -            try: -                super()._fail_with_undefined_error(*args, **kwargs) -            except self._undefined_exception as e: -                logger.error("Template variable error: %s", e)  # type: ignore -                raise e - -        def __str__(self) -> str: -            _log_message(self) -            return super().__str__()  # type: ignore - -        def __iter__(self) -> t.Iterator[t.Any]: -            _log_message(self) -            return super().__iter__()  # type: ignore - -        def __bool__(self) -> bool: -            _log_message(self) -            return super().__bool__()  # type: ignore - -    return LoggingUndefined - - -class ChainableUndefined(Undefined): -    """An undefined that is chainable, where both ``__getattr__`` and -    ``__getitem__`` return itself rather than raising an -    :exc:`UndefinedError`. - -    >>> foo = ChainableUndefined(name='foo') -    >>> str(foo.bar['baz']) -    '' -    >>> foo.bar['baz'] + 42 -    Traceback (most recent call last): -      ... -    jinja2.exceptions.UndefinedError: 'foo' is undefined - -    .. versionadded:: 2.11.0 -    """ - -    __slots__ = () - -    def __html__(self) -> str: -        return str(self) - -    def __getattr__(self, _: str) -> "ChainableUndefined": -        return self - -    __getitem__ = __getattr__  # type: ignore - - -class DebugUndefined(Undefined): -    """An undefined that returns the debug info when printed. - -    >>> foo = DebugUndefined(name='foo') -    >>> str(foo) -    '{{ foo }}' -    >>> not foo -    True -    >>> foo + 42 -    Traceback (most recent call last): -      ... -    jinja2.exceptions.UndefinedError: 'foo' is undefined -    """ - -    __slots__ = () - -    def __str__(self) -> str: -        if self._undefined_hint: -            message = f"undefined value printed: {self._undefined_hint}" - -        elif self._undefined_obj is missing: -            message = self._undefined_name  # type: ignore - -        else: -            message = ( -                f"no such element: {object_type_repr(self._undefined_obj)}" -                f"[{self._undefined_name!r}]" -            ) - -        return f"{{{{ {message} }}}}" - - -class StrictUndefined(Undefined): -    """An undefined that barks on print and iteration as well as boolean -    tests and all kinds of comparisons.  In other words: you can do nothing -    with it except checking if it's defined using the `defined` test. - -    >>> foo = StrictUndefined(name='foo') -    >>> str(foo) -    Traceback (most recent call last): -      ... -    jinja2.exceptions.UndefinedError: 'foo' is undefined -    >>> not foo -    Traceback (most recent call last): -      ... -    jinja2.exceptions.UndefinedError: 'foo' is undefined -    >>> foo + 42 -    Traceback (most recent call last): -      ... 
-    jinja2.exceptions.UndefinedError: 'foo' is undefined -    """ - -    __slots__ = () -    __iter__ = __str__ = __len__ = Undefined._fail_with_undefined_error -    __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error -    __contains__ = Undefined._fail_with_undefined_error - - -# Remove slots attributes, after the metaclass is applied they are -# unneeded and contain wrong data for subclasses. -del ( -    Undefined.__slots__, -    ChainableUndefined.__slots__, -    DebugUndefined.__slots__, -    StrictUndefined.__slots__, -) diff --git a/venv/lib/python3.11/site-packages/jinja2/sandbox.py b/venv/lib/python3.11/site-packages/jinja2/sandbox.py deleted file mode 100644 index 06d7414..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/sandbox.py +++ /dev/null @@ -1,428 +0,0 @@ -"""A sandbox layer that ensures unsafe operations cannot be performed. -Useful when the template itself comes from an untrusted source. -""" -import operator -import types -import typing as t -from _string import formatter_field_name_split  # type: ignore -from collections import abc -from collections import deque -from string import Formatter - -from markupsafe import EscapeFormatter -from markupsafe import Markup - -from .environment import Environment -from .exceptions import SecurityError -from .runtime import Context -from .runtime import Undefined - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -#: maximum number of items a range may produce -MAX_RANGE = 100000 - -#: Unsafe function attributes. -UNSAFE_FUNCTION_ATTRIBUTES: t.Set[str] = set() - -#: Unsafe method attributes. Function attributes are unsafe for methods too. -UNSAFE_METHOD_ATTRIBUTES: t.Set[str] = set() - -#: unsafe generator attributes. -UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"} - -#: unsafe attributes on coroutines -UNSAFE_COROUTINE_ATTRIBUTES = {"cr_frame", "cr_code"} - -#: unsafe attributes on async generators -UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"} - -_mutable_spec: t.Tuple[t.Tuple[t.Type, t.FrozenSet[str]], ...] = ( -    ( -        abc.MutableSet, -        frozenset( -            [ -                "add", -                "clear", -                "difference_update", -                "discard", -                "pop", -                "remove", -                "symmetric_difference_update", -                "update", -            ] -        ), -    ), -    ( -        abc.MutableMapping, -        frozenset(["clear", "pop", "popitem", "setdefault", "update"]), -    ), -    ( -        abc.MutableSequence, -        frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]), -    ), -    ( -        deque, -        frozenset( -            [ -                "append", -                "appendleft", -                "clear", -                "extend", -                "extendleft", -                "pop", -                "popleft", -                "remove", -                "rotate", -            ] -        ), -    ), -) - - -def inspect_format_method(callable: t.Callable) -> t.Optional[str]: -    if not isinstance( -        callable, (types.MethodType, types.BuiltinMethodType) -    ) or callable.__name__ not in ("format", "format_map"): -        return None - -    obj = callable.__self__ - -    if isinstance(obj, str): -        return obj - -    return None - - -def safe_range(*args: int) -> range: -    """A range that can't generate ranges with a length of more than -    MAX_RANGE items. 
-    """ -    rng = range(*args) - -    if len(rng) > MAX_RANGE: -        raise OverflowError( -            "Range too big. The sandbox blocks ranges larger than" -            f" MAX_RANGE ({MAX_RANGE})." -        ) - -    return rng - - -def unsafe(f: F) -> F: -    """Marks a function or method as unsafe. - -    .. code-block: python - -        @unsafe -        def delete(self): -            pass -    """ -    f.unsafe_callable = True  # type: ignore -    return f - - -def is_internal_attribute(obj: t.Any, attr: str) -> bool: -    """Test if the attribute given is an internal python attribute.  For -    example this function returns `True` for the `func_code` attribute of -    python objects.  This is useful if the environment method -    :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden. - -    >>> from jinja2.sandbox import is_internal_attribute -    >>> is_internal_attribute(str, "mro") -    True -    >>> is_internal_attribute(str, "upper") -    False -    """ -    if isinstance(obj, types.FunctionType): -        if attr in UNSAFE_FUNCTION_ATTRIBUTES: -            return True -    elif isinstance(obj, types.MethodType): -        if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES: -            return True -    elif isinstance(obj, type): -        if attr == "mro": -            return True -    elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)): -        return True -    elif isinstance(obj, types.GeneratorType): -        if attr in UNSAFE_GENERATOR_ATTRIBUTES: -            return True -    elif hasattr(types, "CoroutineType") and isinstance(obj, types.CoroutineType): -        if attr in UNSAFE_COROUTINE_ATTRIBUTES: -            return True -    elif hasattr(types, "AsyncGeneratorType") and isinstance( -        obj, types.AsyncGeneratorType -    ): -        if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES: -            return True -    return attr.startswith("__") - - -def modifies_known_mutable(obj: t.Any, attr: str) -> bool: -    """This function checks if an attribute on a builtin mutable object -    (list, dict, set or deque) or the corresponding ABCs would modify it -    if called. - -    >>> modifies_known_mutable({}, "clear") -    True -    >>> modifies_known_mutable({}, "keys") -    False -    >>> modifies_known_mutable([], "append") -    True -    >>> modifies_known_mutable([], "index") -    False - -    If called with an unsupported object, ``False`` is returned. - -    >>> modifies_known_mutable("foo", "upper") -    False -    """ -    for typespec, unsafe in _mutable_spec: -        if isinstance(obj, typespec): -            return attr in unsafe -    return False - - -class SandboxedEnvironment(Environment): -    """The sandboxed environment.  It works like the regular environment but -    tells the compiler to generate sandboxed code.  Additionally subclasses of -    this environment may override the methods that tell the runtime what -    attributes or functions are safe to access. - -    If the template tries to access insecure code a :exc:`SecurityError` is -    raised.  However also other exceptions may occur during the rendering so -    the caller has to ensure that all exceptions are caught. -    """ - -    sandboxed = True - -    #: default callback table for the binary operators.  
A copy of this is -    #: available on each instance of a sandboxed environment as -    #: :attr:`binop_table` -    default_binop_table: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = { -        "+": operator.add, -        "-": operator.sub, -        "*": operator.mul, -        "/": operator.truediv, -        "//": operator.floordiv, -        "**": operator.pow, -        "%": operator.mod, -    } - -    #: default callback table for the unary operators.  A copy of this is -    #: available on each instance of a sandboxed environment as -    #: :attr:`unop_table` -    default_unop_table: t.Dict[str, t.Callable[[t.Any], t.Any]] = { -        "+": operator.pos, -        "-": operator.neg, -    } - -    #: a set of binary operators that should be intercepted.  Each operator -    #: that is added to this set (empty by default) is delegated to the -    #: :meth:`call_binop` method that will perform the operator.  The default -    #: operator callback is specified by :attr:`binop_table`. -    #: -    #: The following binary operators are interceptable: -    #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**`` -    #: -    #: The default operation form the operator table corresponds to the -    #: builtin function.  Intercepted calls are always slower than the native -    #: operator call, so make sure only to intercept the ones you are -    #: interested in. -    #: -    #: .. versionadded:: 2.6 -    intercepted_binops: t.FrozenSet[str] = frozenset() - -    #: a set of unary operators that should be intercepted.  Each operator -    #: that is added to this set (empty by default) is delegated to the -    #: :meth:`call_unop` method that will perform the operator.  The default -    #: operator callback is specified by :attr:`unop_table`. -    #: -    #: The following unary operators are interceptable: ``+``, ``-`` -    #: -    #: The default operation form the operator table corresponds to the -    #: builtin function.  Intercepted calls are always slower than the native -    #: operator call, so make sure only to intercept the ones you are -    #: interested in. -    #: -    #: .. versionadded:: 2.6 -    intercepted_unops: t.FrozenSet[str] = frozenset() - -    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: -        super().__init__(*args, **kwargs) -        self.globals["range"] = safe_range -        self.binop_table = self.default_binop_table.copy() -        self.unop_table = self.default_unop_table.copy() - -    def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool: -        """The sandboxed environment will call this method to check if the -        attribute of an object is safe to access.  Per default all attributes -        starting with an underscore are considered private as well as the -        special attributes of internal python objects as returned by the -        :func:`is_internal_attribute` function. -        """ -        return not (attr.startswith("_") or is_internal_attribute(obj, attr)) - -    def is_safe_callable(self, obj: t.Any) -> bool: -        """Check if an object is safely callable. By default callables -        are considered safe unless decorated with :func:`unsafe`. - -        This also recognizes the Django convention of setting -        ``func.alters_data = True``. 
-        """ -        return not ( -            getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False) -        ) - -    def call_binop( -        self, context: Context, operator: str, left: t.Any, right: t.Any -    ) -> t.Any: -        """For intercepted binary operator calls (:meth:`intercepted_binops`) -        this function is executed instead of the builtin operator.  This can -        be used to fine tune the behavior of certain operators. - -        .. versionadded:: 2.6 -        """ -        return self.binop_table[operator](left, right) - -    def call_unop(self, context: Context, operator: str, arg: t.Any) -> t.Any: -        """For intercepted unary operator calls (:meth:`intercepted_unops`) -        this function is executed instead of the builtin operator.  This can -        be used to fine tune the behavior of certain operators. - -        .. versionadded:: 2.6 -        """ -        return self.unop_table[operator](arg) - -    def getitem( -        self, obj: t.Any, argument: t.Union[str, t.Any] -    ) -> t.Union[t.Any, Undefined]: -        """Subscribe an object from sandboxed code.""" -        try: -            return obj[argument] -        except (TypeError, LookupError): -            if isinstance(argument, str): -                try: -                    attr = str(argument) -                except Exception: -                    pass -                else: -                    try: -                        value = getattr(obj, attr) -                    except AttributeError: -                        pass -                    else: -                        if self.is_safe_attribute(obj, argument, value): -                            return value -                        return self.unsafe_undefined(obj, argument) -        return self.undefined(obj=obj, name=argument) - -    def getattr(self, obj: t.Any, attribute: str) -> t.Union[t.Any, Undefined]: -        """Subscribe an object from sandboxed code and prefer the -        attribute.  The attribute passed *must* be a bytestring. -        """ -        try: -            value = getattr(obj, attribute) -        except AttributeError: -            try: -                return obj[attribute] -            except (TypeError, LookupError): -                pass -        else: -            if self.is_safe_attribute(obj, attribute, value): -                return value -            return self.unsafe_undefined(obj, attribute) -        return self.undefined(obj=obj, name=attribute) - -    def unsafe_undefined(self, obj: t.Any, attribute: str) -> Undefined: -        """Return an undefined object for unsafe attributes.""" -        return self.undefined( -            f"access to attribute {attribute!r} of" -            f" {type(obj).__name__!r} object is unsafe.", -            name=attribute, -            obj=obj, -            exc=SecurityError, -        ) - -    def format_string( -        self, -        s: str, -        args: t.Tuple[t.Any, ...], -        kwargs: t.Dict[str, t.Any], -        format_func: t.Optional[t.Callable] = None, -    ) -> str: -        """If a format call is detected, then this is routed through this -        method so that our safety sandbox can be used for it. 
-        """ -        formatter: SandboxedFormatter -        if isinstance(s, Markup): -            formatter = SandboxedEscapeFormatter(self, escape=s.escape) -        else: -            formatter = SandboxedFormatter(self) - -        if format_func is not None and format_func.__name__ == "format_map": -            if len(args) != 1 or kwargs: -                raise TypeError( -                    "format_map() takes exactly one argument" -                    f" {len(args) + (kwargs is not None)} given" -                ) - -            kwargs = args[0] -            args = () - -        rv = formatter.vformat(s, args, kwargs) -        return type(s)(rv) - -    def call( -        __self,  # noqa: B902 -        __context: Context, -        __obj: t.Any, -        *args: t.Any, -        **kwargs: t.Any, -    ) -> t.Any: -        """Call an object from sandboxed code.""" -        fmt = inspect_format_method(__obj) -        if fmt is not None: -            return __self.format_string(fmt, args, kwargs, __obj) - -        # the double prefixes are to avoid double keyword argument -        # errors when proxying the call. -        if not __self.is_safe_callable(__obj): -            raise SecurityError(f"{__obj!r} is not safely callable") -        return __context.call(__obj, *args, **kwargs) - - -class ImmutableSandboxedEnvironment(SandboxedEnvironment): -    """Works exactly like the regular `SandboxedEnvironment` but does not -    permit modifications on the builtin mutable objects `list`, `set`, and -    `dict` by using the :func:`modifies_known_mutable` function. -    """ - -    def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool: -        if not super().is_safe_attribute(obj, attr, value): -            return False - -        return not modifies_known_mutable(obj, attr) - - -class SandboxedFormatter(Formatter): -    def __init__(self, env: Environment, **kwargs: t.Any) -> None: -        self._env = env -        super().__init__(**kwargs) - -    def get_field( -        self, field_name: str, args: t.Sequence[t.Any], kwargs: t.Mapping[str, t.Any] -    ) -> t.Tuple[t.Any, str]: -        first, rest = formatter_field_name_split(field_name) -        obj = self.get_value(first, args, kwargs) -        for is_attr, i in rest: -            if is_attr: -                obj = self._env.getattr(obj, i) -            else: -                obj = self._env.getitem(obj, i) -        return obj, first - - -class SandboxedEscapeFormatter(SandboxedFormatter, EscapeFormatter): -    pass diff --git a/venv/lib/python3.11/site-packages/jinja2/tests.py b/venv/lib/python3.11/site-packages/jinja2/tests.py deleted file mode 100644 index a467cf0..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/tests.py +++ /dev/null @@ -1,255 +0,0 @@ -"""Built-in template tests used with the ``is`` operator.""" -import operator -import typing as t -from collections import abc -from numbers import Number - -from .runtime import Undefined -from .utils import pass_environment - -if t.TYPE_CHECKING: -    from .environment import Environment - - -def test_odd(value: int) -> bool: -    """Return true if the variable is odd.""" -    return value % 2 == 1 - - -def test_even(value: int) -> bool: -    """Return true if the variable is even.""" -    return value % 2 == 0 - - -def test_divisibleby(value: int, num: int) -> bool: -    """Check if a variable is divisible by a number.""" -    return value % num == 0 - - -def test_defined(value: t.Any) -> bool: -    """Return true if the variable is defined: - -    .. 
sourcecode:: jinja - -        {% if variable is defined %} -            value of variable: {{ variable }} -        {% else %} -            variable is not defined -        {% endif %} - -    See the :func:`default` filter for a simple way to set undefined -    variables. -    """ -    return not isinstance(value, Undefined) - - -def test_undefined(value: t.Any) -> bool: -    """Like :func:`defined` but the other way round.""" -    return isinstance(value, Undefined) - - -@pass_environment -def test_filter(env: "Environment", value: str) -> bool: -    """Check if a filter exists by name. Useful if a filter may be -    optionally available. - -    .. code-block:: jinja - -        {% if 'markdown' is filter %} -            {{ value | markdown }} -        {% else %} -            {{ value }} -        {% endif %} - -    .. versionadded:: 3.0 -    """ -    return value in env.filters - - -@pass_environment -def test_test(env: "Environment", value: str) -> bool: -    """Check if a test exists by name. Useful if a test may be -    optionally available. - -    .. code-block:: jinja - -        {% if 'loud' is test %} -            {% if value is loud %} -                {{ value|upper }} -            {% else %} -                {{ value|lower }} -            {% endif %} -        {% else %} -            {{ value }} -        {% endif %} - -    .. versionadded:: 3.0 -    """ -    return value in env.tests - - -def test_none(value: t.Any) -> bool: -    """Return true if the variable is none.""" -    return value is None - - -def test_boolean(value: t.Any) -> bool: -    """Return true if the object is a boolean value. - -    .. versionadded:: 2.11 -    """ -    return value is True or value is False - - -def test_false(value: t.Any) -> bool: -    """Return true if the object is False. - -    .. versionadded:: 2.11 -    """ -    return value is False - - -def test_true(value: t.Any) -> bool: -    """Return true if the object is True. - -    .. versionadded:: 2.11 -    """ -    return value is True - - -# NOTE: The existing 'number' test matches booleans and floats -def test_integer(value: t.Any) -> bool: -    """Return true if the object is an integer. - -    .. versionadded:: 2.11 -    """ -    return isinstance(value, int) and value is not True and value is not False - - -# NOTE: The existing 'number' test matches booleans and integers -def test_float(value: t.Any) -> bool: -    """Return true if the object is a float. - -    .. versionadded:: 2.11 -    """ -    return isinstance(value, float) - - -def test_lower(value: str) -> bool: -    """Return true if the variable is lowercased.""" -    return str(value).islower() - - -def test_upper(value: str) -> bool: -    """Return true if the variable is uppercased.""" -    return str(value).isupper() - - -def test_string(value: t.Any) -> bool: -    """Return true if the object is a string.""" -    return isinstance(value, str) - - -def test_mapping(value: t.Any) -> bool: -    """Return true if the object is a mapping (dict etc.). - -    .. versionadded:: 2.6 -    """ -    return isinstance(value, abc.Mapping) - - -def test_number(value: t.Any) -> bool: -    """Return true if the variable is a number.""" -    return isinstance(value, Number) - - -def test_sequence(value: t.Any) -> bool: -    """Return true if the variable is a sequence. Sequences are variables -    that are iterable. 
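The NOTE comments above flag a subtlety worth spelling out: the ``number`` test accepts booleans (and integers and floats), while ``integer`` and ``float`` are stricter. A quick sketch of the difference::

    from jinja2 import Environment

    env = Environment()

    def render(src: str) -> str:
        return env.from_string(src).render()

    print(render("{{ true is number }}"))   # True  -- bool is a Number subclass
    print(render("{{ true is integer }}"))  # False -- 'integer' excludes booleans
    print(render("{{ 1.5 is number }}"))    # True
    print(render("{{ 1.5 is integer }}"))   # False
    print(render("{{ 1.5 is float }}"))     # True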
-    """ -    try: -        len(value) -        value.__getitem__ -    except Exception: -        return False - -    return True - - -def test_sameas(value: t.Any, other: t.Any) -> bool: -    """Check if an object points to the same memory address than another -    object: - -    .. sourcecode:: jinja - -        {% if foo.attribute is sameas false %} -            the foo attribute really is the `False` singleton -        {% endif %} -    """ -    return value is other - - -def test_iterable(value: t.Any) -> bool: -    """Check if it's possible to iterate over an object.""" -    try: -        iter(value) -    except TypeError: -        return False - -    return True - - -def test_escaped(value: t.Any) -> bool: -    """Check if the value is escaped.""" -    return hasattr(value, "__html__") - - -def test_in(value: t.Any, seq: t.Container) -> bool: -    """Check if value is in seq. - -    .. versionadded:: 2.10 -    """ -    return value in seq - - -TESTS = { -    "odd": test_odd, -    "even": test_even, -    "divisibleby": test_divisibleby, -    "defined": test_defined, -    "undefined": test_undefined, -    "filter": test_filter, -    "test": test_test, -    "none": test_none, -    "boolean": test_boolean, -    "false": test_false, -    "true": test_true, -    "integer": test_integer, -    "float": test_float, -    "lower": test_lower, -    "upper": test_upper, -    "string": test_string, -    "mapping": test_mapping, -    "number": test_number, -    "sequence": test_sequence, -    "iterable": test_iterable, -    "callable": callable, -    "sameas": test_sameas, -    "escaped": test_escaped, -    "in": test_in, -    "==": operator.eq, -    "eq": operator.eq, -    "equalto": operator.eq, -    "!=": operator.ne, -    "ne": operator.ne, -    ">": operator.gt, -    "gt": operator.gt, -    "greaterthan": operator.gt, -    "ge": operator.ge, -    ">=": operator.ge, -    "<": operator.lt, -    "lt": operator.lt, -    "lessthan": operator.lt, -    "<=": operator.le, -    "le": operator.le, -} diff --git a/venv/lib/python3.11/site-packages/jinja2/utils.py b/venv/lib/python3.11/site-packages/jinja2/utils.py deleted file mode 100644 index 18914a5..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/utils.py +++ /dev/null @@ -1,755 +0,0 @@ -import enum -import json -import os -import re -import typing as t -from collections import abc -from collections import deque -from random import choice -from random import randrange -from threading import Lock -from types import CodeType -from urllib.parse import quote_from_bytes - -import markupsafe - -if t.TYPE_CHECKING: -    import typing_extensions as te - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -# special singleton representing missing values for the runtime -missing: t.Any = type("MissingType", (), {"__repr__": lambda x: "missing"})() - -internal_code: t.MutableSet[CodeType] = set() - -concat = "".join - - -def pass_context(f: F) -> F: -    """Pass the :class:`~jinja2.runtime.Context` as the first argument -    to the decorated function when called while rendering a template. - -    Can be used on functions, filters, and tests. - -    If only ``Context.eval_context`` is needed, use -    :func:`pass_eval_context`. If only ``Context.environment`` is -    needed, use :func:`pass_environment`. - -    .. versionadded:: 3.0.0 -        Replaces ``contextfunction`` and ``contextfilter``. 
-    """ -    f.jinja_pass_arg = _PassArg.context  # type: ignore -    return f - - -def pass_eval_context(f: F) -> F: -    """Pass the :class:`~jinja2.nodes.EvalContext` as the first argument -    to the decorated function when called while rendering a template. -    See :ref:`eval-context`. - -    Can be used on functions, filters, and tests. - -    If only ``EvalContext.environment`` is needed, use -    :func:`pass_environment`. - -    .. versionadded:: 3.0.0 -        Replaces ``evalcontextfunction`` and ``evalcontextfilter``. -    """ -    f.jinja_pass_arg = _PassArg.eval_context  # type: ignore -    return f - - -def pass_environment(f: F) -> F: -    """Pass the :class:`~jinja2.Environment` as the first argument to -    the decorated function when called while rendering a template. - -    Can be used on functions, filters, and tests. - -    .. versionadded:: 3.0.0 -        Replaces ``environmentfunction`` and ``environmentfilter``. -    """ -    f.jinja_pass_arg = _PassArg.environment  # type: ignore -    return f - - -class _PassArg(enum.Enum): -    context = enum.auto() -    eval_context = enum.auto() -    environment = enum.auto() - -    @classmethod -    def from_obj(cls, obj: F) -> t.Optional["_PassArg"]: -        if hasattr(obj, "jinja_pass_arg"): -            return obj.jinja_pass_arg  # type: ignore - -        return None - - -def internalcode(f: F) -> F: -    """Marks the function as internally used""" -    internal_code.add(f.__code__) -    return f - - -def is_undefined(obj: t.Any) -> bool: -    """Check if the object passed is undefined.  This does nothing more than -    performing an instance check against :class:`Undefined` but looks nicer. -    This can be used for custom filters or tests that want to react to -    undefined variables.  For example a custom default filter can look like -    this:: - -        def default(var, default=''): -            if is_undefined(var): -                return default -            return var -    """ -    from .runtime import Undefined - -    return isinstance(obj, Undefined) - - -def consume(iterable: t.Iterable[t.Any]) -> None: -    """Consumes an iterable without doing anything with it.""" -    for _ in iterable: -        pass - - -def clear_caches() -> None: -    """Jinja keeps internal caches for environments and lexers.  These are -    used so that Jinja doesn't have to recreate environments and lexers all -    the time.  Normally you don't have to care about that but if you are -    measuring memory consumption you may want to clean the caches. -    """ -    from .environment import get_spontaneous_environment -    from .lexer import _lexer_cache - -    get_spontaneous_environment.cache_clear() -    _lexer_cache.clear() - - -def import_string(import_name: str, silent: bool = False) -> t.Any: -    """Imports an object based on a string.  This is useful if you want to -    use import paths as endpoints or something similar.  An import path can -    be specified either in dotted notation (``xml.sax.saxutils.escape``) -    or with a colon as object delimiter (``xml.sax.saxutils:escape``). - -    If the `silent` is True the return value will be `None` if the import -    fails. - -    :return: imported object -    """ -    try: -        if ":" in import_name: -            module, obj = import_name.split(":", 1) -        elif "." 
in import_name: -            module, _, obj = import_name.rpartition(".") -        else: -            return __import__(import_name) -        return getattr(__import__(module, None, None, [obj]), obj) -    except (ImportError, AttributeError): -        if not silent: -            raise - - -def open_if_exists(filename: str, mode: str = "rb") -> t.Optional[t.IO]: -    """Returns a file descriptor for the filename if that file exists, -    otherwise ``None``. -    """ -    if not os.path.isfile(filename): -        return None - -    return open(filename, mode) - - -def object_type_repr(obj: t.Any) -> str: -    """Returns the name of the object's type.  For some recognized -    singletons the name of the object is returned instead. (For -    example for `None` and `Ellipsis`). -    """ -    if obj is None: -        return "None" -    elif obj is Ellipsis: -        return "Ellipsis" - -    cls = type(obj) - -    if cls.__module__ == "builtins": -        return f"{cls.__name__} object" - -    return f"{cls.__module__}.{cls.__name__} object" - - -def pformat(obj: t.Any) -> str: -    """Format an object using :func:`pprint.pformat`.""" -    from pprint import pformat - -    return pformat(obj) - - -_http_re = re.compile( -    r""" -    ^ -    ( -        (https?://|www\.)  # scheme or www -        (([\w%-]+\.)+)?  # subdomain -        ( -            [a-z]{2,63}  # basic tld -        | -            xn--[\w%]{2,59}  # idna tld -        ) -    | -        ([\w%-]{2,63}\.)+  # basic domain -        (com|net|int|edu|gov|org|info|mil)  # basic tld -    | -        (https?://)  # scheme -        ( -            (([\d]{1,3})(\.[\d]{1,3}){3})  # IPv4 -        | -            (\[([\da-f]{0,4}:){2}([\da-f]{0,4}:?){1,6}])  # IPv6 -        ) -    ) -    (?::[\d]{1,5})?  # port -    (?:[/?#]\S*)?  # path, query, and fragment -    $ -    """, -    re.IGNORECASE | re.VERBOSE, -) -_email_re = re.compile(r"^\S+@\w[\w.-]*\.\w+$") - - -def urlize( -    text: str, -    trim_url_limit: t.Optional[int] = None, -    rel: t.Optional[str] = None, -    target: t.Optional[str] = None, -    extra_schemes: t.Optional[t.Iterable[str]] = None, -) -> str: -    """Convert URLs in text into clickable links. - -    This may not recognize links in some situations. Usually, a more -    comprehensive formatter, such as a Markdown library, is a better -    choice. - -    Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email -    addresses. Links with trailing punctuation (periods, commas, closing -    parentheses) and leading punctuation (opening parentheses) are -    recognized excluding the punctuation. Email addresses that include -    header fields are not recognized (for example, -    ``mailto:address@example.com?cc=copy@example.com``). - -    :param text: Original text containing URLs to link. -    :param trim_url_limit: Shorten displayed URL values to this length. -    :param target: Add the ``target`` attribute to links. -    :param rel: Add the ``rel`` attribute to links. -    :param extra_schemes: Recognize URLs that start with these schemes -        in addition to the default behavior. - -    .. versionchanged:: 3.0 -        The ``extra_schemes`` parameter was added. - -    .. versionchanged:: 3.0 -        Generate ``https://`` links for URLs without a scheme. - -    .. versionchanged:: 3.0 -        The parsing rules were updated. Recognize email addresses with -        or without the ``mailto:`` scheme. Validate IP addresses. Ignore -        parentheses and brackets in more cases. 
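The ``pass_context`` / ``pass_eval_context`` / ``pass_environment`` decorators shown above only attach a ``jinja_pass_arg`` marker; the runtime reads it via ``_PassArg.from_obj`` and injects the corresponding object as the first argument. A sketch of a custom filter built on this; the ``tag`` filter is illustrative, not part of Jinja::

    from jinja2 import Environment
    from jinja2.utils import pass_environment

    @pass_environment
    def tag(env: Environment, value: str) -> str:
        # The active Environment is injected as the first argument because of
        # the jinja_pass_arg marker that pass_environment() set on this function.
        state = "on" if env.autoescape else "off"
        return f"{value} (autoescape {state})"

    env = Environment()
    env.filters["tag"] = tag
    print(env.from_string("{{ 'hello' | tag }}").render())  # hello (autoescape off)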
-    """ -    if trim_url_limit is not None: - -        def trim_url(x: str) -> str: -            if len(x) > trim_url_limit: -                return f"{x[:trim_url_limit]}..." - -            return x - -    else: - -        def trim_url(x: str) -> str: -            return x - -    words = re.split(r"(\s+)", str(markupsafe.escape(text))) -    rel_attr = f' rel="{markupsafe.escape(rel)}"' if rel else "" -    target_attr = f' target="{markupsafe.escape(target)}"' if target else "" - -    for i, word in enumerate(words): -        head, middle, tail = "", word, "" -        match = re.match(r"^([(<]|<)+", middle) - -        if match: -            head = match.group() -            middle = middle[match.end() :] - -        # Unlike lead, which is anchored to the start of the string, -        # need to check that the string ends with any of the characters -        # before trying to match all of them, to avoid backtracking. -        if middle.endswith((")", ">", ".", ",", "\n", ">")): -            match = re.search(r"([)>.,\n]|>)+$", middle) - -            if match: -                tail = match.group() -                middle = middle[: match.start()] - -        # Prefer balancing parentheses in URLs instead of ignoring a -        # trailing character. -        for start_char, end_char in ("(", ")"), ("<", ">"), ("<", ">"): -            start_count = middle.count(start_char) - -            if start_count <= middle.count(end_char): -                # Balanced, or lighter on the left -                continue - -            # Move as many as possible from the tail to balance -            for _ in range(min(start_count, tail.count(end_char))): -                end_index = tail.index(end_char) + len(end_char) -                # Move anything in the tail before the end char too -                middle += tail[:end_index] -                tail = tail[end_index:] - -        if _http_re.match(middle): -            if middle.startswith("https://") or middle.startswith("http://"): -                middle = ( -                    f'<a href="{middle}"{rel_attr}{target_attr}>{trim_url(middle)}</a>' -                ) -            else: -                middle = ( -                    f'<a href="https://{middle}"{rel_attr}{target_attr}>' -                    f"{trim_url(middle)}</a>" -                ) - -        elif middle.startswith("mailto:") and _email_re.match(middle[7:]): -            middle = f'<a href="{middle}">{middle[7:]}</a>' - -        elif ( -            "@" in middle -            and not middle.startswith("www.") -            and ":" not in middle -            and _email_re.match(middle) -        ): -            middle = f'<a href="mailto:{middle}">{middle}</a>' - -        elif extra_schemes is not None: -            for scheme in extra_schemes: -                if middle != scheme and middle.startswith(scheme): -                    middle = f'<a href="{middle}"{rel_attr}{target_attr}>{middle}</a>' - -        words[i] = f"{head}{middle}{tail}" - -    return "".join(words) - - -def generate_lorem_ipsum( -    n: int = 5, html: bool = True, min: int = 20, max: int = 100 -) -> str: -    """Generate some lorem ipsum for the template.""" -    from .constants import LOREM_IPSUM_WORDS - -    words = LOREM_IPSUM_WORDS.split() -    result = [] - -    for _ in range(n): -        next_capitalized = True -        last_comma = last_fullstop = 0 -        word = None -        last = None -        p = [] - -        # each paragraph contains out of 20 to 100 words. 
-        for idx, _ in enumerate(range(randrange(min, max))): -            while True: -                word = choice(words) -                if word != last: -                    last = word -                    break -            if next_capitalized: -                word = word.capitalize() -                next_capitalized = False -            # add commas -            if idx - randrange(3, 8) > last_comma: -                last_comma = idx -                last_fullstop += 2 -                word += "," -            # add end of sentences -            if idx - randrange(10, 20) > last_fullstop: -                last_comma = last_fullstop = idx -                word += "." -                next_capitalized = True -            p.append(word) - -        # ensure that the paragraph ends with a dot. -        p_str = " ".join(p) - -        if p_str.endswith(","): -            p_str = p_str[:-1] + "." -        elif not p_str.endswith("."): -            p_str += "." - -        result.append(p_str) - -    if not html: -        return "\n\n".join(result) -    return markupsafe.Markup( -        "\n".join(f"<p>{markupsafe.escape(x)}</p>" for x in result) -    ) - - -def url_quote(obj: t.Any, charset: str = "utf-8", for_qs: bool = False) -> str: -    """Quote a string for use in a URL using the given charset. - -    :param obj: String or bytes to quote. Other types are converted to -        string then encoded to bytes using the given charset. -    :param charset: Encode text to bytes using this charset. -    :param for_qs: Quote "/" and use "+" for spaces. -    """ -    if not isinstance(obj, bytes): -        if not isinstance(obj, str): -            obj = str(obj) - -        obj = obj.encode(charset) - -    safe = b"" if for_qs else b"/" -    rv = quote_from_bytes(obj, safe) - -    if for_qs: -        rv = rv.replace("%20", "+") - -    return rv - - -@abc.MutableMapping.register -class LRUCache: -    """A simple LRU Cache implementation.""" - -    # this is fast for small capacities (something below 1000) but doesn't -    # scale.  But as long as it's only used as storage for templates this -    # won't do any harm. 
- -    def __init__(self, capacity: int) -> None: -        self.capacity = capacity -        self._mapping: t.Dict[t.Any, t.Any] = {} -        self._queue: "te.Deque[t.Any]" = deque() -        self._postinit() - -    def _postinit(self) -> None: -        # alias all queue methods for faster lookup -        self._popleft = self._queue.popleft -        self._pop = self._queue.pop -        self._remove = self._queue.remove -        self._wlock = Lock() -        self._append = self._queue.append - -    def __getstate__(self) -> t.Mapping[str, t.Any]: -        return { -            "capacity": self.capacity, -            "_mapping": self._mapping, -            "_queue": self._queue, -        } - -    def __setstate__(self, d: t.Mapping[str, t.Any]) -> None: -        self.__dict__.update(d) -        self._postinit() - -    def __getnewargs__(self) -> t.Tuple: -        return (self.capacity,) - -    def copy(self) -> "LRUCache": -        """Return a shallow copy of the instance.""" -        rv = self.__class__(self.capacity) -        rv._mapping.update(self._mapping) -        rv._queue.extend(self._queue) -        return rv - -    def get(self, key: t.Any, default: t.Any = None) -> t.Any: -        """Return an item from the cache dict or `default`""" -        try: -            return self[key] -        except KeyError: -            return default - -    def setdefault(self, key: t.Any, default: t.Any = None) -> t.Any: -        """Set `default` if the key is not in the cache otherwise -        leave unchanged. Return the value of this key. -        """ -        try: -            return self[key] -        except KeyError: -            self[key] = default -            return default - -    def clear(self) -> None: -        """Clear the cache.""" -        with self._wlock: -            self._mapping.clear() -            self._queue.clear() - -    def __contains__(self, key: t.Any) -> bool: -        """Check if a key exists in this cache.""" -        return key in self._mapping - -    def __len__(self) -> int: -        """Return the current size of the cache.""" -        return len(self._mapping) - -    def __repr__(self) -> str: -        return f"<{type(self).__name__} {self._mapping!r}>" - -    def __getitem__(self, key: t.Any) -> t.Any: -        """Get an item from the cache. Moves the item up so that it has the -        highest priority then. - -        Raise a `KeyError` if it does not exist. -        """ -        with self._wlock: -            rv = self._mapping[key] - -            if self._queue[-1] != key: -                try: -                    self._remove(key) -                except ValueError: -                    # if something removed the key from the container -                    # when we read, ignore the ValueError that we would -                    # get otherwise. -                    pass - -                self._append(key) - -            return rv - -    def __setitem__(self, key: t.Any, value: t.Any) -> None: -        """Sets the value for an item. Moves the item up so that it -        has the highest priority then. -        """ -        with self._wlock: -            if key in self._mapping: -                self._remove(key) -            elif len(self._mapping) == self.capacity: -                del self._mapping[self._popleft()] - -            self._append(key) -            self._mapping[key] = value - -    def __delitem__(self, key: t.Any) -> None: -        """Remove an item from the cache dict. -        Raise a `KeyError` if it does not exist. 
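The LRUCache helper above is internal plumbing (it backs the template cache), but its behaviour is easy to see in isolation. A minimal sketch::

    from jinja2.utils import LRUCache

    cache = LRUCache(capacity=2)
    cache["a"] = 1
    cache["b"] = 2
    cache["a"]      # touching "a" makes it the most recently used entry
    cache["c"] = 3  # over capacity: the least recently used key ("b") is evicted

    print("b" in cache)  # False
    print(cache.keys())  # ['c', 'a'] -- most recently used first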
-        """ -        with self._wlock: -            del self._mapping[key] - -            try: -                self._remove(key) -            except ValueError: -                pass - -    def items(self) -> t.Iterable[t.Tuple[t.Any, t.Any]]: -        """Return a list of items.""" -        result = [(key, self._mapping[key]) for key in list(self._queue)] -        result.reverse() -        return result - -    def values(self) -> t.Iterable[t.Any]: -        """Return a list of all values.""" -        return [x[1] for x in self.items()] - -    def keys(self) -> t.Iterable[t.Any]: -        """Return a list of all keys ordered by most recent usage.""" -        return list(self) - -    def __iter__(self) -> t.Iterator[t.Any]: -        return reversed(tuple(self._queue)) - -    def __reversed__(self) -> t.Iterator[t.Any]: -        """Iterate over the keys in the cache dict, oldest items -        coming first. -        """ -        return iter(tuple(self._queue)) - -    __copy__ = copy - - -def select_autoescape( -    enabled_extensions: t.Collection[str] = ("html", "htm", "xml"), -    disabled_extensions: t.Collection[str] = (), -    default_for_string: bool = True, -    default: bool = False, -) -> t.Callable[[t.Optional[str]], bool]: -    """Intelligently sets the initial value of autoescaping based on the -    filename of the template.  This is the recommended way to configure -    autoescaping if you do not want to write a custom function yourself. - -    If you want to enable it for all templates created from strings or -    for all templates with `.html` and `.xml` extensions:: - -        from jinja2 import Environment, select_autoescape -        env = Environment(autoescape=select_autoescape( -            enabled_extensions=('html', 'xml'), -            default_for_string=True, -        )) - -    Example configuration to turn it on at all times except if the template -    ends with `.txt`:: - -        from jinja2 import Environment, select_autoescape -        env = Environment(autoescape=select_autoescape( -            disabled_extensions=('txt',), -            default_for_string=True, -            default=True, -        )) - -    The `enabled_extensions` is an iterable of all the extensions that -    autoescaping should be enabled for.  Likewise `disabled_extensions` is -    a list of all templates it should be disabled for.  If a template is -    loaded from a string then the default from `default_for_string` is used. -    If nothing matches then the initial value of autoescaping is set to the -    value of `default`. - -    For security reasons this function operates case insensitive. - -    .. 
versionadded:: 2.9 -    """ -    enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions) -    disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions) - -    def autoescape(template_name: t.Optional[str]) -> bool: -        if template_name is None: -            return default_for_string -        template_name = template_name.lower() -        if template_name.endswith(enabled_patterns): -            return True -        if template_name.endswith(disabled_patterns): -            return False -        return default - -    return autoescape - - -def htmlsafe_json_dumps( -    obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any -) -> markupsafe.Markup: -    """Serialize an object to a string of JSON with :func:`json.dumps`, -    then replace HTML-unsafe characters with Unicode escapes and mark -    the result safe with :class:`~markupsafe.Markup`. - -    This is available in templates as the ``|tojson`` filter. - -    The following characters are escaped: ``<``, ``>``, ``&``, ``'``. - -    The returned string is safe to render in HTML documents and -    ``<script>`` tags. The exception is in HTML attributes that are -    double quoted; either use single quotes or the ``|forceescape`` -    filter. - -    :param obj: The object to serialize to JSON. -    :param dumps: The ``dumps`` function to use. Defaults to -        ``env.policies["json.dumps_function"]``, which defaults to -        :func:`json.dumps`. -    :param kwargs: Extra arguments to pass to ``dumps``. Merged onto -        ``env.policies["json.dumps_kwargs"]``. - -    .. versionchanged:: 3.0 -        The ``dumper`` parameter is renamed to ``dumps``. - -    .. versionadded:: 2.9 -    """ -    if dumps is None: -        dumps = json.dumps - -    return markupsafe.Markup( -        dumps(obj, **kwargs) -        .replace("<", "\\u003c") -        .replace(">", "\\u003e") -        .replace("&", "\\u0026") -        .replace("'", "\\u0027") -    ) - - -class Cycler: -    """Cycle through values by yield them one at a time, then restarting -    once the end is reached. Available as ``cycler`` in templates. - -    Similar to ``loop.cycle``, but can be used outside loops or across -    multiple loops. For example, render a list of folders and files in a -    list, alternating giving them "odd" and "even" classes. - -    .. code-block:: html+jinja - -        {% set row_class = cycler("odd", "even") %} -        <ul class="browser"> -        {% for folder in folders %} -          <li class="folder {{ row_class.next() }}">{{ folder }} -        {% endfor %} -        {% for file in files %} -          <li class="file {{ row_class.next() }}">{{ file }} -        {% endfor %} -        </ul> - -    :param items: Each positional argument will be yielded in the order -        given for each cycle. - -    .. versionadded:: 2.1 -    """ - -    def __init__(self, *items: t.Any) -> None: -        if not items: -            raise RuntimeError("at least one item has to be provided") -        self.items = items -        self.pos = 0 - -    def reset(self) -> None: -        """Resets the current item to the first item.""" -        self.pos = 0 - -    @property -    def current(self) -> t.Any: -        """Return the current item. Equivalent to the item that will be -        returned next time :meth:`next` is called. -        """ -        return self.items[self.pos] - -    def next(self) -> t.Any: -        """Return the current item, then advance :attr:`current` to the -        next item. 
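``Cycler`` is also usable directly from Python: ``next()`` returns the current item and then advances, wrapping around at the end. A short sketch::

    from jinja2.utils import Cycler

    row_class = Cycler("odd", "even")
    print(row_class.current)  # 'odd'
    print(next(row_class))    # 'odd'  -- returns current, then advances
    print(next(row_class))    # 'even'
    print(next(row_class))    # 'odd'  -- wrapped around
    row_class.reset()
    print(row_class.current)  # 'odd'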
-        """ -        rv = self.current -        self.pos = (self.pos + 1) % len(self.items) -        return rv - -    __next__ = next - - -class Joiner: -    """A joining helper for templates.""" - -    def __init__(self, sep: str = ", ") -> None: -        self.sep = sep -        self.used = False - -    def __call__(self) -> str: -        if not self.used: -            self.used = True -            return "" -        return self.sep - - -class Namespace: -    """A namespace object that can hold arbitrary attributes.  It may be -    initialized from a dictionary or with keyword arguments.""" - -    def __init__(*args: t.Any, **kwargs: t.Any) -> None:  # noqa: B902 -        self, args = args[0], args[1:] -        self.__attrs = dict(*args, **kwargs) - -    def __getattribute__(self, name: str) -> t.Any: -        # __class__ is needed for the awaitable check in async mode -        if name in {"_Namespace__attrs", "__class__"}: -            return object.__getattribute__(self, name) -        try: -            return self.__attrs[name] -        except KeyError: -            raise AttributeError(name) from None - -    def __setitem__(self, name: str, value: t.Any) -> None: -        self.__attrs[name] = value - -    def __repr__(self) -> str: -        return f"<Namespace {self.__attrs!r}>" diff --git a/venv/lib/python3.11/site-packages/jinja2/visitor.py b/venv/lib/python3.11/site-packages/jinja2/visitor.py deleted file mode 100644 index 17c6aab..0000000 --- a/venv/lib/python3.11/site-packages/jinja2/visitor.py +++ /dev/null @@ -1,92 +0,0 @@ -"""API for traversing the AST nodes. Implemented by the compiler and -meta introspection. -""" -import typing as t - -from .nodes import Node - -if t.TYPE_CHECKING: -    import typing_extensions as te - -    class VisitCallable(te.Protocol): -        def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: -            ... - - -class NodeVisitor: -    """Walks the abstract syntax tree and call visitor functions for every -    node found.  The visitor functions may return values which will be -    forwarded by the `visit` method. - -    Per default the visitor functions for the nodes are ``'visit_'`` + -    class name of the node.  So a `TryFinally` node visit function would -    be `visit_TryFinally`.  This behavior can be changed by overriding -    the `get_visitor` function.  If no visitor function exists for a node -    (return value `None`) the `generic_visit` visitor is used instead. -    """ - -    def get_visitor(self, node: Node) -> "t.Optional[VisitCallable]": -        """Return the visitor function for this node or `None` if no visitor -        exists for this node.  In that case the generic visit function is -        used instead. -        """ -        return getattr(self, f"visit_{type(node).__name__}", None) - -    def visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: -        """Visit a node.""" -        f = self.get_visitor(node) - -        if f is not None: -            return f(node, *args, **kwargs) - -        return self.generic_visit(node, *args, **kwargs) - -    def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: -        """Called if no explicit visitor function exists for a node.""" -        for child_node in node.iter_child_nodes(): -            self.visit(child_node, *args, **kwargs) - - -class NodeTransformer(NodeVisitor): -    """Walks the abstract syntax tree and allows modifications of nodes. 
- -    The `NodeTransformer` will walk the AST and use the return value of the -    visitor functions to replace or remove the old node.  If the return -    value of the visitor function is `None` the node will be removed -    from the previous location otherwise it's replaced with the return -    value.  The return value may be the original node in which case no -    replacement takes place. -    """ - -    def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> Node: -        for field, old_value in node.iter_fields(): -            if isinstance(old_value, list): -                new_values = [] -                for value in old_value: -                    if isinstance(value, Node): -                        value = self.visit(value, *args, **kwargs) -                        if value is None: -                            continue -                        elif not isinstance(value, Node): -                            new_values.extend(value) -                            continue -                    new_values.append(value) -                old_value[:] = new_values -            elif isinstance(old_value, Node): -                new_node = self.visit(old_value, *args, **kwargs) -                if new_node is None: -                    delattr(node, field) -                else: -                    setattr(node, field, new_node) -        return node - -    def visit_list(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.List[Node]: -        """As transformers may return lists in some places this method -        can be used to enforce a list as return value. -        """ -        rv = self.visit(node, *args, **kwargs) - -        if not isinstance(rv, list): -            return [rv] - -        return rv | 
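A typical use of the visitor API removed above is static inspection of a parsed template. A sketch of a ``NodeVisitor`` subclass that collects referenced variable names; the ``NameCollector`` class is illustrative (``jinja2.meta.find_undeclared_variables`` does something similar)::

    from jinja2 import Environment
    from jinja2.nodes import Name
    from jinja2.visitor import NodeVisitor

    class NameCollector(NodeVisitor):
        """Collect every variable name referenced in a template's AST."""

        def __init__(self) -> None:
            self.names: set = set()

        def visit_Name(self, node: Name) -> None:
            # Dispatched by NodeVisitor.visit() because the method is named
            # 'visit_' + the node class name.
            self.names.add(node.name)
            self.generic_visit(node)

    env = Environment()
    ast = env.parse("Hello {{ user.name }}, you have {{ count }} new messages.")
    collector = NameCollector()
    collector.visit(ast)
    print(sorted(collector.names))  # ['count', 'user']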
