"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
is used to do some preprocessing. It filters out invalid operators like
the bitshift operators we don't allow in templates. It separates
template code and python code in expressions.
"""
import re
import typing as t
from ast import literal_eval
from collections import deque
from sys import intern
from ._identifier import pattern as name_re
from .exceptions import TemplateSyntaxError
from .utils import LRUCache
if t.TYPE_CHECKING:
    import typing_extensions as te
    from .environment import Environment
# Cache of constructed ``Lexer`` instances, keyed by the tuple of
# environment settings that affect lexing (see ``get_lexer``).  Bounded
# so rarely-used configurations can be evicted.
_lexer_cache: t.MutableMapping[t.Tuple, 'Lexer'] = LRUCache(50)
# One or more whitespace characters.
whitespace_re = re.compile('\\s+')
# A single line break in any of the three supported styles.
newline_re = re.compile('(\\r\\n|\\r|\\n)')
# A single- or double-quoted string literal, honoring backslash escapes.
string_re = re.compile('(\'([^\'\\\\]*(?:\\\\.[^\'\\\\]*)*)\'|"([^"\\\\]*(?:\\\\.[^"\\\\]*)*)")', re.S)
# Integer literals: binary, octal, hex, decimal, and zero, with optional
# underscore digit separators.
integer_re = re.compile('\n    (\n        0b(_?[0-1])+ # binary\n    |\n        0o(_?[0-7])+ # octal\n    |\n        0x(_?[\\da-f])+ # hex\n    |\n        [1-9](_?\\d)* # decimal\n    |\n        0(_?0)* # decimal zero\n    )\n    ', re.IGNORECASE | re.VERBOSE)
# Float literals: requires either a fractional part or an exponent so it
# never matches a plain integer.
float_re = re.compile("\n    (?<!\\.)  # doesn't start with a .\n    (\\d+_)*\\d+  # digits, possibly _ separated\n    (\n        (\\.(\\d+_)*\\d+)?  # optional fractional part\n        e[+\\-]?(\\d+_)*\\d+  # exponent part\n    |\n        \\.(\\d+_)*\\d+  # required fractional part\n    )\n    ", re.IGNORECASE | re.VERBOSE)
# Token type constants.  The strings are interned so token types can be
# compared with ``is`` and used as fast dictionary keys.
TOKEN_ADD = intern('add')
TOKEN_ASSIGN = intern('assign')
TOKEN_COLON = intern('colon')
TOKEN_COMMA = intern('comma')
TOKEN_DIV = intern('div')
TOKEN_DOT = intern('dot')
TOKEN_EQ = intern('eq')
TOKEN_FLOORDIV = intern('floordiv')
TOKEN_GT = intern('gt')
TOKEN_GTEQ = intern('gteq')
TOKEN_LBRACE = intern('lbrace')
TOKEN_LBRACKET = intern('lbracket')
TOKEN_LPAREN = intern('lparen')
TOKEN_LT = intern('lt')
TOKEN_LTEQ = intern('lteq')
TOKEN_MOD = intern('mod')
TOKEN_MUL = intern('mul')
TOKEN_NE = intern('ne')
TOKEN_PIPE = intern('pipe')
TOKEN_POW = intern('pow')
TOKEN_RBRACE = intern('rbrace')
TOKEN_RBRACKET = intern('rbracket')
TOKEN_RPAREN = intern('rparen')
TOKEN_SEMICOLON = intern('semicolon')
TOKEN_SUB = intern('sub')
TOKEN_TILDE = intern('tilde')
TOKEN_WHITESPACE = intern('whitespace')
TOKEN_FLOAT = intern('float')
TOKEN_INTEGER = intern('integer')
TOKEN_NAME = intern('name')
TOKEN_STRING = intern('string')
TOKEN_OPERATOR = intern('operator')
TOKEN_BLOCK_BEGIN = intern('block_begin')
TOKEN_BLOCK_END = intern('block_end')
TOKEN_VARIABLE_BEGIN = intern('variable_begin')
TOKEN_VARIABLE_END = intern('variable_end')
TOKEN_RAW_BEGIN = intern('raw_begin')
TOKEN_RAW_END = intern('raw_end')
TOKEN_COMMENT_BEGIN = intern('comment_begin')
TOKEN_COMMENT_END = intern('comment_end')
TOKEN_COMMENT = intern('comment')
TOKEN_LINESTATEMENT_BEGIN = intern('linestatement_begin')
TOKEN_LINESTATEMENT_END = intern('linestatement_end')
TOKEN_LINECOMMENT_BEGIN = intern('linecomment_begin')
TOKEN_LINECOMMENT_END = intern('linecomment_end')
TOKEN_LINECOMMENT = intern('linecomment')
TOKEN_DATA = intern('data')
TOKEN_INITIAL = intern('initial')
TOKEN_EOF = intern('eof')
# Map of operator source text to its token type.
operators = {'+': TOKEN_ADD, '-': TOKEN_SUB, '/': TOKEN_DIV, '//': TOKEN_FLOORDIV, '*': TOKEN_MUL, '%': TOKEN_MOD, '**': TOKEN_POW, '~': TOKEN_TILDE, '[': TOKEN_LBRACKET, ']': TOKEN_RBRACKET, '(': TOKEN_LPAREN, ')': TOKEN_RPAREN, '{': TOKEN_LBRACE, '}': TOKEN_RBRACE, '==': TOKEN_EQ, '!=': TOKEN_NE, '>': TOKEN_GT, '>=': TOKEN_GTEQ, '<': TOKEN_LT, '<=': TOKEN_LTEQ, '=': TOKEN_ASSIGN, '.': TOKEN_DOT, ':': TOKEN_COLON, '|': TOKEN_PIPE, ',': TOKEN_COMMA, ';': TOKEN_SEMICOLON}
reverse_operators = {v: k for k, v in operators.items()}
assert len(operators) == len(reverse_operators), 'operators dropped'
# Longest operators first so e.g. ``**`` is tried before ``*``.  The
# alternation is built outside the f-string for compatibility with
# Python versions before 3.12 (nested same-quote f-strings are a PEP 701
# feature); behavior is unchanged.
operator_re = re.compile(
    '({})'.format('|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x))))
)
# Token types that carry no information for the parser and are dropped
# entirely by ``Lexer.wrap``.
ignored_tokens = frozenset([TOKEN_COMMENT_BEGIN, TOKEN_COMMENT, TOKEN_COMMENT_END, TOKEN_WHITESPACE, TOKEN_LINECOMMENT_BEGIN, TOKEN_LINECOMMENT_END, TOKEN_LINECOMMENT])
# Token types that are only emitted when their matched text is non-empty.
ignore_if_empty = frozenset([TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT])

def _describe_token_type(token_type: str) -> str:
    """Return a human-readable description for a token *type*.

    Operators are described by their source text; structural tokens get a
    short English phrase; anything else falls back to the type name.
    """
    if token_type in reverse_operators:
        return reverse_operators[token_type]
    return {
        TOKEN_COMMENT_BEGIN: 'begin of comment',
        TOKEN_COMMENT_END: 'end of comment',
        TOKEN_COMMENT: 'comment',
        TOKEN_LINECOMMENT: 'comment',
        TOKEN_BLOCK_BEGIN: 'begin of statement block',
        TOKEN_BLOCK_END: 'end of statement block',
        TOKEN_VARIABLE_BEGIN: 'begin of print statement',
        TOKEN_VARIABLE_END: 'end of print statement',
        TOKEN_LINESTATEMENT_BEGIN: 'begin of line statement',
        TOKEN_LINESTATEMENT_END: 'end of line statement',
        TOKEN_DATA: 'template data / text',
        TOKEN_EOF: 'end of template',
    }.get(token_type, token_type)

def describe_token(token: 'Token') -> str:
    """Returns a description of the token."""
    # Name tokens are best described by their actual value.
    if token.type == TOKEN_NAME:
        return token.value
    return _describe_token_type(token.type)

def describe_token_expr(expr: str) -> str:
    """Like `describe_token` but for token expressions.

    An expression is either a bare token type or ``'type:value'``.
    """
    if ':' in expr:
        type_, value = expr.split(':', 1)
        # A constrained name expression is described by the value itself.
        if type_ == TOKEN_NAME:
            return value
    else:
        type_ = expr
    return _describe_token_type(type_)

def count_newlines(value: str) -> int:
    """Count the number of newline characters in the string.  This is
    useful for extensions that filter a stream.

    ``\\r\\n``, ``\\r`` and ``\\n`` each count as one line break.
    """
    return len(newline_re.findall(value))

def compile_rules(environment: 'Environment') -> t.List[t.Tuple[str, str]]:
    """Compiles all the rules from the environment into a list of rules.

    Returns ``(token_type, escaped_pattern)`` pairs sorted so that longer
    delimiters are tried first, which lets e.g. a ``{%`` block start win
    over a shorter ``{`` style delimiter.
    """
    e = re.escape
    rules = [
        (len(environment.comment_start_string), TOKEN_COMMENT_BEGIN, e(environment.comment_start_string)),
        (len(environment.block_start_string), TOKEN_BLOCK_BEGIN, e(environment.block_start_string)),
        (len(environment.variable_start_string), TOKEN_VARIABLE_BEGIN, e(environment.variable_start_string)),
    ]
    if environment.line_statement_prefix is not None:
        # Line statements only match at the start of a line (after
        # optional horizontal whitespace).
        rules.append((
            len(environment.line_statement_prefix),
            TOKEN_LINESTATEMENT_BEGIN,
            '^[ \\t\\v]*' + e(environment.line_statement_prefix),
        ))
    if environment.line_comment_prefix is not None:
        rules.append((
            len(environment.line_comment_prefix),
            TOKEN_LINECOMMENT_BEGIN,
            '(?:^|(?<=\\S))[^\\S\\r\\n]*' + e(environment.line_comment_prefix),
        ))
    # Sort by descending prefix length; drop the length key before return.
    return [x[1:] for x in sorted(rules, reverse=True)]

class Failure:
    """Class that raises a `TemplateSyntaxError` if called.
    Used by the `Lexer` to specify known errors.
    """

    def __init__(self, message: str, cls: t.Type[TemplateSyntaxError]=TemplateSyntaxError) -> None:
        # Remember which exception type to raise and with what message.
        self.error_class = cls
        self.message = message

    def __call__(self, lineno: int, filename: str) -> 'te.NoReturn':
        # Called by the lexer when the corresponding failure rule matches.
        raise self.error_class(self.message, lineno, filename)

class Token(t.NamedTuple):
    # Line the token starts on (1-based).
    lineno: int
    # Interned token type, e.g. ``TOKEN_NAME``.
    type: str
    # Raw (or, after ``Lexer.wrap``, parsed) token value.
    value: str

    def __str__(self) -> str:
        return describe_token(self)

    def test(self, expr: str) -> bool:
        """Test a token against a token expression.  This can either be a
        token type or ``'token_type:token_value'``.  This can only test
        against string values and types.
        """
        # Exact type match is the common case and checked first.
        if self.type == expr:
            return True
        if ':' in expr:
            return expr.split(':', 1) == [self.type, self.value]
        return False

    def test_any(self, *iterable: str) -> bool:
        """Test against multiple token expressions."""
        return any(self.test(expr) for expr in iterable)

class TokenStreamIterator:
    """The iterator for tokenstreams.  Iterate over the stream
    until the eof token is reached.
    """

    def __init__(self, stream: 'TokenStream') -> None:
        self.stream = stream

    def __iter__(self) -> 'TokenStreamIterator':
        return self

    def __next__(self) -> Token:
        current = self.stream.current
        # Reaching EOF closes the underlying stream and ends iteration.
        if current.type is TOKEN_EOF:
            self.stream.close()
            raise StopIteration
        # Advance the stream, then hand out the token we were on.
        next(self.stream)
        return current

class TokenStream:
    """A token stream is an iterable that yields :class:`Token`\\s.  The
    parser however does not iterate over it but calls :meth:`next` to go
    one token ahead.  The current active token is stored as :attr:`current`.
    """

    def __init__(self, generator: t.Iterable[Token], name: t.Optional[str], filename: t.Optional[str]):
        self._iter = iter(generator)
        # Tokens pushed back via :meth:`push`, consumed before ``_iter``.
        self._pushed: 'te.Deque[Token]' = deque()
        self.name = name
        self.filename = filename
        self.closed = False
        self.current = Token(1, TOKEN_INITIAL, '')
        # Load the first real token into ``current``.
        next(self)

    def __iter__(self) -> TokenStreamIterator:
        return TokenStreamIterator(self)

    def __bool__(self) -> bool:
        return bool(self._pushed) or self.current.type is not TOKEN_EOF

    @property
    def eos(self) -> bool:
        """Are we at the end of the stream?"""
        return not self

    def push(self, token: Token) -> None:
        """Push a token back to the stream."""
        self._pushed.append(token)

    def look(self) -> Token:
        """Look at the next token."""
        old_token = next(self)
        result = self.current
        # Re-queue the peeked token and restore the old current token.
        self.push(result)
        self.current = old_token
        return result

    def skip(self, n: int=1) -> None:
        """Got n tokens ahead."""
        for _ in range(n):
            next(self)

    def next_if(self, expr: str) -> t.Optional[Token]:
        """Perform the token test and return the token if it matched.
        Otherwise the return value is `None`.
        """
        if self.current.test(expr):
            return next(self)
        return None

    def skip_if(self, expr: str) -> bool:
        """Like :meth:`next_if` but only returns `True` or `False`."""
        return self.next_if(expr) is not None

    def __next__(self) -> Token:
        """Go one token ahead and return the old one.

        Use the built-in :func:`next` instead of calling this directly.
        """
        rv = self.current
        if self._pushed:
            self.current = self._pushed.popleft()
        elif self.current.type is not TOKEN_EOF:
            try:
                self.current = next(self._iter)
            except StopIteration:
                self.close()
        return rv

    def close(self) -> None:
        """Close the stream.

        Replaces the current token with EOF and detaches the generator.
        """
        self.current = Token(self.current.lineno, TOKEN_EOF, '')
        self._iter = iter(())
        self.closed = True

    def expect(self, expr: str) -> Token:
        """Expect a given token type and return it.  This accepts the same
        argument as :meth:`jinja2.lexer.Token.test`.

        :raises TemplateSyntaxError: if the current token does not match.
        """
        if not self.current.test(expr):
            expr = describe_token_expr(expr)
            if self.current.type is TOKEN_EOF:
                raise TemplateSyntaxError(f'unexpected end of template, expected {expr!r}.', self.current.lineno, self.name, self.filename)
            raise TemplateSyntaxError(f'expected token {expr!r}, got {describe_token(self.current)!r}', self.current.lineno, self.name, self.filename)
        return next(self)

def get_lexer(environment: 'Environment') -> 'Lexer':
    """Return a lexer which is probably cached.

    All environment settings that influence lexing form the cache key, so
    environments with identical syntax share one ``Lexer`` instance.
    """
    key = (
        environment.block_start_string,
        environment.block_end_string,
        environment.variable_start_string,
        environment.variable_end_string,
        environment.comment_start_string,
        environment.comment_end_string,
        environment.line_statement_prefix,
        environment.line_comment_prefix,
        environment.trim_blocks,
        environment.lstrip_blocks,
        environment.newline_sequence,
        environment.keep_trailing_newline,
    )
    lexer = _lexer_cache.get(key)
    if lexer is None:
        _lexer_cache[key] = lexer = Lexer(environment)
    return lexer

class OptionalLStrip(tuple):
    """A special tuple for marking a point in the state that can have
    lstrip applied.
    """
    __slots__ = ()

    def __new__(cls, *members, **kwargs):
        # Gather the positional arguments into the tuple contents; keyword
        # arguments are accepted but not used.
        return tuple.__new__(cls, members)

class _Rule(t.NamedTuple):
    # Compiled regex the lexer tries at the current position.
    pattern: t.Pattern[str]
    # What to emit on a match: a single token type, a tuple of types
    # aligned with the regex groups, or a tuple holding a ``Failure``.
    tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]]
    # State transition: ``'#pop'``, ``'#bygroup'``, a state name, or None.
    command: t.Optional[str]

class Lexer:
    """Class that implements a lexer for a given environment. Automatically
    created by the environment class, usually you don't have to do that.

    Note that the lexer is not automatically bound to an environment.
    Multiple environments can share the same lexer.
    """

    def __init__(self, environment: 'Environment') -> None:
        e = re.escape

        def c(x: str) -> t.Pattern[str]:
            return re.compile(x, re.M | re.S)

        # Rules shared by the expression-like states (blocks, variables,
        # line statements).  Order matters: floats before integers so the
        # fractional part is not lexed separately.
        tag_rules: t.List[_Rule] = [
            _Rule(whitespace_re, TOKEN_WHITESPACE, None),
            _Rule(float_re, TOKEN_FLOAT, None),
            _Rule(integer_re, TOKEN_INTEGER, None),
            _Rule(name_re, TOKEN_NAME, None),
            _Rule(string_re, TOKEN_STRING, None),
            _Rule(operator_re, TOKEN_OPERATOR, None),
        ]
        root_tag_rules = compile_rules(environment)
        block_start_re = e(environment.block_start_string)
        block_end_re = e(environment.block_end_string)
        comment_end_re = e(environment.comment_end_string)
        variable_end_re = e(environment.variable_end_string)
        # With trim_blocks a newline directly after a block tag is eaten.
        block_suffix_re = '\\n?' if environment.trim_blocks else ''
        self.lstrip_blocks = environment.lstrip_blocks
        self.newline_sequence = environment.newline_sequence
        self.keep_trailing_newline = environment.keep_trailing_newline
        root_raw_re = f'(?P<raw_begin>{block_start_re}(\\-|\\+|)\\s*raw\\s*(?:\\-{block_end_re}\\s*|{block_end_re}))'
        root_parts_re = '|'.join([root_raw_re] + [f'(?P<{n}>{r}(\\-|\\+|))' for n, r in root_tag_rules])
        self.rules: t.Dict[str, t.List[_Rule]] = {
            'root': [
                # Template data up to the next tag; the tag group decides
                # which state to enter (``#bygroup``).
                _Rule(c(f'(.*?)(?:{root_parts_re})'), OptionalLStrip(TOKEN_DATA, '#bygroup'), '#bygroup'),
                # Trailing data with no more tags.
                _Rule(c('.+'), TOKEN_DATA, None),
            ],
            TOKEN_COMMENT_BEGIN: [
                _Rule(c(f'(.*?)((?:\\+{comment_end_re}|\\-{comment_end_re}\\s*|{comment_end_re}{block_suffix_re}))'), (TOKEN_COMMENT, TOKEN_COMMENT_END), '#pop'),
                _Rule(c('(.)'), (Failure('Missing end of comment tag'),), None),
            ],
            TOKEN_BLOCK_BEGIN: [
                _Rule(c(f'(?:\\+{block_end_re}|\\-{block_end_re}\\s*|{block_end_re}{block_suffix_re})'), TOKEN_BLOCK_END, '#pop')
            ] + tag_rules,
            TOKEN_VARIABLE_BEGIN: [
                _Rule(c(f'\\-{variable_end_re}\\s*|{variable_end_re}'), TOKEN_VARIABLE_END, '#pop')
            ] + tag_rules,
            TOKEN_RAW_BEGIN: [
                _Rule(c(f'(.*?)((?:{block_start_re}(\\-|\\+|))\\s*endraw\\s*(?:\\+{block_end_re}|\\-{block_end_re}\\s*|{block_end_re}{block_suffix_re}))'), OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END), '#pop'),
                _Rule(c('(.)'), (Failure('Missing end of raw directive'),), None),
            ],
            TOKEN_LINESTATEMENT_BEGIN: [
                _Rule(c('\\s*(\\n|$)'), TOKEN_LINESTATEMENT_END, '#pop')
            ] + tag_rules,
            TOKEN_LINECOMMENT_BEGIN: [
                _Rule(c('(.*?)()(?=\\n|$)'), (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END), '#pop')
            ],
        }

    def _normalize_newlines(self, value: str) -> str:
        """Replace all newlines with the configured sequence in strings
        and template data.
        """
        return newline_re.sub(self.newline_sequence, value)

    def tokenize(self, source: str, name: t.Optional[str]=None, filename: t.Optional[str]=None, state: t.Optional[str]=None) -> TokenStream:
        """Calls tokeniter + tokenize and wraps it in a token stream."""
        stream = self.tokeniter(source, name, filename, state)
        return TokenStream(self.wrap(stream, name, filename), name, filename)

    def wrap(self, stream: t.Iterable[t.Tuple[int, str, str]], name: t.Optional[str]=None, filename: t.Optional[str]=None) -> t.Iterator[Token]:
        """This is called with the stream as returned by `tokenize` and wraps
        every token in a :class:`Token` and converts the value.
        """
        for lineno, token, value_str in stream:
            if token in ignored_tokens:
                continue
            value: t.Any = value_str
            if token == TOKEN_LINESTATEMENT_BEGIN:
                token = TOKEN_BLOCK_BEGIN
            elif token == TOKEN_LINESTATEMENT_END:
                token = TOKEN_BLOCK_END
            elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
                # Raw markers are lexing artifacts only.
                continue
            elif token == TOKEN_DATA:
                value = self._normalize_newlines(value_str)
            elif token == 'keyword':
                token = value_str
            elif token == TOKEN_NAME:
                value = value_str
                if not value.isidentifier():
                    raise TemplateSyntaxError('Invalid character in identifier', lineno, name, filename)
            elif token == TOKEN_STRING:
                # Strip quotes and decode backslash escapes.
                try:
                    value = self._normalize_newlines(value_str[1:-1]).encode('ascii', 'backslashreplace').decode('unicode-escape')
                except Exception as e:
                    msg = str(e).split(':')[-1].strip()
                    raise TemplateSyntaxError(msg, lineno, name, filename) from e
            elif token == TOKEN_INTEGER:
                # Base 0 honors the 0b/0o/0x prefixes matched by integer_re.
                value = int(value_str.replace('_', ''), 0)
            elif token == TOKEN_FLOAT:
                # literal_eval handles underscore-free float syntax safely.
                value = literal_eval(value_str.replace('_', ''))
            elif token == TOKEN_OPERATOR:
                token = operators[value_str]
            yield Token(lineno, token, value)

    def tokeniter(self, source: str, name: t.Optional[str], filename: t.Optional[str]=None, state: t.Optional[str]=None) -> t.Iterator[t.Tuple[int, str, str]]:
        """This method tokenizes the text and returns the tokens in a
        generator. Use this method if you just want to tokenize a template.

        .. versionchanged:: 3.0
            Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line
            breaks.
        """
        lines = newline_re.split(source)[::2]
        if not self.keep_trailing_newline and lines[-1] == '':
            del lines[-1]
        source = '\n'.join(lines)
        pos = 0
        lineno = 1
        stack = ['root']
        if state is not None and state != 'root':
            assert state in ('variable', 'block'), 'invalid state'
            stack.append(state + '_begin')
        statetokens = self.rules[stack[-1]]
        source_length = len(source)
        # Tracks open (, [ and { so tag-end delimiters inside expressions
        # (e.g. ``}}`` of a dict literal) are not treated as tag ends.
        balancing_stack: t.List[str] = []
        newlines_stripped = 0
        line_starting = True
        while True:
            for regex, tokens, new_state in statetokens:
                m = regex.match(source, pos)
                if m is None:
                    continue
                # Inside unbalanced brackets a tag may not end yet.
                if balancing_stack and tokens in (TOKEN_VARIABLE_END, TOKEN_BLOCK_END, TOKEN_LINESTATEMENT_END):
                    continue
                if isinstance(tokens, tuple):
                    groups: t.Sequence[str] = m.groups()
                    if isinstance(tokens, OptionalLStrip):
                        # Rule supports lstrip.  The match looks like
                        # text + strip sign + end of tag.
                        text = groups[0]
                        strip_sign = m.group(2)
                        if strip_sign == '-':
                            # Strip all whitespace between text and tag.
                            stripped = text.rstrip()
                            newlines_stripped = text[len(stripped):].count('\n')
                            groups = [stripped, *groups[1:]]
                        elif (
                            strip_sign != '+'
                            and self.lstrip_blocks
                            and not m.groupdict().get(TOKEN_VARIABLE_BEGIN)
                        ):
                            # Strip whitespace between the last newline
                            # and the tag, if that is all there is.
                            l_pos = text.rfind('\n') + 1
                            if l_pos > 0 or line_starting:
                                if whitespace_re.fullmatch(text, l_pos):
                                    groups = [text[:l_pos], *groups[1:]]
                    for idx, token in enumerate(tokens):
                        if token.__class__ is Failure:
                            raise token(lineno, filename)
                        elif token == '#bygroup':
                            # The matched named group decides the token.
                            for key, value in m.groupdict().items():
                                if value is not None:
                                    yield (lineno, key, value)
                                    lineno += value.count('\n')
                                    break
                            else:
                                raise RuntimeError(f'{regex!r} wanted to resolve the token dynamically but no group matched')
                        else:
                            data = groups[idx]
                            if data or token not in ignore_if_empty:
                                yield (lineno, token, data)
                            lineno += data.count('\n') + newlines_stripped
                            newlines_stripped = 0
                else:
                    data = m.group()
                    if tokens == TOKEN_OPERATOR:
                        # Maintain the bracket balancing stack.
                        if data == '{':
                            balancing_stack.append('}')
                        elif data == '(':
                            balancing_stack.append(')')
                        elif data == '[':
                            balancing_stack.append(']')
                        elif data in ('}', ')', ']'):
                            if not balancing_stack:
                                raise TemplateSyntaxError(f"unexpected '{data}'", lineno, name, filename)
                            expected_op = balancing_stack.pop()
                            if expected_op != data:
                                raise TemplateSyntaxError(f"unexpected '{data}', expected '{expected_op}'", lineno, name, filename)
                    if data or tokens not in ignore_if_empty:
                        yield (lineno, tokens, data)
                    lineno += data.count('\n')
                line_starting = m.group()[-1:] == '\n'
                pos2 = m.end()
                if new_state is not None:
                    if new_state == '#pop':
                        stack.pop()
                    elif new_state == '#bygroup':
                        # The matched named group decides the new state.
                        for key, value in m.groupdict().items():
                            if value is not None:
                                stack.append(key)
                                break
                        else:
                            raise RuntimeError(f'{regex!r} wanted to resolve the new state dynamically but no group matched')
                    else:
                        stack.append(new_state)
                    statetokens = self.rules[stack[-1]]
                elif pos2 == pos:
                    # Guard against rules that loop without consuming.
                    raise RuntimeError(f'{regex!r} yielded empty string without stack change')
                pos = pos2
                break
            else:
                # No rule matched: either we are done or the char is bad.
                if pos >= source_length:
                    return
                raise TemplateSyntaxError(f'unexpected char {source[pos]!r} at {pos}', lineno, name, filename)
"""API and implementations for loading templates from different data
sources.
"""
import importlib.util
import os
import posixpath
import sys
import typing as t
import weakref
import zipimport
from collections import abc
from hashlib import sha1
from importlib import import_module
from types import ModuleType
from .exceptions import TemplateNotFound
from .utils import internalcode
if t.TYPE_CHECKING:
    from .environment import Environment
    from .environment import Template

def split_template_path(template: str) -> t.List[str]:
    """Split a path into segments and perform a sanity check.  If it detects
    '..' in the path it will raise a `TemplateNotFound` error.

    Empty segments and ``'.'`` segments are dropped; any segment containing
    an OS path separator or equal to ``os.pardir`` rejects the whole path.
    """
    pieces = []
    for piece in template.split('/'):
        if (
            os.path.sep in piece
            # altsep is None on POSIX, so guard before the membership test.
            or (os.path.altsep and os.path.altsep in piece)
            or piece == os.path.pardir
        ):
            raise TemplateNotFound(template)
        elif piece and piece != '.':
            pieces.append(piece)
    return pieces

class BaseLoader:
    """Baseclass for all loaders.  Subclass this and override `get_source` to
    implement a custom loading mechanism.  The environment provides a
    `get_template` method that calls the loader's `load` method to get the
    :class:`Template` object.

    A very basic example for a loader that looks up templates on the file
    system could look like this::

        from jinja2 import BaseLoader, TemplateNotFound
        from os.path import join, exists, getmtime

        class MyLoader(BaseLoader):

            def __init__(self, path):
                self.path = path

            def get_source(self, environment, template):
                path = join(self.path, template)
                if not exists(path):
                    raise TemplateNotFound(template)
                mtime = getmtime(path)
                with open(path) as f:
                    source = f.read()
                return source, path, lambda: mtime == getmtime(path)
    """
    has_source_access = True

    def get_source(self, environment: 'Environment', template: str) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
        """Get the template source, filename and reload helper for a template.
        It's passed the environment and template name and has to return a
        tuple in the form ``(source, filename, uptodate)`` or raise a
        `TemplateNotFound` error if it can't locate the template.

        The source part of the returned tuple must be the source of the
        template as a string. The filename should be the name of the
        file on the file<response clipped><NOTE>Due to the max output limit, only part of the full response has been shown to you.</NOTE>  return name in self.vars or name in self.parent

    def __getitem__(self, key: str) -> t.Any:
        """Look up a variable by name with ``[]`` syntax, or raise a
        ``KeyError`` if the key is not found.
        """
        value = self.resolve_or_missing(key)
        # ``missing`` is the sentinel for "not found anywhere".
        if value is missing:
            raise KeyError(key)
        return value

    def __repr__(self) -> str:
        # Show the merged variables and the template name for debugging.
        return f'<{type(self).__name__} {self.get_all()!r} of {self.name!r}>'

class BlockReference:
    """One block on a template reference."""

    def __init__(self, name: str, context: 'Context', stack: t.List[t.Callable[['Context'], t.Iterator[str]]], depth: int) -> None:
        self.name = name
        self._context = context
        # All render functions for this block name, outermost first.
        self._stack = stack
        # Index into ``_stack`` of the level this reference renders.
        self._depth = depth

    @property
    def super(self) -> t.Union['BlockReference', 'Undefined']:
        """Super the block.

        Returns a reference one level further down the block stack, or an
        undefined object when there is no parent block.
        """
        if self._depth + 1 >= len(self._stack):
            return self._context.environment.undefined(f'there is no parent block called {self.name!r}.', name='super')
        return BlockReference(self.name, self._context, self._stack, self._depth + 1)

    @internalcode
    def __call__(self) -> str:
        # NOTE(review): ``_async_call`` is expected to be provided
        # elsewhere for async environments — confirm.
        if self._context.environment.is_async:
            return self._async_call()
        rv = concat(self._stack[self._depth](self._context))
        if self._context.eval_ctx.autoescape:
            return Markup(rv)
        return rv

class LoopContext:
    """A wrapper iterable for dynamic ``for`` loops, with information
    about the loop and iteration.
    """
    # 0-based index of the current iteration; -1 before the first step.
    index0 = -1
    # Cached length, computed lazily by :attr:`length`.
    _length: t.Optional[int] = None
    # One-item lookahead buffer used by ``last``/``nextitem``.
    _after: t.Any = missing
    _current: t.Any = missing
    _before: t.Any = missing
    _last_changed_value: t.Any = missing

    def __init__(self, iterable: t.Iterable[V], undefined: t.Type['Undefined'], recurse: t.Optional['LoopRenderFunc']=None, depth0: int=0) -> None:
        """
        :param iterable: Iterable to wrap.
        :param undefined: :class:`Undefined` class to use for next and
            previous items.
        :param recurse: The function to render the loop body when the
            loop is marked recursive.
        :param depth0: Incremented when looping recursively.
        """
        self._iterable = iterable
        self._iterator = self._to_iterator(iterable)
        self._undefined = undefined
        self._recurse = recurse
        self.depth0 = depth0

    @staticmethod
    def _to_iterator(iterable: t.Iterable[V]) -> t.Iterator[V]:
        # Hook point: the async subclass converts to an async iterator.
        return iter(iterable)

    @property
    def length(self) -> int:
        """Length of the iterable.

        If the iterable is a generator or otherwise does not have a
        size, it is eagerly evaluated to get a size.
        """
        if self._length is not None:
            return self._length
        try:
            self._length = len(self._iterable)  # type: ignore[arg-type]
        except TypeError:
            # Drain the rest, then rebuild the iterator from the list so
            # iteration can continue.  Items already consumed plus the
            # lookahead item are added back into the count.
            iterable = list(self._iterator)
            self._iterator = self._to_iterator(iterable)
            self._length = len(iterable) + self.index + (self._after is not missing)
        return self._length

    def __len__(self) -> int:
        return self.length

    @property
    def depth(self) -> int:
        """How many levels deep a recursive loop currently is, starting at 1."""
        return self.depth0 + 1

    @property
    def index(self) -> int:
        """Current iteration of the loop, starting at 1."""
        return self.index0 + 1

    @property
    def revindex0(self) -> int:
        """Number of iterations from the end of the loop, ending at 0.

        Requires calculating :attr:`length`.
        """
        return self.length - self.index

    @property
    def revindex(self) -> int:
        """Number of iterations from the end of the loop, ending at 1.

        Requires calculating :attr:`length`.
        """
        return self.length - self.index0

    @property
    def first(self) -> bool:
        """Whether this is the first iteration of the loop."""
        return self.index0 == 0

    def _peek_next(self) -> t.Any:
        """Return the next element in the iterable, or :data:`missing`
        if the iterable is exhausted. Only peeks one item ahead, caching
        the result in :attr:`_last` for use in subsequent checks. The
        cache is reset when :meth:`__next__` is called.
        """
        if self._after is not missing:
            return self._after
        self._after = next(self._iterator, missing)
        return self._after

    @property
    def last(self) -> bool:
        """Whether this is the last iteration of the loop.

        Causes the iterable to advance early. See
        :func:`itertools.groupby` for issues this can cause.
        The :func:`groupby` filter avoids that issue.
        """
        return self._peek_next() is missing

    @property
    def previtem(self) -> t.Union[t.Any, 'Undefined']:
        """The item in the previous iteration. Undefined during the
        first iteration.
        """
        if self.first:
            return self._undefined('there is no previous item')
        return self._before

    @property
    def nextitem(self) -> t.Union[t.Any, 'Undefined']:
        """The item in the next iteration. Undefined during the last
        iteration.

        Causes the iterable to advance early. See
        :func:`itertools.groupby` for issues this can cause.
        The :func:`jinja-filters.groupby` filter avoids that issue.
        """
        rv = self._peek_next()
        if rv is missing:
            return self._undefined('there is no next item')
        return rv

    def cycle(self, *args: V) -> V:
        """Return a value from the given args, cycling through based on
        the current :attr:`index0`.

        :param args: One or more values to cycle through.
        """
        if not args:
            raise TypeError('no items for cycling given')
        return args[self.index0 % len(args)]

    def changed(self, *value: t.Any) -> bool:
        """Return ``True`` if previously called with a different value
        (including when called for the first time).

        :param value: One or more values to compare to the last call.
        """
        if self._last_changed_value != value:
            self._last_changed_value = value
            return True
        return False

    def __iter__(self) -> 'LoopContext':
        return self

    def __next__(self) -> t.Tuple[t.Any, 'LoopContext']:
        # Use the lookahead item if ``last``/``nextitem`` consumed ahead.
        if self._after is not missing:
            rv = self._after
            self._after = missing
        else:
            rv = next(self._iterator)
        self.index0 += 1
        self._before = self._current
        self._current = rv
        return (rv, self)

    @internalcode
    def __call__(self, iterable: t.Iterable[V]) -> str:
        """When iterating over nested data, render the body of the loop
        recursively with the given inner iterable data.

        The loop must have the ``recursive`` marker for this to work.
        """
        if self._recurse is None:
            raise TypeError("The loop must have the 'recursive' marker to be called recursively.")
        return self._recurse(iterable, self._recurse, depth=self.depth)

    def __repr__(self) -> str:
        return f'<{type(self).__name__} {self.index}/{self.length}>'

class AsyncLoopContext(LoopContext):
    # The iterator is asynchronous in this subclass.
    _iterator: t.AsyncIterator[t.Any]

    def __aiter__(self) -> 'AsyncLoopContext':
        return self

    async def __anext__(self) -> t.Tuple[t.Any, 'AsyncLoopContext']:
        # Mirrors ``LoopContext.__next__`` but awaits the async iterator:
        # prefer the lookahead item if one was buffered, otherwise pull
        # the next item, then update the bookkeeping attributes.
        if self._after is not missing:
            rv = self._after
            self._after = missing
        else:
            rv = await self._iterator.__anext__()
        self.index0 += 1
        self._before = self._current
        self._current = rv
        return (rv, self)

class Macro:
    """Wraps a macro function."""

    def __init__(self, environment: 'Environment', func: t.Callable[..., str], name: str, arguments: t.List[str], catch_kwargs: bool, catch_varargs: bool, caller: bool, default_autoescape: t.Optional[bool]=None):
        self._environment = environment
        self._func = func
        self._argument_count = len(arguments)
        self.name = name
        self.arguments = arguments
        self.catch_kwargs = catch_kwargs
        self.catch_varargs = catch_varargs
        self.caller = caller
        self.explicit_caller = 'caller' in arguments
        if default_autoescape is None:
            # A callable autoescape setting is resolved without a
            # template name.
            if callable(environment.autoescape):
                default_autoescape = environment.autoescape(None)
            else:
                default_autoescape = environment.autoescape
        self._default_autoescape = default_autoescape

    @internalcode
    @pass_eval_context
    def __call__(self, *args: t.Any, **kwargs: t.Any) -> str:
        # The decorator may prepend an eval context which carries the
        # autoescape setting; otherwise fall back to the default.
        if args and isinstance(args[0], EvalContext):
            autoescape = args[0].autoescape
            args = args[1:]
        else:
            autoescape = self._default_autoescape
        # Positional arguments first, then fill the rest from kwargs.
        arguments = list(args[:self._argument_count])
        off = len(arguments)
        found_caller = False
        if off != self._argument_count:
            for name in self.arguments[len(arguments):]:
                try:
                    value = kwargs.pop(name)
                except KeyError:
                    value = missing
                if name == 'caller':
                    found_caller = True
                arguments.append(value)
        else:
            found_caller = self.explicit_caller
        # Inject the special ``caller`` argument if the macro accesses it
        # but it was not passed explicitly.
        if self.caller and (not found_caller):
            caller = kwargs.pop('caller', None)
            if caller is None:
                caller = self._environment.undefined('No caller defined', name='caller')
            arguments.append(caller)
        if self.catch_kwargs:
            arguments.append(kwargs)
        elif kwargs:
            if 'caller' in kwargs:
                raise TypeError(f'macro {self.name!r} was invoked with two values for the special caller argument. This is most likely a bug.')
            raise TypeError(f'macro {self.name!r} takes no keyword argument {next(iter(kwargs))!r}')
        if self.catch_varargs:
            arguments.append(args[self._argument_count:])
        elif len(args) > self._argument_count:
            raise TypeError(f'macro {self.name!r} takes not more than {len(self.arguments)} argument(s)')
        return self._invoke(arguments, autoescape)

    async def _async_invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str:
        """Await the wrapped coroutine macro and apply autoescaping."""
        rv = await self._func(*arguments)
        if autoescape:
            return Markup(rv)
        return rv

    def _invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str:
        """Call the wrapped macro function with the prepared arguments.

        In async environments this returns an awaitable via
        :meth:`_async_invoke`.
        """
        if self._environment.is_async:
            return self._async_invoke(arguments, autoescape)
        rv = self._func(*arguments)
        if autoescape:
            rv = Markup(rv)
        return rv

    def __repr__(self) -> str:
        name = 'anonymous' if self.name is None else repr(self.name)
        return f'<{type(self).__name__} {name}>'

class Undefined:
    """The default undefined type.  This undefined type can be printed and
    iterated over, but every other access will raise an :exc:`UndefinedError`:

    >>> foo = Undefined(name='foo')
    >>> str(foo)
    ''
    >>> not foo
    True
    >>> foo + 42
    Traceback (most recent call last):
      ...
    jinja2.exceptions.UndefinedError: 'foo' is undefined
    """
    __slots__ = ('_undefined_hint', '_undefined_obj', '_undefined_name', '_undefined_exception')

    def __init__(self, hint: t.Optional[str]=None, obj: t.Any=missing, name: t.Optional[str]=None, exc: t.Type[TemplateRuntimeError]=UndefinedError) -> None:
        self._undefined_hint = hint
        self._undefined_obj = obj
        self._undefined_name = name
        self._undefined_exception = exc

    @property
    def _undefined_message(self) -> str:
        """Build a message about the undefined value based on how it was
        accessed.
        """
        # An explicit hint always wins.
        if self._undefined_hint:
            return self._undefined_hint
        # No containing object: a plain missing variable.
        if self._undefined_obj is missing:
            return f'{self._undefined_name!r} is undefined'
        # Non-string names come from item access (e.g. an integer index).
        if not isinstance(self._undefined_name, str):
            return f'{object_type_repr(self._undefined_obj)} has no element {self._undefined_name!r}'
        return f'{self._undefined_name!r} is undefined in {object_type_repr(self._undefined_obj)}'

    @internalcode
    def _fail_with_undefined_error(self, *args: t.Any, **kwargs: t.Any) -> 'te.NoReturn':
        """Raise an :exc:`UndefinedError` when operations are performed
        on the undefined value.
        """
        raise self._undefined_exception(self._undefined_message)

    @internalcode
    def __getattr__(self, name: str) -> t.Any:
        # Dunder lookups must fail with AttributeError so protocols like
        # copying and pickling keep working.
        if name[:2] == '__':
            raise AttributeError(name)
        return self._fail_with_undefined_error()
    # All arithmetic and comparison style operations fail loudly.
    __add__ = __radd__ = __sub__ = __rsub__ = _fail_with_undefined_error
    __mul__ = __rmul__ = __div__ = __rdiv__ = _fail_with_undefined_error
    __truediv__ = __rtruediv__ = _fail_with_undefined_error
    __floordiv__ = __rfloordiv__ = _fail_with_undefined_error
    __mod__ = __rmod__ = _fail_with_undefined_error
    __pos__ = __neg__ = _fail_with_undefined_error
    __call__ = __getitem__ = _fail_with_undefined_error
    __lt__ = __le__ = __gt__ = __ge__ = _fail_with_undefined_error
    __int__ = __float__ = __complex__ = _fail_with_undefined_error
    __pow__ = __rpow__ = _fail_with_undefined_error

    def __eq__(self, other: t.Any) -> bool:
        # All undefineds of the same class compare equal.
        return type(self) is type(other)

    def __ne__(self, other: t.Any) -> bool:
        return not self.__eq__(other)

    def __hash__(self) -> int:
        return id(type(self))

    def __str__(self) -> str:
        return ''

    def __len__(self) -> int:
        return 0

    def __iter__(self) -> t.Iterator[t.Any]:
        yield from ()

    async def __aiter__(self) -> t.AsyncIterator[t.Any]:
        for _ in ():
            yield

    def __bool__(self) -> bool:
        return False

    def __repr__(self) -> str:
        return 'Undefined'

def make_logging_undefined(logger: t.Optional['logging.Logger']=None, base: t.Type[Undefined]=Undefined) -> t.Type[Undefined]:
    """Given a logger object this returns a new undefined class that will
    log certain failures.  It will log iterations and printing.  If no
    logger is given a default logger is created.

    Example::

        logger = logging.getLogger(__name__)
        LoggingUndefined = make_logging_undefined(
            logger=logger,
            base=Undefined
        )

    .. versionadded:: 2.8

    :param logger: the logger to use.  If not provided, a default logger
                   is created.
    :param base: the base class to add logging functionality to.  This
                 defaults to :class:`Undefined`.
    """
    if logger is None:
        import logging
        import sys

        logger = logging.getLogger(__name__)
        logger.addHandler(logging.StreamHandler(sys.stderr))

    def _log_message(undef: Undefined) -> None:
        # Soft accesses (printing, iterating, truth testing) only warn.
        logger.warning('Template variable warning: %s', undef._undefined_message)

    class LoggingUndefined(base):  # type: ignore[valid-type, misc]
        __slots__ = ()

        def _fail_with_undefined_error(self, *args: t.Any, **kwargs: t.Any) -> 'te.NoReturn':
            # Hard failures are logged as errors before re-raising.
            try:
                return super()._fail_with_undefined_error(*args, **kwargs)
            except self._undefined_exception as e:
                logger.error('Template variable error: %s', e)
                raise e

        def __str__(self) -> str:
            _log_message(self)
            return super().__str__()

        def __iter__(self) -> t.Iterator[t.Any]:
            _log_message(self)
            return super().__iter__()

        def __bool__(self) -> bool:
            _log_message(self)
            return super().__bool__()

    return LoggingUndefined

class ChainableUndefined(Undefined):
    """An undefined that is chainable, where both ``__getattr__`` and
    ``__getitem__`` return itself rather than raising an
    :exc:`UndefinedError`.

    >>> foo = ChainableUndefined(name='foo')
    >>> str(foo.bar['baz'])
    ''
    >>> foo.bar['baz'] + 42
    Traceback (most recent call last):
      ...
    jinja2.exceptions.UndefinedError: 'foo' is undefined
    """
    __slots__ = ()

    def __html__(self) -> str:
        # Render as an empty string in autoescaped output.
        return str(self)

    def __getattr__(self, _: str) -> 'ChainableUndefined':
        # Attribute access chains: return the same undefined object
        # instead of failing immediately.
        return self
    # Subscript access chains exactly like attribute access.
    __getitem__ = __getattr__

class DebugUndefined(Undefined):
    """An undefined that returns the debug info when printed.

    >>> foo = DebugUndefined(name='foo')
    >>> str(foo)
    '{{ foo }}'
    >>> not foo
    True
    >>> foo + 42
    Traceback (most recent call last):
      ...
    jinja2.exceptions.UndefinedError: 'foo' is undefined
    """
    __slots__ = ()

    def __str__(self) -> str:
        # Render a mock template expression describing the access.
        if self._undefined_hint:
            detail = f'undefined value printed: {self._undefined_hint}'
        elif self._undefined_obj is missing:
            detail = self._undefined_name
        else:
            detail = f'no such element: {object_type_repr(self._undefined_obj)}[{self._undefined_name!r}]'
        return f'{{{{ {detail} }}}}'

class StrictUndefined(Undefined):
    """An undefined that barks on print and iteration as well as boolean
    tests and all kinds of comparisons.  In other words: you can do nothing
    with it except checking if it's defined using the `defined` test.

    >>> foo = StrictUndefined(name='foo')
    >>> str(foo)
    Traceback (most recent call last):
      ...
    jinja2.exceptions.UndefinedError: 'foo' is undefined
    >>> not foo
    Traceback (most recent call last):
      ...
    jinja2.exceptions.UndefinedError: 'foo' is undefined
    >>> foo + 42
    Traceback (most recent call last):
      ...
    jinja2.exceptions.UndefinedError: 'foo' is undefined
    """
    __slots__ = ()
    # Unlike the base class, even the "soft" operations (printing,
    # iterating, length, truthiness, equality, hashing, containment)
    # raise the undefined error.
    __iter__ = __str__ = __len__ = Undefined._fail_with_undefined_error
    __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error
    __contains__ = Undefined._fail_with_undefined_error
# NOTE(review): presumably this drops the now-unneeded ``__slots__``
# attribute lists from the classes (the slot descriptors created at class
# definition time remain in effect) — confirm intent.
del (Undefined.__slots__, ChainableUndefined.__slots__, DebugUndefined.__slots__, StrictUndefined.__slots__)
# (non-source tool/terminal output removed from end of file)