# -*- coding: utf-8 -*-
"""
    jinja2.lexer
    ~~~~~~~~~~~~

    This module implements a Jinja / Python combination lexer. The
    `Lexer` class provided by this module is used to do some preprocessing
    for Jinja.

    On the one hand it filters out invalid operators like the bitshift
    operators we don't allow in templates. On the other hand it separates
    template code and Python code in expressions.

    :copyright: (c) 2010 by the Jinja Team.
    :license: BSD, see LICENSE for more details.
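
    A rough sketch of the token stream this module produces (doctest-style;
    it assumes the default delimiters and the ``lexer`` property that
    environments expose, which is backed by :func:`get_lexer` below)::

        >>> from jinja2 import Environment
        >>> stream = Environment().lexer.tokenize(u'Hello {{ name }}!')
        >>> [t.type for t in stream]
        ['data', 'variable_begin', 'name', 'variable_end', 'data']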
"""
import re

from operator import itemgetter
from collections import deque

from jinja2.exceptions import TemplateSyntaxError
from jinja2.utils import LRUCache
from jinja2._compat import iteritems, implements_iterator, text_type, \
     intern, PY2


# cache for the lexers so multiple environments with the same
# configuration can share a single lexer
_lexer_cache = LRUCache(50)

# static regular expressions
whitespace_re = re.compile(r'\s+', re.U)
string_re = re.compile(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
                       r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S)
integer_re = re.compile(r'\d+')

# use the unicode identifier rule if this Python version is able to
# handle unicode identifiers, otherwise the standard ASCII one
try:
    compile('föö', '<unknown>', 'eval')
except SyntaxError:
    name_re = re.compile(r'\b[a-zA-Z_][a-zA-Z0-9_]*\b')
else:
    from jinja2 import _stringdefs
    name_re = re.compile('[%s][%s]*' % (_stringdefs.xid_start,
                                        _stringdefs.xid_continue))

float_re = re.compile(r'(?<!\.)\d+\.\d+')
newline_re = re.compile(r'(\r\n|\r|\n)')

# intern the token types so comparisons can use identity checks
TOKEN_ADD = intern('add')
TOKEN_ASSIGN = intern('assign')
TOKEN_COLON = intern('colon')
TOKEN_COMMA = intern('comma')
TOKEN_DIV = intern('div')
TOKEN_DOT = intern('dot')
TOKEN_EQ = intern('eq')
TOKEN_FLOORDIV = intern('floordiv')
TOKEN_GT = intern('gt')
TOKEN_GTEQ = intern('gteq')
TOKEN_LBRACE = intern('lbrace')
TOKEN_LBRACKET = intern('lbracket')
TOKEN_LPAREN = intern('lparen')
TOKEN_LT = intern('lt')
TOKEN_LTEQ = intern('lteq')
TOKEN_MOD = intern('mod')
TOKEN_MUL = intern('mul')
TOKEN_NE = intern('ne')
TOKEN_PIPE = intern('pipe')
TOKEN_POW = intern('pow')
TOKEN_RBRACE = intern('rbrace')
TOKEN_RBRACKET = intern('rbracket')
TOKEN_RPAREN = intern('rparen')
TOKEN_SEMICOLON = intern('semicolon')
TOKEN_SUB = intern('sub')
TOKEN_TILDE = intern('tilde')
TOKEN_WHITESPACE = intern('whitespace')
TOKEN_FLOAT = intern('float')
TOKEN_INTEGER = intern('integer')
TOKEN_NAME = intern('name')
TOKEN_STRING = intern('string')
TOKEN_OPERATOR = intern('operator')
TOKEN_BLOCK_BEGIN = intern('block_begin')
TOKEN_BLOCK_END = intern('block_end')
TOKEN_VARIABLE_BEGIN = intern('variable_begin')
TOKEN_VARIABLE_END = intern('variable_end')
TOKEN_RAW_BEGIN = intern('raw_begin')
TOKEN_RAW_END = intern('raw_end')
TOKEN_COMMENT_BEGIN = intern('comment_begin')
TOKEN_COMMENT_END = intern('comment_end')
TOKEN_COMMENT = intern('comment')
TOKEN_LINESTATEMENT_BEGIN = intern('linestatement_begin')
TOKEN_LINESTATEMENT_END = intern('linestatement_end')
TOKEN_LINECOMMENT_BEGIN = intern('linecomment_begin')
TOKEN_LINECOMMENT_END = intern('linecomment_end')
TOKEN_LINECOMMENT = intern('linecomment')
TOKEN_DATA = intern('data')
TOKEN_INITIAL = intern('initial')
TOKEN_EOF = intern('eof')

# bind operators to token types
operators = {
    '+':    TOKEN_ADD,
    '-':    TOKEN_SUB,
    '/':    TOKEN_DIV,
    '//':   TOKEN_FLOORDIV,
    '*':    TOKEN_MUL,
    '%':    TOKEN_MOD,
    '**':   TOKEN_POW,
    '~':    TOKEN_TILDE,
    '[':    TOKEN_LBRACKET,
    ']':    TOKEN_RBRACKET,
    '(':    TOKEN_LPAREN,
    ')':    TOKEN_RPAREN,
    '{':    TOKEN_LBRACE,
    '}':    TOKEN_RBRACE,
    '==':   TOKEN_EQ,
    '!=':   TOKEN_NE,
    '>':    TOKEN_GT,
    '>=':   TOKEN_GTEQ,
    '<':    TOKEN_LT,
    '<=':   TOKEN_LTEQ,
    '=':    TOKEN_ASSIGN,
    '.':    TOKEN_DOT,
    ':':    TOKEN_COLON,
    '|':    TOKEN_PIPE,
    ',':    TOKEN_COMMA,
    ';':    TOKEN_SEMICOLON
}

reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
assert len(operators) == len(reverse_operators), 'operators dropped'
operator_re = re.compile('(%s)' % '|'.join(re.escape(x) for x in
                         sorted(operators, key=lambda x: -len(x))))

ignored_tokens = frozenset([TOKEN_COMMENT_BEGIN, TOKEN_COMMENT,
                            TOKEN_COMMENT_END, TOKEN_WHITESPACE,
                            TOKEN_LINECOMMENT_BEGIN, TOKEN_LINECOMMENT_END,
                            TOKEN_LINECOMMENT])
ignore_if_empty = frozenset([TOKEN_WHITESPACE, TOKEN_DATA,
                             TOKEN_COMMENT, TOKEN_LINECOMMENT])
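
# A short illustration of why ``operator_re`` sorts the alternation by
# decreasing length: compound operators must win over their one-character
# prefixes.  A minimal doctest-style sketch (not part of the original
# module text)::
#
#     >>> operator_re.match('**').group()
#     '**'
#     >>> operator_re.match('//').group()
#     '//'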


def _describe_token_type(token_type):
    if token_type in reverse_operators:
        return reverse_operators[token_type]
    return {
        TOKEN_COMMENT_BEGIN:        'begin of comment',
        TOKEN_COMMENT_END:          'end of comment',
        TOKEN_COMMENT:              'comment',
        TOKEN_LINECOMMENT:          'comment',
        TOKEN_BLOCK_BEGIN:          'begin of statement block',
        TOKEN_BLOCK_END:            'end of statement block',
        TOKEN_VARIABLE_BEGIN:       'begin of print statement',
        TOKEN_VARIABLE_END:         'end of print statement',
        TOKEN_LINESTATEMENT_BEGIN:  'begin of line statement',
        TOKEN_LINESTATEMENT_END:    'end of line statement',
        TOKEN_DATA:                 'template data / text',
        TOKEN_EOF:                  'end of template'
    }.get(token_type, token_type)


def describe_token(token):
    """Returns a description of the token."""
    if token.type == 'name':
        return token.value
    return _describe_token_type(token.type)


def describe_token_expr(expr):
    """Like `describe_token` but for token expressions."""
    if ':' in expr:
        type, value = expr.split(':', 1)
        if type == 'name':
            return value
    else:
        type = expr
    return _describe_token_type(type)


def count_newlines(value):
    """Count the number of newline characters in the string.  This is
    useful for extensions that filter a stream.
    """
    return len(newline_re.findall(value))


def compile_rules(environment):
    """Compiles all the rules from the environment into a list of rules."""
    e = re.escape
    rules = [
        (len(environment.comment_start_string), 'comment',
         e(environment.comment_start_string)),
        (len(environment.block_start_string), 'block',
         e(environment.block_start_string)),
        (len(environment.variable_start_string), 'variable',
         e(environment.variable_start_string))
    ]

    if environment.line_statement_prefix is not None:
        rules.append((len(environment.line_statement_prefix), 'linestatement',
                      r'^[ \t\v]*' + e(environment.line_statement_prefix)))
    if environment.line_comment_prefix is not None:
        rules.append((len(environment.line_comment_prefix), 'linecomment',
                      r'(?:^|(?<=\S))[^\S\r\n]*' +
                      e(environment.line_comment_prefix)))

    return [x[1:] for x in sorted(rules, reverse=True)]
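
# Example of the helpers above, as a doctest-style sketch (it assumes the
# module is importable as ``jinja2.lexer``)::
#
#     >>> from jinja2.lexer import describe_token_expr, count_newlines
#     >>> describe_token_expr('name:endfor')
#     'endfor'
#     >>> describe_token_expr('block_end')
#     'end of statement block'
#     >>> count_newlines('foo\nbar\r\nbaz\r')
#     3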


class Failure(object):
    """Class that raises a `TemplateSyntaxError` if called.
    Used by the `Lexer` to specify known errors.
    """

    def __init__(self, message, cls=TemplateSyntaxError):
        self.message = message
        self.error_class = cls

    def __call__(self, lineno, filename):
        raise self.error_class(self.message, lineno, filename)
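
# ``Failure`` instances sit inside the rule tables built by ``Lexer``
# below, so hitting an unterminated construct raises immediately.  A
# minimal sketch of that behavior::
#
#     >>> from jinja2.lexer import Failure
#     >>> from jinja2.exceptions import TemplateSyntaxError
#     >>> fail = Failure('Missing end of comment tag')
#     >>> try:
#     ...     fail(1, 'example.html')
#     ... except TemplateSyntaxError as e:
#     ...     e.message
#     'Missing end of comment tag'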


class Token(tuple):
    """Token class."""
    __slots__ = ()
    lineno, type, value = (property(itemgetter(x)) for x in range(3))

    def __new__(cls, lineno, type, value):
        return tuple.__new__(cls, (lineno, intern(str(type)), value))

    def __str__(self):
        if self.type in reverse_operators:
            return reverse_operators[self.type]
        elif self.type == 'name':
            return self.value
        return self.type

    def test(self, expr):
        """Test a token against a token expression.  This can either be a
        token type or ``'token_type:token_value'``.  This can only test
        against string values and types.
        """
        # here we do a regular string equality check as test_any is usually
        # passed an iterable of not interned strings.
        if self.type == expr:
            return True
        elif ':' in expr:
            return expr.split(':', 1) == [self.type, self.value]
        return False

    def test_any(self, *iterable):
        """Test against multiple token expressions."""
        for expr in iterable:
            if self.test(expr):
                return True
        return False

    def __repr__(self):
        return 'Token(%r, %r, %r)' % (
            self.lineno,
            self.type,
            self.value
        )
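
# ``Token`` is a plain ``tuple`` subclass, so it unpacks like one.  A small
# doctest-style sketch of the test helpers::
#
#     >>> from jinja2.lexer import Token, describe_token
#     >>> tok = Token(1, 'name', 'foo')
#     >>> tok.test('name'), tok.test('name:foo'), tok.test('name:bar')
#     (True, True, False)
#     >>> lineno, type, value = tok
#     >>> describe_token(tok)
#     'foo'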


@implements_iterator
class TokenStreamIterator(object):
    """The iterator for tokenstreams.  Iterate over the stream
    until the eof token is reached.
    """

    def __init__(self, stream):
        self.stream = stream

    def __iter__(self):
        return self

    def __next__(self):
        token = self.stream.current
        if token.type is TOKEN_EOF:
            self.stream.close()
            raise StopIteration()
        next(self.stream)
        return token


@implements_iterator
class TokenStream(object):
    """A token stream is an iterable that yields :class:`Token`\s.  The
    parser however does not iterate over it but calls :meth:`next` to go
    one token ahead.  The current active token is stored as :attr:`current`.
    """

    def __init__(self, generator, name, filename):
        self._iter = iter(generator)
        self._pushed = deque()
        self.name = name
        self.filename = filename
        self.closed = False
        self.current = Token(1, TOKEN_INITIAL, '')
        next(self)

    def __iter__(self):
        return TokenStreamIterator(self)

    def __bool__(self):
        return bool(self._pushed) or self.current.type is not TOKEN_EOF
    __nonzero__ = __bool__  # py2 compatibility

    eos = property(lambda x: not x, doc='Are we at the end of the stream?')

    def push(self, token):
        """Push a token back to the stream."""
        self._pushed.append(token)

    def look(self):
        """Look at the next token."""
        old_token = next(self)
        result = self.current
        self.push(result)
        self.current = old_token
        return result

    def skip(self, n=1):
        """Go n tokens ahead."""
        for x in range(n):
            next(self)

    def next_if(self, expr):
        """Perform the token test and return the token if it matched.
        Otherwise the return value is `None`.
        """
        if self.current.test(expr):
            return next(self)

    def skip_if(self, expr):
        """Like :meth:`next_if` but only returns `True` or `False`."""
        return self.next_if(expr) is not None

    def __next__(self):
        """Go one token ahead and return the old one"""
        rv = self.current
        if self._pushed:
            self.current = self._pushed.popleft()
        elif self.current.type is not TOKEN_EOF:
            try:
                self.current = next(self._iter)
            except StopIteration:
                self.close()
        return rv

    def close(self):
        """Close the stream."""
        self.current = Token(self.current.lineno, TOKEN_EOF, '')
        self._iter = None
        self.closed = True

    def expect(self, expr):
        """Expect a given token type and return it.  This accepts the same
        argument as :meth:`jinja2.lexer.Token.test`.
        """
        if not self.current.test(expr):
            expr = describe_token_expr(expr)
            if self.current.type is TOKEN_EOF:
                raise TemplateSyntaxError('unexpected end of template, '
                                          'expected %r.' % expr,
                                          self.current.lineno,
                                          self.name, self.filename)
            raise TemplateSyntaxError("expected token %r, got %r" %
                                      (expr, describe_token(self.current)),
                                      self.current.lineno,
                                      self.name, self.filename)
        try:
            return self.current
        finally:
            next(self)


def get_lexer(environment):
    """Return a lexer which is probably cached."""
    key = (environment.block_start_string,
           environment.block_end_string,
           environment.variable_start_string,
           environment.variable_end_string,
           environment.comment_start_string,
           environment.comment_end_string,
           environment.line_statement_prefix,
           environment.line_comment_prefix,
           environment.trim_blocks,
           environment.lstrip_blocks,
           environment.newline_sequence,
           environment.keep_trailing_newline)
    lexer = _lexer_cache.get(key)
    if lexer is None:
        lexer = Lexer(environment)
        _lexer_cache[key] = lexer
    return lexer
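
# How the stream machinery fits together, as a hedged sketch:
# ``Lexer.tokeniter`` (below) yields plain tuples, ``Lexer.wrap`` turns
# them into ``Token`` objects, and ``TokenStream`` adds the one-token
# lookahead the parser relies on::
#
#     >>> from jinja2.lexer import Token, TokenStream
#     >>> tokens = [Token(1, 'name', 'foo'), Token(1, 'eof', '')]
#     >>> stream = TokenStream(iter(tokens), '<test>', None)
#     >>> stream.current
#     Token(1, 'name', 'foo')
#     >>> stream.expect('name')
#     Token(1, 'name', 'foo')
#     >>> stream.eos
#     True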


class Lexer(object):
    """Class that implements a lexer for a given environment.  Automatically
    created by the environment class, usually you don't have to do that.

    Note that the lexer is not automatically bound to an environment.
    Multiple environments can share the same lexer.
    """

    def __init__(self, environment):
        # shortcuts
        c = lambda x: re.compile(x, re.M | re.S)
        e = re.escape

        # lexing rules for tags
        tag_rules = [
            (whitespace_re, TOKEN_WHITESPACE, None),
            (float_re, TOKEN_FLOAT, None),
            (integer_re, TOKEN_INTEGER, None),
            (name_re, TOKEN_NAME, None),
            (string_re, TOKEN_STRING, None),
            (operator_re, TOKEN_OPERATOR, None)
        ]

        # assemble the root lexing rule; the delimiters come pre-sorted by
        # length from compile_rules so that longer markers win
        root_tag_rules = compile_rules(environment)

        # block suffix if trimming is enabled
        block_suffix_re = environment.trim_blocks and '\\n?' or ''

        # strip leading spaces if lstrip_blocks is enabled
        prefix_re = {}
        if environment.lstrip_blocks:
            # use '{%+' to manually disable lstrip_blocks behavior
            no_lstrip_re = e('+')
            # detect overlap between block and variable or comment strings
            block_diff = c(r'^%s(.*)' % e(environment.block_start_string))
            # make sure we don't mistake a block for a variable or a comment
            m = block_diff.match(environment.comment_start_string)
            no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
            m = block_diff.match(environment.variable_start_string)
            no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''

            # detect overlap between comment and variable strings
            comment_diff = c(r'^%s(.*)' % e(environment.comment_start_string))
            m = comment_diff.match(environment.variable_start_string)
            no_variable_re = m and r'(?!%s)' % e(m.group(1)) or ''

            lstrip_re = r'^[ \t]*'
            block_prefix_re = r'%s%s(?!%s)|%s\+?' % (
                    lstrip_re,
                    e(environment.block_start_string),
                    no_lstrip_re,
                    e(environment.block_start_string),
                    )
            comment_prefix_re = r'%s%s%s|%s\+?' % (
                    lstrip_re,
                    e(environment.comment_start_string),
                    no_variable_re,
                    e(environment.comment_start_string),
                    )
            prefix_re['block'] = block_prefix_re
            prefix_re['comment'] = comment_prefix_re
        else:
            block_prefix_re = '%s' % e(environment.block_start_string)

        self.newline_sequence = environment.newline_sequence
        self.keep_trailing_newline = environment.keep_trailing_newline

        # global lexing rules
        self.rules = {
            'root': [
                # directives
                (c('(.*?)(?:%s)' % '|'.join(
                    [r'(?P<raw_begin>(?:\s*%s\-|%s)\s*raw\s*(?:\-%s\s*|%s))' % (
                        e(environment.block_start_string),
                        block_prefix_re,
                        e(environment.block_end_string),
                        e(environment.block_end_string)
                    )] + [
                        r'(?P<%s_begin>\s*%s\-|%s)' % (n, r, prefix_re.get(n, r))
                        for n, r in root_tag_rules
                    ])), (TOKEN_DATA, '#bygroup'), '#bygroup'),
                # data
                (c('.+'), TOKEN_DATA, None)
            ],
            # comments
            TOKEN_COMMENT_BEGIN: [
                (c(r'(.*?)((?:\-%s\s*|%s)%s)' % (
                    e(environment.comment_end_string),
                    e(environment.comment_end_string),
                    block_suffix_re
                )), (TOKEN_COMMENT, TOKEN_COMMENT_END), '#pop'),
                (c('(.)'), (Failure('Missing end of comment tag'),), None)
            ],
            # blocks
            TOKEN_BLOCK_BEGIN: [
                (c(r'(?:\-%s\s*|%s)%s' % (
                    e(environment.block_end_string),
                    e(environment.block_end_string),
                    block_suffix_re
                )), TOKEN_BLOCK_END, '#pop'),
            ] + tag_rules,
            # variables
            TOKEN_VARIABLE_BEGIN: [
                (c(r'\-%s\s*|%s' % (
                    e(environment.variable_end_string),
                    e(environment.variable_end_string)
                )), TOKEN_VARIABLE_END, '#pop')
            ] + tag_rules,
            # raw block
            TOKEN_RAW_BEGIN: [
                (c(r'(.*?)((?:\s*%s\-|%s)\s*endraw\s*(?:\-%s\s*|%s%s))' % (
                    e(environment.block_start_string),
                    block_prefix_re,
                    e(environment.block_end_string),
                    e(environment.block_end_string),
                    block_suffix_re
                )), (TOKEN_DATA, TOKEN_RAW_END), '#pop'),
                (c('(.)'), (Failure('Missing end of raw directive'),), None)
            ],
            # line statements
            TOKEN_LINESTATEMENT_BEGIN: [
                (c(r'\s*(\n|$)'), TOKEN_LINESTATEMENT_END, '#pop')
            ] + tag_rules,
            # line comments
            TOKEN_LINECOMMENT_BEGIN: [
                (c(r'(.*?)()(?=\n|$)'), (TOKEN_LINECOMMENT,
                 TOKEN_LINECOMMENT_END), '#pop')
            ]
        }

    def _normalize_newlines(self, value):
        """Called for strings and template data to normalize it to unicode."""
        return newline_re.sub(self.newline_sequence, value)

    def tokenize(self, source, name=None, filename=None, state=None):
        """Calls tokeniter + tokenize and wraps it in a token stream."""
        stream = self.tokeniter(source, name, filename, state)
        return TokenStream(self.wrap(stream, name, filename), name, filename)

    def wrap(self, stream, name=None, filename=None):
        """This is called with the stream as returned by `tokenize` and wraps
        every token in a :class:`Token` and converts the value.
        """
        for lineno, token, value in stream:
            if token in ignored_tokens:
                continue
            elif token == 'linestatement_begin':
                token = 'block_begin'
            elif token == 'linestatement_end':
                token = 'block_end'
            # we are not interested in those tokens in the parser
            elif token in ('raw_begin', 'raw_end'):
                continue
            elif token == 'data':
                value = self._normalize_newlines(value)
            elif token == 'keyword':
                token = value
            elif token == 'name':
                value = str(value)
            elif token == 'string':
                # try to unescape string
                try:
                    value = self._normalize_newlines(value[1:-1]) \
                        .encode('ascii', 'backslashreplace') \
                        .decode('unicode-escape')
                except Exception as e:
                    msg = str(e).split(':')[-1].strip()
                    raise TemplateSyntaxError(msg, lineno, name, filename)
                # on Python 2 try to express the value as a bytestring
                # (ascii only) for APIs that choke on unicode
                if PY2:
                    try:
                        value = value.encode('ascii')
                    except UnicodeError:
                        pass
            elif token == 'integer':
                value = int(value)
            elif token == 'float':
                value = float(value)
            elif token == 'operator':
                token = operators[value]
            yield Token(lineno, token, value)

    def tokeniter(self, source, name, filename=None, state=None):
        """This method tokenizes the text and returns the tokens in a
        generator.  Use this method if you just want to tokenize a template.
        """
        source = text_type(source)
        lines = source.splitlines()
        if self.keep_trailing_newline and source:
            for newline in ('\r\n', '\r', '\n'):
                if source.endswith(newline):
                    lines.append('')
                    break
        source = '\n'.join(lines)
        pos = 0
        lineno = 1
        stack = ['root']
        if state is not None and state != 'root':
            assert state in ('variable', 'block'), 'invalid state'
            stack.append(state + '_begin')
        statetokens = self.rules[stack[-1]]
        source_length = len(source)

        balancing_stack = []

        while 1:
            # tokenizer loop
            for regex, tokens, new_state in statetokens:
                m = regex.match(source, pos)
                # if no match we try again with the next rule
                if m is None:
                    continue

                # we only match blocks and variables if braces / parentheses
                # are balanced. continue parsing with the lower rule which
                # is the operator rule
                if balancing_stack and \
                   tokens in ('variable_end', 'block_end',
                              'linestatement_end'):
                    continue

                # tuples support more options
                if isinstance(tokens, tuple):
                    for idx, token in enumerate(tokens):
                        # failure group
                        if token.__class__ is Failure:
                            raise token(lineno, filename)
                        # bygroup is a bit more complex, in that case we
                        # yield for the current token the first named
                        # group that matched
                        elif token == '#bygroup':
                            for key, value in iteritems(m.groupdict()):
                                if value is not None:
                                    yield lineno, key, value
                                    lineno += value.count('\n')
                                    break
                            else:
                                raise RuntimeError('%r wanted to resolve '
                                                   'the token dynamically'
                                                   ' but no group matched'
                                                   % regex)
                        # normal group
                        else:
                            data = m.group(idx + 1)
                            if data or token not in ignore_if_empty:
                                yield lineno, token, data
                            lineno += data.count('\n')

                # strings as token just are yielded as it
                else:
                    data = m.group()
                    # update brace/parenthesis balance
                    if tokens == 'operator':
                        if data == '{':
                            balancing_stack.append('}')
                        elif data == '(':
                            balancing_stack.append(')')
                        elif data == '[':
                            balancing_stack.append(']')
                        elif data in ('}', ')', ']'):
                            if not balancing_stack:
                                raise TemplateSyntaxError("unexpected '%s'" %
                                                          data, lineno, name,
                                                          filename)
                            expected_op = balancing_stack.pop()
                            if expected_op != data:
                                raise TemplateSyntaxError(
                                    "unexpected '%s', expected '%s'" %
                                    (data, expected_op),
                                    lineno, name, filename)
                    # yield items
                    if data or tokens not in ignore_if_empty:
                        yield lineno, tokens, data
                    lineno += data.count('\n')

                # fetch new position into new variable so that we can check
                # if there is an internal parsing error which would result
                # in an infinite loop
                pos2 = m.end()

                # handle state changes
                if new_state is not None:
                    # remove the uppermost state
                    if new_state == '#pop':
                        stack.pop()
                    # resolve the new state by group checking
                    elif new_state == '#bygroup':
                        for key, value in iteritems(m.groupdict()):
                            if value is not None:
                                stack.append(key)
                                break
                        else:
                            raise RuntimeError('%r wanted to resolve the '
                                               'new state dynamically but'
                                               ' no group matched' % regex)
                    # direct state name without special meaning
                    else:
                        stack.append(new_state)
                    statetokens = self.rules[stack[-1]]
                # we are still at the same position and no stack change:
                # this means a rule loops without consuming input, avoid
                # that and raise an error
                elif pos2 == pos:
                    raise RuntimeError('%r yielded empty string without '
                                       'stack change' % regex)
                # publish new position and start again
                pos = pos2
                break
            # if loop terminated without break we haven't found a single
            # match: either we are at the end of the file or we have a
            # problem
            else:
                # end of text
                if pos >= source_length:
                    return
                # something went wrong
                raise TemplateSyntaxError('unexpected char %r at %d' %
                                          (source[pos], pos), lineno,
                                          name, filename)
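
# Usage sketch (not part of the original module): the low-level
# ``Lexer.tokeniter`` yields plain ``(lineno, type, value)`` tuples,
# whitespace included, while ``Lexer.tokenize`` pipes them through ``wrap``,
# dropping the ``ignored_tokens`` and converting values.  Assuming jinja2 is
# importable and the default delimiters are in use::
#
#     >>> from jinja2 import Environment
#     >>> lexer = Environment().lexer
#     >>> [t[1] for t in lexer.tokeniter(u'{{ name }}', '<demo>')]
#     ['variable_begin', 'whitespace', 'name', 'whitespace', 'variable_end']
#     >>> [t.type for t in lexer.tokenize(u'{{ name }}')]
#     ['variable_begin', 'name', 'variable_end']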