
     f}                        d Z ddlZddlZddlZddlmZmZ ddlmZ ddl	m
Z
mZmZmZ ddlmZmZmZmZmZmZ ddlmZ g dZg d	Z ed
           Z G d de          Z G d de          Z G d de          Z G d de          Z  G d d          Z! e!            Z" G d de#          Z$ G d d          Z%d Z& G d d          Z' e'            Z(d Z) G d d          Z* G d  d!e          Z+ G d" d#e          Z, G d$ d%ee,          Z- G d& d'          Z. G d( d)e-          Z/d* Z0 G d+ d,e,          Z1 G d- d.e-e1          Z2dS )/z
    pygments.lexer
    ~~~~~~~~~~~~~~

    Base lexer classes.

    :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
    N)apply_filtersFilter)get_filter_by_name)ErrorTextOther
_TokenType)get_bool_optget_int_optget_list_optmake_analysatorFutureguess_decode)	regex_opt)Lexer
RegexLexerExtendedRegexLexerDelegatingLexerLexerContextincludeinheritbygroupsusingthisdefaultwords))s   ﻿utf-8)s     zutf-32)s     zutf-32be)s   zutf-16)s   zutf-16bec                     dS )N         )xs    ]/var/www/api.educacionweb.es/myenv/lib/python3.11/site-packages/pip/_vendor/pygments/lexer.py<lambda>r#   !   s    #     c                       e Zd ZdZd ZdS )	LexerMetaz
    This metaclass automagically converts ``analyse_text`` methods into
    static methods which always return float values.
    """

    def __new__(mcs, name, bases, d):
        if 'analyse_text' in d:
            d['analyse_text'] = make_analysator(d['analyse_text'])
        return type.__new__(mcs, name, bases, d)


class Lexer(metaclass=LexerMeta):
    """
    Lexer for a specific language.

    Basic options recognized:
    ``stripnl``
        Strip leading and trailing newlines from the input (default: True).
    ``stripall``
        Strip all leading and trailing whitespace from the input
        (default: False).
    ``ensurenl``
        Make sure that the input ends with a newline (default: True).  This
        is required for some lexers that consume input linewise.

        .. versionadded:: 1.3

    ``tabsize``
        If given and greater than 0, expand tabs in the input (default: 0).
    ``encoding``
        If given, must be an encoding name. This encoding will be used to
        convert the input string to Unicode, if it is not already a Unicode
        string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
        Latin1 detection.  Can also be ``'chardet'`` to use the chardet
        library, if it is installed.
    ``inencoding``
        Overrides the ``encoding`` if given.
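
    A typical instantiation therefore looks like this (illustrative example;
    any concrete lexer class from ``pygments.lexers`` can be substituted)::

        from pip._vendor.pygments.lexers import PythonLexer
        lexer = PythonLexer(stripall=True, tabsize=4)
        for tokentype, value in lexer.get_tokens('x = 1'):
            print(tokentype, repr(value))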
    """

    #: Name of the lexer
    name = None

    #: URL of the language specification/definition
    url = None

    #: Shortcuts for the lexer
    aliases = []

    #: File name globs
    filenames = []

    #: Secondary file name globs
    alias_filenames = []

    #: MIME types
    mimetypes = []

    #: Priority, should multiple lexers match and no content is provided
    priority = 0

    def __init__(self, **options):
        self.options = options
        self.stripnl = get_bool_opt(options, 'stripnl', True)
        self.stripall = get_bool_opt(options, 'stripall', False)
        self.ensurenl = get_bool_opt(options, 'ensurenl', True)
        self.tabsize = get_int_opt(options, 'tabsize', 0)
        self.encoding = options.get('encoding', 'guess')
        self.encoding = options.get('inencoding') or self.encoding
        self.filters = []
        for filter_ in get_list_opt(options, 'filters', ()):
            self.add_filter(filter_)

    def __repr__(self):
        if self.options:
            return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
                                                     self.options)
        else:
            return '<pygments.lexers.%s>' % self.__class__.__name__

    def add_filter(self, filter_, **options):
        """
        Add a new stream filter to this lexer.
        """
        if not isinstance(filter_, Filter):
            filter_ = get_filter_by_name(filter_, **options)
        self.filters.append(filter_)

    def analyse_text(text):
        """
        Has to return a float between ``0`` and ``1`` that indicates
        if a lexer wants to highlight this text. Used by ``guess_lexer``.
        If this method returns ``0`` it won't highlight it in any case, if
        it returns ``1`` highlighting with this lexer is guaranteed.

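        For instance, a hypothetical lexer for a language whose files start
        with a fixed shebang line might implement it as follows (illustrative
        sketch only, not taken from a real lexer)::

            def analyse_text(text):
                if text.startswith('#!/usr/bin/env mylang'):
                    return 1.0
                return 0.0
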
        The `LexerMeta` metaclass automatically wraps this function so
        that it works like a static method (no ``self`` or ``cls``
        parameter) and the return value is automatically converted to
        `float`. If the return value is an object that is boolean `False`
        it's the same as if the return value was ``0.0``.
        """

    def get_tokens(self, text, unfiltered=False):
        """
        Return an iterable of (tokentype, value) pairs generated from
        `text`. If `unfiltered` is set to `True`, the filtering mechanism
        is bypassed even if filters are defined.

        Also preprocess the text, i.e. expand tabs and strip it if
        wanted, and apply registered filters.
        """
        if not isinstance(text, str):
            if self.encoding == 'guess':
                text, _ = guess_decode(text)
            elif self.encoding == 'chardet':
                try:
                    from pip._vendor import chardet
                except ImportError as e:
                    raise ImportError('To enable chardet encoding guessing, '
                                      'please install the chardet library '
                                      'from http://chardet.feedparser.org/') from e
                # check for BOM first
                decoded = None
                for bom, encoding in _encoding_map:
                    if text.startswith(bom):
                        decoded = text[len(bom):].decode(encoding, 'replace')
                        break
                # no BOM found, so use chardet
                if decoded is None:
                    enc = chardet.detect(text[:1024])  # guess using first 1KB
                    decoded = text.decode(enc.get('encoding') or 'utf-8',
                                          'replace')
                text = decoded
            else:
                text = text.decode(self.encoding)
                if text.startswith('\ufeff'):
                    text = text[len('\ufeff'):]
        else:
            if text.startswith('\ufeff'):
                text = text[len('\ufeff'):]

        # text now *is* a unicode string
        text = text.replace('\r\n', '\n')
        text = text.replace('\r', '\n')
        if self.stripall:
            text = text.strip()
        elif self.stripnl:
            text = text.strip('\n')
        if self.tabsize > 0:
            text = text.expandtabs(self.tabsize)
        if self.ensurenl and not text.endswith('\n'):
            text += '\n'

        def streamer():
            for _, t, v in self.get_tokens_unprocessed(text):
                yield t, v
        stream = streamer()
        if not unfiltered:
            stream = apply_filters(stream, self.filters, self)
        return stream

    def get_tokens_unprocessed(self, text):
        """
        Return an iterable of (index, tokentype, value) pairs where "index"
        is the starting position of the token within the input text.

        In subclasses, implement this method as a generator to
        maximize effectiveness.
        """
        raise NotImplementedError


class DelegatingLexer(Lexer):
    """
    This lexer takes two lexers as arguments. A root lexer and
    a language lexer. First everything is scanned using the language
    lexer, afterwards all ``Other`` tokens are lexed using the root
    lexer.

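    A subclass usually just fixes the two lexers in its constructor, roughly
    as the template lexers do (illustrative sketch; ``HtmlLexer`` and
    ``PhpLexer`` stand in for any root/language pair)::

        class HtmlPhpLexer(DelegatingLexer):
            def __init__(self, **options):
                super().__init__(HtmlLexer, PhpLexer, **options)
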
    The lexers from the ``template`` lexer package use this base lexer.
    """

    def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
        self.root_lexer = _root_lexer(**options)
        self.language_lexer = _language_lexer(**options)
        self.needle = _needle
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        buffered = ''
        insertions = []
        lng_buffer = []
        for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
            if t is self.needle:
                if lng_buffer:
                    insertions.append((len(buffered), lng_buffer))
                    lng_buffer = []
                buffered += v
            else:
                lng_buffer.append((i, t, v))
        if lng_buffer:
            insertions.append((len(buffered), lng_buffer))
        return do_insertions(insertions,
                             self.root_lexer.get_tokens_unprocessed(buffered))


class include(str):
    """
    Indicates that a state should include rules from another state.
    """


class _inherit:
    """
    Indicates that a state should inherit from its superclass.
    """
    def __repr__(self):
        return 'inherit'

inherit = _inherit()


class combined(tuple):
    """
    Indicates a state combined from multiple states.
    """

    def __new__(cls, *args):
        return tuple.__new__(cls, args)

    def __init__(self, *args):
        pass


class _PseudoMatch:
    """
    A pseudo match object constructed from a string.
    """

    def __init__(self, start, text):
        self._text = text
        self._start = start

    def start(self, arg=None):
        return self._start

    def end(self, arg=None):
        return self._start + len(self._text)

    def group(self, arg=None):
        if arg:
            raise IndexError('No such group')
        return self._text

    def groups(self):
        return (self._text,)

    def groupdict(self):
        return {}


def bygroups(*args):
    """
    Callback that yields multiple actions for each group in the match.
    """
    def callback(lexer, match, ctx=None):
        for i, action in enumerate(args):
            if action is None:
                continue
            elif type(action) is _TokenType:
                data = match.group(i + 1)
                if data:
                    yield match.start(i + 1), action, data
            else:
                data = match.group(i + 1)
                if data is not None:
                    if ctx:
                        ctx.pos = match.start(i + 1)
                    for item in action(lexer,
                                       _PseudoMatch(match.start(i + 1), data),
                                       ctx):
                        if item:
                            yield item
        if ctx:
            ctx.pos = match.end()
    return callback


class _This:
    """
    Special singleton used for indicating the caller class.
    Used by ``using``.
    """

this = _This()


def using(_other, **kwargs):
    """
    Callback that processes the match with a different lexer.

    The keyword arguments are forwarded to the lexer, except `state` which
    is handled separately.

    `state` specifies the state that the new lexer will start in, and can
    be an enumerable such as ('root', 'inline', 'string') or a simple
    string which is assumed to be on top of the root state.

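    In a token definition this is typically used as the action of a rule, for
    example (illustrative rule only)::

        (r'<<(.*?)>>', bygroups(using(this, state='inline'))),
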
    Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
    """
    gt_kwargs = {}
    if 'state' in kwargs:
        s = kwargs.pop('state')
        if isinstance(s, (list, tuple)):
            gt_kwargs['stack'] = s
        else:
            gt_kwargs['stack'] = ('root', s)

    if _other is this:
        def callback(lexer, match, ctx=None):
            # if keyword arguments are given, a new lexer instance is created
            if kwargs:
                kwargs.update(lexer.options)
                lx = lexer.__class__(**kwargs)
            else:
                lx = lexer
            s = match.start()
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    else:
        def callback(lexer, match, ctx=None):
            kwargs.update(lexer.options)
            lx = _other(**kwargs)

            s = match.start()
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    return callback


class default:
    """
    Indicates a state or state action (e.g. #pop) to apply.
    For example default('#pop') is equivalent to ('', Token, '#pop')
    Note that state tuples may be used as well.

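    For example (illustrative state definition; ``Number`` comes from
    ``pygments.token``)::

        'optional-tail': [
            (r'[0-9]+', Number),
            default('#pop'),
        ],
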
    .. versionadded:: 2.0
    """
    def __init__(self, state):
        self.state = state


class words(Future):
    """
    Indicates a list of literal words that is transformed into an optimized
    regex that matches any of the words.

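    For example (illustrative rule; ``Keyword`` comes from ``pygments.token``,
    and the optional ``prefix``/``suffix`` strings are glued around the
    generated regex)::

        (words(('if', 'elif', 'else', 'while')), Keyword),
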
    .. versionadded:: 2.0
    """
    def __init__(self, words, prefix='', suffix=''):
        self.words = words
        self.prefix = prefix
        self.suffix = suffix

    def get(self):
        return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)


class RegexLexerMeta(LexerMeta):
    """
    Metaclass for RegexLexer, creates the self._tokens attribute from
    self.tokens on the first instantiation.
    """

    def _process_regex(cls, regex, rflags, state):
        """Preprocess the regular expression component of a token definition."""
        if isinstance(regex, Future):
            regex = regex.get()
        return re.compile(regex, rflags).match

    def _process_token(cls, token):
        """Preprocess the token component of a token definition."""
        assert type(token) is _TokenType or callable(token), \
            'token type must be simple type or callable, not %r' % (token,)
        return token

    def _process_new_state(cls, new_state, unprocessed, processed):
        """Preprocess the state transition action of a token definition."""
        if isinstance(new_state, str):
            if new_state == '#pop':
                return -1
            elif new_state in unprocessed:
                return (new_state,)
            elif new_state == '#push':
                return new_state
            elif new_state[:5] == '#pop:':
                return -int(new_state[5:])
            else:
                assert False, 'unknown new state %r' % new_state
        elif isinstance(new_state, combined):
            # combine a new state from existing ones
            tmp_state = '_tmp_%d' % cls._tmpname
            cls._tmpname += 1
            itokens = []
            for istate in new_state:
                assert istate != new_state, 'circular state ref %r' % istate
                itokens.extend(cls._process_state(unprocessed,
                                                  processed, istate))
            processed[tmp_state] = itokens
            return (tmp_state,)
        elif isinstance(new_state, tuple):
            # push more than one state
            for istate in new_state:
                assert (istate in unprocessed or
                        istate in ('#pop', '#push')), \
                    'unknown new state ' + istate
            return new_state
        else:
            assert False, 'unknown new state def %r' % new_state

    def _process_state(cls, unprocessed, processed, state):
        """Preprocess a single state definition."""
        assert type(state) is str, "wrong state name %r" % state
        assert state[0] != '#', "invalid state name %r" % state
        if state in processed:
            return processed[state]
        tokens = processed[state] = []
        rflags = cls.flags
        for tdef in unprocessed[state]:
            if isinstance(tdef, include):
                assert tdef != state, "circular state reference %r" % state
                tokens.extend(cls._process_state(unprocessed, processed,
                                                 str(tdef)))
                continue
            if isinstance(tdef, _inherit):
                continue
            if isinstance(tdef, default):
                new_state = cls._process_new_state(tdef.state,
                                                   unprocessed, processed)
                tokens.append((re.compile('').match, None, new_state))
                continue

            assert type(tdef) is tuple, "wrong rule def %r" % tdef

            try:
                rex = cls._process_regex(tdef[0], rflags, state)
            except Exception as err:
                raise ValueError("uncompilable regex %r in state %r of %r: %s" %
                                 (tdef[0], state, cls, err)) from err

            token = cls._process_token(tdef[1])

            if len(tdef) == 2:
                new_state = None
            else:
                new_state = cls._process_new_state(tdef[2],
                                                   unprocessed, processed)

            tokens.append((rex, token, new_state))
        return tokens

    def process_tokendef(cls, name, tokendefs=None):
        """Preprocess a dictionary of token definitions."""
        processed = cls._all_tokens[name] = {}
        tokendefs = tokendefs or cls.tokens[name]
        for state in list(tokendefs):
            cls._process_state(tokendefs, processed, state)
        return processed

    def get_tokendefs(cls):
        """
        Merge tokens from superclasses in MRO order, returning a single tokendef
        dictionary.

        Any state that is not defined by a subclass will be inherited
        automatically.  States that *are* defined by subclasses will, by
        default, override that state in the superclass.  If a subclass wishes to
        inherit definitions from a superclass, it can use the special value
        "inherit", which will cause the superclass' state definition to be
        included at that point in the state.
        """
        tokens = {}
        inheritable = {}
        for c in cls.__mro__:
            toks = c.__dict__.get('tokens', {})

            for state, items in toks.items():
                curitems = tokens.get(state)
                if curitems is None:
                    # this is the first class in the MRO that defines the state
                    tokens[state] = items
                    try:
                        inherit_ndx = items.index(inherit)
                    except ValueError:
                        continue
                    inheritable[state] = inherit_ndx
                    continue

                inherit_ndx = inheritable.pop(state, None)
                if inherit_ndx is None:
                    continue

                # replace the "inherit" marker with the superclass items
                curitems[inherit_ndx:inherit_ndx + 1] = items
                try:
                    new_inh_ndx = items.index(inherit)
                except ValueError:
                    pass
                else:
                    inheritable[state] = inherit_ndx + new_inh_ndx

        return tokens

    def __call__(cls, *args, **kwds):
        """Instantiate cls after preprocessing its token definitions."""
        if '_tokens' not in cls.__dict__:
            cls._all_tokens = {}
            cls._tmpname = 0
            if hasattr(cls, 'token_variants') and cls.token_variants:
                # don't process yet
                pass
            else:
                cls._tokens = cls.process_tokendef('', cls.get_tokendefs())

        return type.__call__(cls, *args, **kwds)


class RegexLexer(Lexer, metaclass=RegexLexerMeta):
    """
    Base for simple stateful regular expression-based lexers.
    Simplifies the lexing process so that you need only
    provide a list of states and regular expressions.
    """

    #: Flags for compiling the regular expressions.
    #: Defaults to MULTILINE.
    flags = re.MULTILINE

    #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``;
    #: lexing starts in the ``'root'`` state.
    tokens = {}

    def get_tokens_unprocessed(self, text, stack=('root',)):
        """
        Split ``text`` into (tokentype, text) pairs.

        ``stack`` is the initial stack (default: ``['root']``)
        """
        pos = 0
        tokendefs = self._tokens
        statestack = list(stack)
        statetokens = tokendefs[statestack[-1]]
        while 1:
            for rexmatch, action, new_state in statetokens:
                m = rexmatch(text, pos)
                if m:
                    if action is not None:
                        if type(action) is _TokenType:
                            yield pos, action, m.group()
                        else:
                            yield from action(self, m)
                    pos = m.end()
                    if new_state is not None:
                        # state transition
                        if isinstance(new_state, tuple):
                            for state in new_state:
                                if state == '#pop':
                                    if len(statestack) > 1:
                                        statestack.pop()
                                elif state == '#push':
                                    statestack.append(statestack[-1])
                                else:
                                    statestack.append(state)
                        elif isinstance(new_state, int):
                            # pop, but never past the bottom 'root' state
                            if abs(new_state) >= len(statestack):
                                del statestack[1:]
                            else:
                                del statestack[new_state:]
                        elif new_state == '#push':
                            statestack.append(statestack[-1])
                        else:
                            assert False, "wrong state def: %r" % new_state
                        statetokens = tokendefs[statestack[-1]]
                    break
            else:
                # no state token matched
                try:
                    if text[pos] == '\n':
                        # at EOL, reset state to "root"
                        statestack = ['root']
                        statetokens = tokendefs['root']
                        yield pos, Text, '\n'
                        pos += 1
                        continue
                    yield pos, Error, text[pos]
                    pos += 1
                except IndexError:
                    break


class LexerContext:
    """
    A helper object that holds lexer position data.
    """

    def __init__(self, text, pos, stack=None, end=None):
        self.text = text
        self.pos = pos
        self.end = end or len(text)
        self.stack = stack or ['root']

    def __repr__(self):
        return f'LexerContext({self.text!r}, {self.pos!r}, {self.stack!r})'


class ExtendedRegexLexer(RegexLexer):
    """
    A RegexLexer that uses a context object to store its state.
    """

    def get_tokens_unprocessed(self, text=None, context=None):
        """
        Split ``text`` into (tokentype, text) pairs.
        If ``context`` is given, use this lexer context instead.
        """
        tokendefs = self._tokens
        if not context:
            ctx = LexerContext(text, 0)
            statetokens = tokendefs['root']
        else:
            ctx = context
            statetokens = tokendefs[ctx.stack[-1]]
            text = ctx.text
        while 1:
            for rexmatch, action, new_state in statetokens:
                m = rexmatch(text, ctx.pos, ctx.end)
                if m:
                    if action is not None:
                        if type(action) is _TokenType:
                            yield ctx.pos, action, m.group()
                            ctx.pos = m.end()
                        else:
                            # CAUTION: the callback must set ctx.pos itself
                            yield from action(self, m, ctx)
                            if not new_state:
                                # the callback may have altered the state stack
                                statetokens = tokendefs[ctx.stack[-1]]
                    if new_state is not None:
                        # state transition
                        if isinstance(new_state, tuple):
                            for state in new_state:
                                if state == '#pop':
                                    if len(ctx.stack) > 1:
                                        ctx.stack.pop()
                                elif state == '#push':
                                    ctx.stack.append(ctx.stack[-1])
                                else:
                                    ctx.stack.append(state)
                        elif isinstance(new_state, int):
                            # pop, but never past the bottom 'root' state
                            if abs(new_state) >= len(ctx.stack):
                                del ctx.stack[1:]
                            else:
                                del ctx.stack[new_state:]
                        elif new_state == '#push':
                            ctx.stack.append(ctx.stack[-1])
                        else:
                            assert False, "wrong state def: %r" % new_state
                        statetokens = tokendefs[ctx.stack[-1]]
                    break
            else:
                try:
                    if ctx.pos >= ctx.end:
                        break
                    if text[ctx.pos] == '\n':
                        # at EOL, reset state to "root"
                        ctx.stack = ['root']
                        statetokens = tokendefs['root']
                        yield ctx.pos, Text, '\n'
                        ctx.pos += 1
                        continue
                    yield ctx.pos, Error, text[ctx.pos]
                    ctx.pos += 1
                except IndexError:
                    break


def do_insertions(insertions, tokens):
    """
    Helper for lexers which must combine the results of several
    sublexers.

    ``insertions`` is a list of ``(index, itokens)`` pairs.
    Each ``itokens`` iterable should be inserted at position
    ``index`` into the token stream given by the ``tokens``
    argument.

    The result is a combined token stream.

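    A console-style lexer might use it roughly like this (illustrative
    sketch; ``pylexer`` and ``code`` are assumed to exist)::

        insertions = [(0, [(0, Generic.Prompt, '>>> ')])]
        yield from do_insertions(insertions,
                                 pylexer.get_tokens_unprocessed(code))
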
    TODO: clean up the code here.
    """
    insertions = iter(insertions)
    try:
        index, itokens = next(insertions)
    except StopIteration:
        # no insertions
        yield from tokens
        return

    realpos = None
    insleft = True

    # iterate over the token stream, splicing in the inserted tokens
    for i, t, v in tokens:
        # first iteration: store the position of the first item
        if realpos is None:
            realpos = i
        oldi = 0
        while insleft and i + len(v) >= index:
            tmpval = v[oldi:index - i]
            if tmpval:
                yield realpos, t, tmpval
                realpos += len(tmpval)
            for it_index, it_token, it_value in itokens:
                yield realpos, it_token, it_value
                realpos += len(it_value)
            oldi = index - i
            try:
                index, itokens = next(insertions)
            except StopIteration:
                insleft = False
                break
        if oldi < len(v):
            yield realpos, t, v[oldi:]
            realpos += len(v) - oldi

    # leftover insertions after the normal token stream is exhausted
    while insleft:
        realpos = realpos or 0
        for p, t, v in itokens:
            yield realpos, t, v
            realpos += len(v)
        try:
            index, itokens = next(insertions)
        except StopIteration:
            insleft = False
            break


class ProfilingRegexLexerMeta(RegexLexerMeta):
    """Metaclass for ProfilingRegexLexer, collects regex timing info."""

    def _process_regex(cls, regex, rflags, state):
        if isinstance(regex, words):
            rex = regex_opt(regex.words, prefix=regex.prefix,
                            suffix=regex.suffix)
        else:
            rex = regex
        compiled = re.compile(rex, rflags)

        def match_func(text, pos, endpos=sys.maxsize):
            info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
            t0 = time.time()
            res = compiled.match(text, pos, endpos)
            t1 = time.time()
            info[0] += 1
            info[1] += t1 - t0
            return res
        return match_func


class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
    """Drop-in replacement for RegexLexer that does profiling of its regexes."""

    _prof_data = []
    _prof_sort_index = 4  # defaults to time per call

    def get_tokens_unprocessed(self, text, stack=('root',)):
        # this needs to be a stack, since using(this) will produce nested calls
        self.__class__._prof_data.append({})
        yield from RegexLexer.get_tokens_unprocessed(self, text, stack)
        rawdata = self.__class__._prof_data.pop()
        data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
                        n, 1000 * t, 1000 * t / n)
                       for ((s, r), (n, t)) in rawdata.items()),
                      key=lambda x: x[self._prof_sort_index],
                      reverse=True)
        sum_total = sum(x[3] for x in data)

        print()
        print('Profiling result for %s lexing %d chars in %.3f ms' %
              (self.__class__.__name__, len(text), sum_total))
        print('=' * 110)
        print('%-20s %-64s ncalls  tottime  percall' % ('state', 'regex'))
        print('-' * 110)
        for d in data:
            print('%-20s %-65s %5d %8.4f %8.4f' % d)
        print('=' * 110)
 
 
 
 
u 
 
 
       6  4        uww/ / /d	 	 	 	 	 	 	 	M M M M MF M M M e1 e1 e1 e1 e1Y e1 e1 e1P^ ^ ^ ^ ^. ^ ^ ^ ^B- - - - - - - - E E E E E E E EP= = =@    n   ,    *0G      r$   