"""SQL Lexer"""

from io import TextIOBase

from sqlparse import tokens
from sqlparse.keywords import SQL_REGEX
from sqlparse.utils import consume


class Lexer:
    """Lexer
    Empty class. Leaving for backwards-compatibility
    """

    @staticmethod
    def get_tokens(text, encoding=None):
        """
        Return an iterable of (tokentype, value) pairs generated from
        `text`. If `unfiltered` is set to `True`, the filtering mechanism
        is bypassed even if filters are defined.

        Also preprocess the text, i.e. expand tabs and strip it if
        wanted and applies registered filters.

        Split ``text`` into (tokentype, text) pairs.

        ``stack`` is the initial stack (default: ``['root']``)
        """
        if isinstance(text, TextIOBase):
            text = text.read()

        # Accept str, bytes, or a file-like object; bytes are decoded
        # to str before tokenizing.
        if isinstance(text, str):
            pass
        elif isinstance(text, bytes):
            if encoding:
                text = text.decode(encoding)
            else:
                try:
                    text = text.decode('utf-8')
                except UnicodeDecodeError:
                    text = text.decode('unicode-escape')
        else:
            raise TypeError("Expected text or file-like object, got {!r}".
                            format(type(text)))

        # Scan the input left to right, trying each (regex, action) pair
        # from SQL_REGEX at the current position.
        iterable = enumerate(text)
        for pos, char in iterable:
            for rexmatch, action in SQL_REGEX:
                m = rexmatch(text, pos)

                if not m:
                    continue
                elif isinstance(action, tokens._TokenType):
                    yield action, m.group()
                elif callable(action):
                    yield action(m.group())

                # Skip over the characters consumed by this match.
                consume(iterable, m.end() - pos - 1)
                break
            else:
                # No pattern matched at this position.
                yield tokens.Error, char


def tokenize(sql, encoding=None):
    """Tokenize sql.

    Tokenize *sql* using the :class:`Lexer` and return a 2-tuple stream
    of ``(token type, value)`` items.
    """
    return Lexer().get_tokens(sql, encoding)