| author | Rob Hudson | 2009-09-22 19:14:31 -0700 |
|---|---|---|
| committer | Rob Hudson | 2009-09-22 19:20:50 -0700 |
| commit | 6c05fad6a9835ae22481b90d6b58f00f90929663 (patch) | |
| tree | 4034b0eb6cfb628782e3106126e621c6c2ab780d /debug_toolbar/utils/sqlparse/tokens.py | |
| parent | 39174123f6d7371664b448604d421a66bed74cb0 (diff) | |
| download | django-debug-toolbar-6c05fad6a9835ae22481b90d6b58f00f90929663.tar.bz2 | |
Added sqlparse, replacing my simple string-replace handling of SQL keywords,
and updated the management command and SQL panel.
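In practice the change means query text can be handed to sqlparse for tokenizing and re-formatting instead of running naive keyword substitution over the raw string. A minimal sketch of the kind of call this enables, assuming the vendored copy exposes upstream python-sqlparse's `format()` helper (the panel's actual call sites are not part of this diff):

```python
# Hypothetical usage sketch -- not the panel's actual code.
# Assumes the vendored package mirrors upstream python-sqlparse's API.
from debug_toolbar.utils import sqlparse

raw = "select * from auth_user where username = 'admin' order by id"

# keyword_case='upper' uppercases recognized SQL keywords. A bare
# str.replace() over keyword substrings cannot do this safely: it would
# also rewrite matches inside identifiers and string literals.
print(sqlparse.format(raw, keyword_case='upper'))
# SELECT * FROM auth_user WHERE username = 'admin' ORDER BY id
```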
Diffstat (limited to 'debug_toolbar/utils/sqlparse/tokens.py')
| mode | file | lines |
|---|---|---|
| -rw-r--r-- | debug_toolbar/utils/sqlparse/tokens.py | 131 |
1 file changed, 131 insertions, 0 deletions
```diff
diff --git a/debug_toolbar/utils/sqlparse/tokens.py b/debug_toolbar/utils/sqlparse/tokens.py
new file mode 100644
index 0000000..2c63c41
--- /dev/null
+++ b/debug_toolbar/utils/sqlparse/tokens.py
@@ -0,0 +1,131 @@
+# Copyright (C) 2008 Andi Albrecht, albrecht.andi@gmail.com
+#
+# This module is part of python-sqlparse and is released under
+# the BSD License: http://www.opensource.org/licenses/bsd-license.php.
+
+# The Token implementation is based on pygment's token system written
+# by Georg Brandl.
+# http://pygments.org/
+
+"""Tokens"""
+
+try:
+    set
+except NameError:
+    from sets import Set as set
+
+
+class _TokenType(tuple):
+    parent = None
+
+    def split(self):
+        buf = []
+        node = self
+        while node is not None:
+            buf.append(node)
+            node = node.parent
+        buf.reverse()
+        return buf
+
+    def __init__(self, *args):
+        # no need to call super.__init__
+        self.subtypes = set()
+
+    def __contains__(self, val):
+        return self is val or (
+            type(val) is self.__class__ and
+            val[:len(self)] == self
+        )
+
+    def __getattr__(self, val):
+        if not val or not val[0].isupper():
+            return tuple.__getattribute__(self, val)
+        new = _TokenType(self + (val,))
+        setattr(self, val, new)
+        self.subtypes.add(new)
+        new.parent = self
+        return new
+
+    def __hash__(self):
+        return hash(tuple(self))
+
+    def __repr__(self):
+        return 'Token' + (self and '.' or '') + '.'.join(self)
+
+
+Token       = _TokenType()
+
+# Special token types
+Text        = Token.Text
+Whitespace  = Text.Whitespace
+Newline     = Whitespace.Newline
+Error       = Token.Error
+# Text that doesn't belong to this lexer (e.g. HTML in PHP)
+Other       = Token.Other
+
+# Common token types for source code
+Keyword     = Token.Keyword
+Name        = Token.Name
+Literal     = Token.Literal
+String      = Literal.String
+Number      = Literal.Number
+Punctuation = Token.Punctuation
+Operator    = Token.Operator
+Wildcard    = Token.Wildcard
+Comment     = Token.Comment
+Assignment  = Token.Assignement

+# Generic types for non-source code
+Generic     = Token.Generic
+
+# String and some others are not direct childs of Token.
+# alias them:
+Token.Token = Token
+Token.String = String
+Token.Number = Number
+
+# SQL specific tokens
+DML = Keyword.DML
+DDL = Keyword.DDL
+Command = Keyword.Command
+
+Group = Token.Group
+Group.Parenthesis = Token.Group.Parenthesis
+Group.Comment = Token.Group.Comment
+Group.Where = Token.Group.Where
+
+
+def is_token_subtype(ttype, other):
+    """
+    Return True if ``ttype`` is a subtype of ``other``.
+
+    exists for backwards compatibility. use ``ttype in other`` now.
+    """
+    return ttype in other
+
+
+def string_to_tokentype(s):
+    """
+    Convert a string into a token type::
+
+        >>> string_to_token('String.Double')
+        Token.Literal.String.Double
+        >>> string_to_token('Token.Literal.Number')
+        Token.Literal.Number
+        >>> string_to_token('')
+        Token
+
+    Tokens that are already tokens are returned unchanged:
+
+        >>> string_to_token(String)
+        Token.Literal.String
+    """
+    if isinstance(s, _TokenType):
+        return s
+    if not s:
+        return Token
+    node = Token
+    for item in s.split('.'):
+        node = getattr(node, item)
+    return node
+
```
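The core trick in this module is that the token hierarchy is built lazily: `__getattr__` creates and caches a child type (a longer tuple) the first time it is looked up, and `__contains__` implements the subtype test as tuple-prefix matching. A short behavior sketch for the types defined above, assuming the vendored import path from this commit:

```python
# Behavior sketch for the _TokenType hierarchy above (assumed import path).
from debug_toolbar.utils.sqlparse import tokens

# Attribute access builds (and caches) subtypes on the fly via __getattr__:
dml = tokens.Keyword.DML
print(repr(dml))                    # Token.Keyword.DML

# Subtype checks are tuple-prefix containment, and only hold one way:
print(dml in tokens.Keyword)        # True  -- DML sits below Keyword
print(tokens.Keyword in dml)        # False
print(tokens.is_token_subtype(dml, tokens.Keyword))  # True (legacy helper)

# split() walks parent links back up to the root:
print(dml.split())                  # [Token, Token.Keyword, Token.Keyword.DML]

# Dotted strings resolve to the same cached objects:
print(tokens.string_to_tokentype('Keyword.DML') is dml)  # True
```

Because each `_TokenType` is just a tuple of names, prefix comparison is plain tuple slicing, and the explicit `__hash__` keeps every type usable as a dictionary key.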
