# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import imp
import os.path
import sys
import unittest

# Disable lint check for finding modules:
# pylint: disable=F0401

def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
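  # For example (hypothetical layout): if this file lived at
  # /src/mojo/public/tools/bindings/pylib/mojom/parse/lexer_unittest.py,
  # then _GetDirAbove("mojo") would return /src.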
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    assert tail  # Fails if we hit the filesystem root without finding |dirname|.
    if tail == dirname:
      return path

try:
  imp.find_module("ply")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("mojo"), "third_party"))
from ply import lex

try:
  imp.find_module("mojom")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
import mojom.parse.lexer


# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
def _LexTokenEq(self, other):
  return self.type == other.type and self.value == other.value and \
         self.lineno == other.lineno and self.lexpos == other.lexpos
setattr(lex.LexToken, '__eq__', _LexTokenEq)
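# Note that only __eq__ is patched; under Python 2, != on LexTokens still
# compares by identity. The assertions below only ever use ==, so this is
# sufficient.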


def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
  but lexpos is 0-based.)"""
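  # E.g., _MakeLexToken("NAME", "abcd") builds a token that compares equal to
  # the token the lexer produces for the input "abcd".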
  rv = lex.LexToken()
  rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
  return rv


def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes a LexToken for the given keyword."""
  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)


class LexerTest(unittest.TestCase):
  """Tests |mojom.parse.lexer.Lexer|."""

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # Clone all lexer instances from this one, since making a lexer is slow.
    self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))

  def testValidKeywords(self):
    """Tests valid keywords."""
    self.assertEquals(self._SingleTokenForInput("handle"),
                      _MakeLexTokenForKeyword("handle"))
    self.assertEquals(self._SingleTokenForInput("import"),
                      _MakeLexTokenForKeyword("import"))
    self.assertEquals(self._SingleTokenForInput("module"),
                      _MakeLexTokenForKeyword("module"))
    self.assertEquals(self._SingleTokenForInput("struct"),
                      _MakeLexTokenForKeyword("struct"))
    self.assertEquals(self._SingleTokenForInput("interface"),
                      _MakeLexTokenForKeyword("interface"))
    self.assertEquals(self._SingleTokenForInput("enum"),
                      _MakeLexTokenForKeyword("enum"))
    self.assertEquals(self._SingleTokenForInput("const"),
                      _MakeLexTokenForKeyword("const"))
    self.assertEquals(self._SingleTokenForInput("true"),
                      _MakeLexTokenForKeyword("true"))
    self.assertEquals(self._SingleTokenForInput("false"),
                      _MakeLexTokenForKeyword("false"))
    self.assertEquals(self._SingleTokenForInput("default"),
                      _MakeLexTokenForKeyword("default"))

  def testValidIdentifiers(self):
    """Tests valid identifiers."""
    self.assertEquals(self._SingleTokenForInput("abcd"),
                      _MakeLexToken("NAME", "abcd"))
    self.assertEquals(self._SingleTokenForInput("AbC_d012_"),
                      _MakeLexToken("NAME", "AbC_d012_"))
    self.assertEquals(self._SingleTokenForInput("_0123"),
                      _MakeLexToken("NAME", "_0123"))

  def testInvalidIdentifiers(self):
    """Tests that invalid identifiers raise LexError."""
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("$abc")
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("a$bc")

  def testDecimalIntegerConstants(self):
    """Tests decimal integer constants."""
    self.assertEquals(self._SingleTokenForInput("0"),
                      _MakeLexToken("INT_CONST_DEC", "0"))
    self.assertEquals(self._SingleTokenForInput("1"),
                      _MakeLexToken("INT_CONST_DEC", "1"))
    self.assertEquals(self._SingleTokenForInput("123"),
                      _MakeLexToken("INT_CONST_DEC", "123"))
    self.assertEquals(self._SingleTokenForInput("10"),
                      _MakeLexToken("INT_CONST_DEC", "10"))

  def testValidTokens(self):
    """Tests valid tokens (which aren't tested elsewhere)."""
    # Keywords tested in |testValidKeywords|.
    # NAME tested in |testValidIdentifiers|.
    self.assertEquals(self._SingleTokenForInput("@123"),
                      _MakeLexToken("ORDINAL", "@123"))
    self.assertEquals(self._SingleTokenForInput("456"),
                      _MakeLexToken("INT_CONST_DEC", "456"))
    self.assertEquals(self._SingleTokenForInput("0x01aB2eF3"),
                      _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
    self.assertEquals(self._SingleTokenForInput("123.456"),
                      _MakeLexToken("FLOAT_CONST", "123.456"))
    self.assertEquals(self._SingleTokenForInput("'x'"),
                      _MakeLexToken("CHAR_CONST", "'x'"))
    self.assertEquals(self._SingleTokenForInput("\"hello\""),
                      _MakeLexToken("STRING_LITERAL", "\"hello\""))
    self.assertEquals(self._SingleTokenForInput("+"),
                      _MakeLexToken("PLUS", "+"))
    self.assertEquals(self._SingleTokenForInput("-"),
                      _MakeLexToken("MINUS", "-"))
    self.assertEquals(self._SingleTokenForInput("&"),
                      _MakeLexToken("AMP", "&"))
    self.assertEquals(self._SingleTokenForInput("="),
                      _MakeLexToken("EQUALS", "="))
    self.assertEquals(self._SingleTokenForInput("=>"),
                      _MakeLexToken("RESPONSE", "=>"))
    self.assertEquals(self._SingleTokenForInput("("),
                      _MakeLexToken("LPAREN", "("))
    self.assertEquals(self._SingleTokenForInput(")"),
                      _MakeLexToken("RPAREN", ")"))
    self.assertEquals(self._SingleTokenForInput("["),
                      _MakeLexToken("LBRACKET", "["))
    self.assertEquals(self._SingleTokenForInput("]"),
                      _MakeLexToken("RBRACKET", "]"))
    self.assertEquals(self._SingleTokenForInput("{"),
                      _MakeLexToken("LBRACE", "{"))
    self.assertEquals(self._SingleTokenForInput("}"),
                      _MakeLexToken("RBRACE", "}"))
    self.assertEquals(self._SingleTokenForInput("<"),
                      _MakeLexToken("LANGLE", "<"))
    self.assertEquals(self._SingleTokenForInput(">"),
                      _MakeLexToken("RANGLE", ">"))
    self.assertEquals(self._SingleTokenForInput(";"),
                      _MakeLexToken("SEMI", ";"))
    self.assertEquals(self._SingleTokenForInput(","),
                      _MakeLexToken("COMMA", ","))
    self.assertEquals(self._SingleTokenForInput("."),
                      _MakeLexToken("DOT", "."))

  def _TokensForInput(self, input_string):
    """Gets a list of tokens for the given input string."""
    lexer = self._zygote_lexer.clone()
    lexer.input(input_string)
    rv = []
    while True:
      tok = lexer.token()
      if not tok:
        return rv
      rv.append(tok)

  def _SingleTokenForInput(self, input_string):
    """Gets the single token for the given input string. (Raises an exception if
    the input string does not result in exactly one token.)"""
    toks = self._TokensForInput(input_string)
    assert len(toks) == 1
    return toks[0]


if __name__ == "__main__":
  unittest.main()