# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import imp
import os.path
import sys
import unittest

def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    assert tail
    if tail == dirname:
      return path

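# The path surgery below assumes a checkout layout (illustrative; the exact
# paths may vary by checkout) in which ply lives in a third_party directory
# that is a sibling of the top-level "mojo" directory, and the mojom package
# lives under an ancestor "pylib" directory of this file.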
sys.path.insert(1, os.path.join(_GetDirAbove("mojo"), "third_party"))
from ply import lex

try:
  imp.find_module("mojom")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
import mojom.parse.lexer


# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
def _LexTokenEq(self, other):
  return (self.type == other.type and self.value == other.value and
          self.lineno == other.lineno and self.lexpos == other.lexpos)
setattr(lex.LexToken, '__eq__', _LexTokenEq)
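# (Illustrative consequence, not itself a test: with the patch in place, two
# independently constructed tokens with identical fields compare equal, e.g.
# _MakeLexToken("NAME", "abc") == _MakeLexToken("NAME", "abc").)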


def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
  but lexpos is 0-based.)"""
  rv = lex.LexToken()
  rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
  return rv


def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes a LexToken for the given keyword."""
  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)
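# For example, _MakeLexTokenForKeyword("struct") produces a token of type
# "STRUCT" with value "struct" (at the default lineno 1 and lexpos 0).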


class LexerTest(unittest.TestCase):
  """Tests |mojom.parse.lexer.Lexer|."""

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # Clone all lexer instances from this one, since making a lexer is slow.
    self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
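    # (ply's clone() makes a shallow copy that reuses the already-compiled
    # token rules, so per-test lexers are cheap to create.)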

  def testValidKeywords(self):
    """Tests valid keywords."""
    self.assertEqual(self._SingleTokenForInput("handle"),
                     _MakeLexTokenForKeyword("handle"))
    self.assertEqual(self._SingleTokenForInput("import"),
                     _MakeLexTokenForKeyword("import"))
    self.assertEqual(self._SingleTokenForInput("module"),
                     _MakeLexTokenForKeyword("module"))
    self.assertEqual(self._SingleTokenForInput("struct"),
                     _MakeLexTokenForKeyword("struct"))
    self.assertEqual(self._SingleTokenForInput("union"),
                     _MakeLexTokenForKeyword("union"))
    self.assertEqual(self._SingleTokenForInput("interface"),
                     _MakeLexTokenForKeyword("interface"))
    self.assertEqual(self._SingleTokenForInput("enum"),
                     _MakeLexTokenForKeyword("enum"))
    self.assertEqual(self._SingleTokenForInput("const"),
                     _MakeLexTokenForKeyword("const"))
    self.assertEqual(self._SingleTokenForInput("true"),
                     _MakeLexTokenForKeyword("true"))
    self.assertEqual(self._SingleTokenForInput("false"),
                     _MakeLexTokenForKeyword("false"))
    self.assertEqual(self._SingleTokenForInput("default"),
                     _MakeLexTokenForKeyword("default"))
    self.assertEqual(self._SingleTokenForInput("array"),
                     _MakeLexTokenForKeyword("array"))
    self.assertEqual(self._SingleTokenForInput("map"),
                     _MakeLexTokenForKeyword("map"))
    self.assertEqual(self._SingleTokenForInput("associated"),
                     _MakeLexTokenForKeyword("associated"))

  def testValidIdentifiers(self):
    """Tests valid identifiers."""
    self.assertEqual(self._SingleTokenForInput("abcd"),
                     _MakeLexToken("NAME", "abcd"))
    self.assertEqual(self._SingleTokenForInput("AbC_d012_"),
                     _MakeLexToken("NAME", "AbC_d012_"))
    self.assertEqual(self._SingleTokenForInput("_0123"),
                     _MakeLexToken("NAME", "_0123"))

  def testInvalidIdentifiers(self):
    """Tests that input containing illegal characters raises LexError."""
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("$abc")
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("a$bc")

  def testDecimalIntegerConstants(self):
    """Tests decimal integer constants."""
    self.assertEqual(self._SingleTokenForInput("0"),
                     _MakeLexToken("INT_CONST_DEC", "0"))
    self.assertEqual(self._SingleTokenForInput("1"),
                     _MakeLexToken("INT_CONST_DEC", "1"))
    self.assertEqual(self._SingleTokenForInput("123"),
                     _MakeLexToken("INT_CONST_DEC", "123"))
    self.assertEqual(self._SingleTokenForInput("10"),
                     _MakeLexToken("INT_CONST_DEC", "10"))

  def testValidTokens(self):
    """Tests valid tokens (which aren't tested elsewhere)."""
    # Keywords tested in |testValidKeywords|.
    # NAME tested in |testValidIdentifiers|.
    self.assertEqual(self._SingleTokenForInput("@123"),
                     _MakeLexToken("ORDINAL", "@123"))
    self.assertEqual(self._SingleTokenForInput("456"),
                     _MakeLexToken("INT_CONST_DEC", "456"))
    self.assertEqual(self._SingleTokenForInput("0x01aB2eF3"),
                     _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
    self.assertEqual(self._SingleTokenForInput("123.456"),
                     _MakeLexToken("FLOAT_CONST", "123.456"))
    self.assertEqual(self._SingleTokenForInput("\"hello\""),
                     _MakeLexToken("STRING_LITERAL", "\"hello\""))
    self.assertEqual(self._SingleTokenForInput("+"),
                     _MakeLexToken("PLUS", "+"))
    self.assertEqual(self._SingleTokenForInput("-"),
                     _MakeLexToken("MINUS", "-"))
    self.assertEqual(self._SingleTokenForInput("&"),
                     _MakeLexToken("AMP", "&"))
    self.assertEqual(self._SingleTokenForInput("?"),
                     _MakeLexToken("QSTN", "?"))
    self.assertEqual(self._SingleTokenForInput("="),
                     _MakeLexToken("EQUALS", "="))
    self.assertEqual(self._SingleTokenForInput("=>"),
                     _MakeLexToken("RESPONSE", "=>"))
    self.assertEqual(self._SingleTokenForInput("("),
                     _MakeLexToken("LPAREN", "("))
    self.assertEqual(self._SingleTokenForInput(")"),
                     _MakeLexToken("RPAREN", ")"))
    self.assertEqual(self._SingleTokenForInput("["),
                     _MakeLexToken("LBRACKET", "["))
    self.assertEqual(self._SingleTokenForInput("]"),
                     _MakeLexToken("RBRACKET", "]"))
    self.assertEqual(self._SingleTokenForInput("{"),
                     _MakeLexToken("LBRACE", "{"))
    self.assertEqual(self._SingleTokenForInput("}"),
                     _MakeLexToken("RBRACE", "}"))
    self.assertEqual(self._SingleTokenForInput("<"),
                     _MakeLexToken("LANGLE", "<"))
    self.assertEqual(self._SingleTokenForInput(">"),
                     _MakeLexToken("RANGLE", ">"))
    self.assertEqual(self._SingleTokenForInput(";"),
                     _MakeLexToken("SEMI", ";"))
    self.assertEqual(self._SingleTokenForInput(","),
                     _MakeLexToken("COMMA", ","))
    self.assertEqual(self._SingleTokenForInput("."),
                     _MakeLexToken("DOT", "."))

  def _TokensForInput(self, input_string):
    """Gets a list of tokens for the given input string."""
    lexer = self._zygote_lexer.clone()
    lexer.input(input_string)
    rv = []
    while True:
      tok = lexer.token()
      if not tok:
        # |token()| returns None once the input is exhausted.
        return rv
      rv.append(tok)
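  # Example (derived from the token rules exercised above): for the input
  # "a;" |_TokensForInput| returns a NAME token followed by a SEMI token.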

  def _SingleTokenForInput(self, input_string):
    """Gets the single token for the given input string. (Raises an exception
    if the input string does not result in exactly one token.)"""
    toks = self._TokensForInput(input_string)
    assert len(toks) == 1
    return toks[0]


if __name__ == "__main__":
  unittest.main()