# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import imp
import os.path
import sys
import unittest


def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    assert tail
    if tail == dirname:
      return path

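# For example (a sketch, assuming this file lives at something like
# .../mojo/public/tools/bindings/pylib/mojom/parse/lexer_unittest.py):
# _GetDirAbove("pylib") splits off lexer_unittest.py, parse, mojom, and then
# pylib, returning .../bindings, i.e. the directory containing |pylib|.
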
# If |ply| isn't importable as-is, assume we're running from the source tree
# and look for it in third_party.
try:
  imp.find_module("ply")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("mojo"), "third_party"))
from ply import lex

# Likewise for the |mojom| package, which lives under pylib.
try:
  imp.find_module("mojom")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
import mojom.parse.lexer


# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
def _LexTokenEq(self, other):
  return self.type == other.type and self.value == other.value and \
         self.lineno == other.lineno and self.lexpos == other.lexpos
setattr(lex.LexToken, '__eq__', _LexTokenEq)
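# For example (a sketch): with the patch applied, two independently-built
# tokens with identical fields now compare equal (previously, equality was
# identity-based):
#   a = lex.LexToken(); a.type, a.value, a.lineno, a.lexpos = "NAME", "x", 1, 0
#   b = lex.LexToken(); b.type, b.value, b.lineno, b.lexpos = "NAME", "x", 1, 0
#   assert a == b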


def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
  but lexpos is 0-based.)"""
  rv = lex.LexToken()
  rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
  return rv

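# For example (a sketch): _MakeLexToken("SEMI", ";", lineno=2, lexpos=10)
# describes a ';' on the second input line at absolute offset 10; ply uses the
# same 1-based lineno / 0-based lexpos convention.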

def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes a LexToken for the given keyword."""
  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)

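# For example (a sketch): _MakeLexTokenForKeyword("Struct") yields a token of
# type "STRUCT" with value "struct".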

class LexerTest(unittest.TestCase):
  """Tests |mojom.parse.lexer.Lexer|."""

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # Clone all lexer instances from this one, since making a lexer is slow.
    self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
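    # (ply's clone() reuses this lexer's already-compiled regexes, so clones
    # are cheap, while each lex.lex() call would rebuild them from scratch.)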

  def testValidKeywords(self):
    """Tests valid keywords."""
    self.assertEquals(self._SingleTokenForInput("handle"),
                      _MakeLexTokenForKeyword("handle"))
    self.assertEquals(self._SingleTokenForInput("import"),
                      _MakeLexTokenForKeyword("import"))
    self.assertEquals(self._SingleTokenForInput("module"),
                      _MakeLexTokenForKeyword("module"))
    self.assertEquals(self._SingleTokenForInput("struct"),
                      _MakeLexTokenForKeyword("struct"))
    self.assertEquals(self._SingleTokenForInput("union"),
                      _MakeLexTokenForKeyword("union"))
    self.assertEquals(self._SingleTokenForInput("interface"),
                      _MakeLexTokenForKeyword("interface"))
    self.assertEquals(self._SingleTokenForInput("enum"),
                      _MakeLexTokenForKeyword("enum"))
    self.assertEquals(self._SingleTokenForInput("const"),
                      _MakeLexTokenForKeyword("const"))
    self.assertEquals(self._SingleTokenForInput("true"),
                      _MakeLexTokenForKeyword("true"))
    self.assertEquals(self._SingleTokenForInput("false"),
                      _MakeLexTokenForKeyword("false"))
    self.assertEquals(self._SingleTokenForInput("default"),
                      _MakeLexTokenForKeyword("default"))
    self.assertEquals(self._SingleTokenForInput("array"),
                      _MakeLexTokenForKeyword("array"))
    self.assertEquals(self._SingleTokenForInput("map"),
                      _MakeLexTokenForKeyword("map"))
    self.assertEquals(self._SingleTokenForInput("associated"),
                      _MakeLexTokenForKeyword("associated"))

  def testValidIdentifiers(self):
    """Tests identifiers."""
    self.assertEquals(self._SingleTokenForInput("abcd"),
                      _MakeLexToken("NAME", "abcd"))
    self.assertEquals(self._SingleTokenForInput("AbC_d012_"),
                      _MakeLexToken("NAME", "AbC_d012_"))
    self.assertEquals(self._SingleTokenForInput("_0123"),
                      _MakeLexToken("NAME", "_0123"))

  def testInvalidIdentifiers(self):
    """Tests that invalid identifiers are rejected with a LexError."""
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("$abc")
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("a$bc")

  def testDecimalIntegerConstants(self):
    """Tests decimal integer constants."""
    self.assertEquals(self._SingleTokenForInput("0"),
                      _MakeLexToken("INT_CONST_DEC", "0"))
    self.assertEquals(self._SingleTokenForInput("1"),
                      _MakeLexToken("INT_CONST_DEC", "1"))
    self.assertEquals(self._SingleTokenForInput("123"),
                      _MakeLexToken("INT_CONST_DEC", "123"))
    self.assertEquals(self._SingleTokenForInput("10"),
                      _MakeLexToken("INT_CONST_DEC", "10"))

  def testValidTokens(self):
    """Tests valid tokens (which aren't tested elsewhere)."""
    # Keywords tested in |testValidKeywords|.
    # NAME tested in |testValidIdentifiers|.
    self.assertEquals(self._SingleTokenForInput("@123"),
                      _MakeLexToken("ORDINAL", "@123"))
    self.assertEquals(self._SingleTokenForInput("456"),
                      _MakeLexToken("INT_CONST_DEC", "456"))
    self.assertEquals(self._SingleTokenForInput("0x01aB2eF3"),
                      _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
    self.assertEquals(self._SingleTokenForInput("123.456"),
                      _MakeLexToken("FLOAT_CONST", "123.456"))
    self.assertEquals(self._SingleTokenForInput("\"hello\""),
                      _MakeLexToken("STRING_LITERAL", "\"hello\""))
    self.assertEquals(self._SingleTokenForInput("+"),
                      _MakeLexToken("PLUS", "+"))
    self.assertEquals(self._SingleTokenForInput("-"),
                      _MakeLexToken("MINUS", "-"))
    self.assertEquals(self._SingleTokenForInput("&"),
                      _MakeLexToken("AMP", "&"))
    self.assertEquals(self._SingleTokenForInput("?"),
                      _MakeLexToken("QSTN", "?"))
    self.assertEquals(self._SingleTokenForInput("="),
                      _MakeLexToken("EQUALS", "="))
    self.assertEquals(self._SingleTokenForInput("=>"),
                      _MakeLexToken("RESPONSE", "=>"))
    self.assertEquals(self._SingleTokenForInput("("),
                      _MakeLexToken("LPAREN", "("))
    self.assertEquals(self._SingleTokenForInput(")"),
                      _MakeLexToken("RPAREN", ")"))
    self.assertEquals(self._SingleTokenForInput("["),
                      _MakeLexToken("LBRACKET", "["))
    self.assertEquals(self._SingleTokenForInput("]"),
                      _MakeLexToken("RBRACKET", "]"))
    self.assertEquals(self._SingleTokenForInput("{"),
                      _MakeLexToken("LBRACE", "{"))
    self.assertEquals(self._SingleTokenForInput("}"),
                      _MakeLexToken("RBRACE", "}"))
    self.assertEquals(self._SingleTokenForInput("<"),
                      _MakeLexToken("LANGLE", "<"))
    self.assertEquals(self._SingleTokenForInput(">"),
                      _MakeLexToken("RANGLE", ">"))
    self.assertEquals(self._SingleTokenForInput(";"),
                      _MakeLexToken("SEMI", ";"))
    self.assertEquals(self._SingleTokenForInput(","),
                      _MakeLexToken("COMMA", ","))
    self.assertEquals(self._SingleTokenForInput("."),
                      _MakeLexToken("DOT", "."))

  def _TokensForInput(self, input_string):
    """Gets a list of tokens for the given input string."""
    lexer = self._zygote_lexer.clone()
    lexer.input(input_string)
    rv = []
    while True:
      tok = lexer.token()
      if not tok:
        return rv
      rv.append(tok)

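  # For example (a sketch): _TokensForInput("module;") returns
  # [_MakeLexToken("MODULE", "module"), _MakeLexToken("SEMI", ";", lexpos=6)].
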
  def _SingleTokenForInput(self, input_string):
    """Gets the single token for the given input string. (Raises an exception
    if the input string does not result in exactly one token.)"""
    toks = self._TokensForInput(input_string)
    assert len(toks) == 1
    return toks[0]


if __name__ == "__main__":
  unittest.main()
    193