#!/usr/bin/env python
#
# Copyright 2012 The Closure Linter Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

     16 """Unit tests for the scopeutil module."""

# Allow non-Google copyright
# pylint: disable=g-bad-file-header

__author__ = ('nnaze@google.com (Nathan Naze)')

import unittest as googletest

from closure_linter import ecmametadatapass
from closure_linter import javascripttokens
from closure_linter import testutil
from closure_linter import tokenutil


class FakeToken(object):
  """Minimal token stand-in; tests assign just the attributes they need."""


class TokenUtilTest(googletest.TestCase):

  def testGetTokenRange(self):

    a = FakeToken()
    b = FakeToken()
    c = FakeToken()
    d = FakeToken()
    e = FakeToken()

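    # Chain a -> b -> c -> d; e is intentionally never linked in.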
    a.next = b
    b.next = c
    c.next = d

    self.assertEquals([a, b, c, d], tokenutil.GetTokenRange(a, d))

    # This is an error as e does not come after a in the token chain.
    self.assertRaises(Exception, lambda: tokenutil.GetTokenRange(a, e))

  def testTokensToString(self):

    a = FakeToken()
    b = FakeToken()
    c = FakeToken()
    d = FakeToken()
    e = FakeToken()

    a.string = 'aaa'
    b.string = 'bbb'
    c.string = 'ccc'
    d.string = 'ddd'
    e.string = 'eee'

    a.line_number = 5
    b.line_number = 6
    c.line_number = 6
    d.line_number = 10
    e.line_number = 11

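    # The expected string concatenates same-line tokens and adds a newline per
    # line-number step, so the jump from line 6 to line 10 yields blank lines.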
    self.assertEquals(
        'aaa\nbbbccc\n\n\n\nddd\neee',
        tokenutil.TokensToString([a, b, c, d, e]))

    self.assertEquals(
        'ddd\neee\naaa\nbbbccc',
        tokenutil.TokensToString([d, e, a, b, c]),
        'Neighboring tokens not in line_number order should have a newline '
        'between them.')

  def testGetPreviousCodeToken(self):

    tokens = testutil.TokenizeSource("""
start1. // comment
    /* another comment */
    end1
""")

    def _GetTokenStartingWith(token_starts_with):
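      """Returns the first token starting with the given prefix, or None."""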
      for t in tokens:
        if t.string.startswith(token_starts_with):
          return t

    self.assertEquals(
        None,
        tokenutil.GetPreviousCodeToken(_GetTokenStartingWith('start1')))

    self.assertEquals(
        '.',
        tokenutil.GetPreviousCodeToken(_GetTokenStartingWith('end1')).string)

    self.assertEquals(
        'start1',
        tokenutil.GetPreviousCodeToken(_GetTokenStartingWith('.')).string)

  def testGetNextCodeToken(self):

    tokens = testutil.TokenizeSource("""
start1. // comment
    /* another comment */
    end1
""")

    def _GetTokenStartingWith(token_starts_with):
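      """Returns the first token starting with the given prefix, or None."""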
      for t in tokens:
        if t.string.startswith(token_starts_with):
          return t

    self.assertEquals(
        '.',
        tokenutil.GetNextCodeToken(_GetTokenStartingWith('start1')).string)

    self.assertEquals(
        'end1',
        tokenutil.GetNextCodeToken(_GetTokenStartingWith('.')).string)

    self.assertEquals(
        None,
        tokenutil.GetNextCodeToken(_GetTokenStartingWith('end1')))

  def testGetIdentifierStart(self):

    tokens = testutil.TokenizeSource("""
start1 . // comment
    prototype. /* another comment */
    end1

['edge'][case].prototype.
    end2 = function() {}
""")

    def _GetTokenStartingWith(token_starts_with):
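      """Returns the first token starting with the given prefix, or None."""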
      for t in tokens:
        if t.string.startswith(token_starts_with):
          return t

    self.assertEquals(
        'start1',
        tokenutil.GetIdentifierStart(_GetTokenStartingWith('end1')).string)

    self.assertEquals(
        'start1',
        tokenutil.GetIdentifierStart(_GetTokenStartingWith('start1')).string)

    self.assertEquals(
        None,
        tokenutil.GetIdentifierStart(_GetTokenStartingWith('end2')))

  def testInsertTokenBefore(self):

    self.AssertInsertTokenAfterBefore(False)

  def testInsertTokenAfter(self):

    self.AssertInsertTokenAfterBefore(True)

  def AssertInsertTokenAfterBefore(self, after):

    new_token = javascripttokens.JavaScriptToken(
        'a', javascripttokens.JavaScriptTokenType.IDENTIFIER, 1, 1)

    existing_token1 = javascripttokens.JavaScriptToken(
        'var', javascripttokens.JavaScriptTokenType.KEYWORD, 1, 1)
    existing_token1.start_index = 0
    existing_token1.metadata = ecmametadatapass.EcmaMetaData()

    existing_token2 = javascripttokens.JavaScriptToken(
        ' ', javascripttokens.JavaScriptTokenType.WHITESPACE, 1, 1)
    existing_token2.start_index = 3
    existing_token2.metadata = ecmametadatapass.EcmaMetaData()
    existing_token2.metadata.last_code = existing_token1

    existing_token1.next = existing_token2
    existing_token2.previous = existing_token1

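    # Either call should splice new_token between existing_token1 and
    # existing_token2; the assertions below hold for both directions.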
    if after:
      tokenutil.InsertTokenAfter(new_token, existing_token1)
    else:
      tokenutil.InsertTokenBefore(new_token, existing_token2)

    self.assertEquals(existing_token1, new_token.previous)
    self.assertEquals(existing_token2, new_token.next)

    self.assertEquals(new_token, existing_token1.next)
    self.assertEquals(new_token, existing_token2.previous)

    self.assertEquals(existing_token1, new_token.metadata.last_code)
    self.assertEquals(new_token, existing_token2.metadata.last_code)

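    # The one-character token 'a' takes over start_index 3 and pushes the
    # whitespace token from 3 to 4.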
    self.assertEquals(0, existing_token1.start_index)
    self.assertEquals(3, new_token.start_index)
    self.assertEquals(4, existing_token2.start_index)

  def testGetIdentifierForToken(self):

    tokens = testutil.TokenizeSource("""
start1.abc.def.prototype.
  onContinuedLine

(start2.abc.def
  .hij.klm
  .nop)

start3.abc.def
   .hij = function() {};

// An absurd multi-liner.
start4.abc.def.
   hij.
   klm = function() {};

start5 . aaa . bbb . ccc
  shouldntBePartOfThePreviousSymbol

start6.abc.def ghi.shouldntBePartOfThePreviousSymbol

var start7 = 42;

function start8() {

}

start9.abc. // why is there a comment here?
  def /* another comment */
  shouldntBePart

start10.abc // why is there a comment here?
  .def /* another comment */
  shouldntBePart

start11.abc. middle1.shouldNotBeIdentifier
""")

    def _GetTokenStartingWith(token_starts_with):
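      """Returns the first token starting with the given prefix, or None."""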
      for t in tokens:
        if t.string.startswith(token_starts_with):
          return t

    self.assertEquals(
        'start1.abc.def.prototype.onContinuedLine',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start1')))

    self.assertEquals(
        'start2.abc.def.hij.klm.nop',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start2')))

    self.assertEquals(
        'start3.abc.def.hij',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start3')))

    self.assertEquals(
        'start4.abc.def.hij.klm',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start4')))

    self.assertEquals(
        'start5.aaa.bbb.ccc',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start5')))

    self.assertEquals(
        'start6.abc.def',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start6')))

    self.assertEquals(
        'start7',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start7')))

    self.assertEquals(
        'start8',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start8')))

    self.assertEquals(
        'start9.abc.def',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start9')))

    self.assertEquals(
        'start10.abc.def',
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('start10')))

    self.assertIsNone(
        tokenutil.GetIdentifierForToken(_GetTokenStartingWith('middle1')))


if __name__ == '__main__':
  googletest.main()