# File indexing completed on 2024-05-05 05:43:50

0001 from unittest import TestCase
0002 
0003 from doxyqml.lexer import Lexer, Token, IMPORT, PRAGMA, STRING, COMMENT, KEYWORD, ELEMENT, \
0004     BLOCK_START, BLOCK_END, COMPONENT, CHAR
0005 
0006 
class LexerTestCase(TestCase):
    """Tests for tokenization by doxyqml.lexer.Lexer.

    Each expected Token is built as Token(type, text, idx, column), where
    `idx` appears to be the token's offset in the whole source string and
    `column` its offset within its line (the two coincide for tokens on the
    first line) — inferred from the expected values below; confirm against
    the Token definition in doxyqml.lexer.
    """

    def _tokenize(self, src):
        """Run the lexer over `src` and return the produced token list."""
        lexer = Lexer(src)
        lexer.tokenize()
        return lexer.tokens

    def test_import(self):
        tokens = self._tokenize("import foo\n import bar")
        self.assertEqual(tokens[0], Token(IMPORT, "import foo", 0, 0))
        # Second import starts at offset 12 (after "\n "), column 1 on line 1.
        self.assertEqual(tokens[1], Token(IMPORT, "import bar", 12, 1))

    def test_pragma(self):
        tokens = self._tokenize("pragma foo\n pragma bar")
        self.assertEqual(tokens[0], Token(PRAGMA, "pragma foo", 0, 0))
        self.assertEqual(tokens[1], Token(PRAGMA, "pragma bar", 12, 1))

    def test_string(self):
        # Raw string: the lexer must keep escape sequences (\n, \") verbatim
        # inside string tokens.
        tokens = self._tokenize(r'"hello" "world!" "new\nline" "qu\"ote"')
        self.assertEqual(tokens[0], Token(STRING, '"hello"', 0, 0))
        self.assertEqual(tokens[1], Token(STRING, '"world!"', 8, 8))
        self.assertEqual(tokens[2], Token(STRING, r'"new\nline"', 17, 17))
        self.assertEqual(tokens[3], Token(STRING, r'"qu\"ote"', 29, 29))

    def test_single_line_comment(self):
        tokens = self._tokenize("// hello\nimport bob")
        self.assertEqual(tokens[0], Token(COMMENT, '// hello', 0, 0))
        # The import sits at offset 9 but column 0 of the second line.
        self.assertEqual(tokens[1], Token(IMPORT, 'import bob', 9, 0))

    def test_multi_line_comment(self):
        tokens = self._tokenize("/* hello\nworld *//* good bye\nworld */")
        self.assertEqual(tokens[0], Token(COMMENT, '/* hello\nworld */', 0, 0))
        # Second comment begins right after the first, at column 8 of line 1.
        self.assertEqual(tokens[1], Token(COMMENT, '/* good bye\nworld */', 17, 8))

    def test_property_named_property(self):
        # "property" is a keyword in declaration position, but a plain
        # element when used as the property's name.
        tokens = self._tokenize("Item { property var property }")
        self.assertEqual(tokens[0], Token(COMPONENT, 'Item', 0, 0))
        self.assertEqual(tokens[1], Token(BLOCK_START, '{', 5, 5))
        self.assertEqual(tokens[2], Token(KEYWORD, 'property', 7, 7))
        self.assertEqual(tokens[3], Token(ELEMENT, 'var', 16, 16))
        self.assertEqual(tokens[4], Token(ELEMENT, 'property', 20, 20))

    def test_curly_brackets_in_function(self):
        # The '}' inside the string literal must not close the function body;
        # only the final '}' at offset 31 ends the block.
        tokens = self._tokenize("function foo(){ var bar = ' }' }")
        self.assertEqual(tokens[9], Token(BLOCK_END, '}', 31, 31))