# File indexing completed on 2024-04-14 05:34:18
import doxyqml.lexer as lexer

from doxyqml.qmlclass import QmlComponent, QmlArgument, QmlEnum, QmlEnumerator, QmlProperty, QmlFunction, QmlSignal, QmlAttribute


class QmlParserError(Exception):
    """Base error for QML parsing failures; carries the offending token."""
    def __init__(self, msg, token):
        Exception.__init__(self, msg)
        self.token = token


class QmlParserUnexpectedTokenError(QmlParserError):
    """Raised when a token appears in a position the grammar does not allow."""
    def __init__(self, token):
        QmlParserError.__init__(self, "Unexpected token: {}".format(str(token)), token)


def parse_class_definition(reader, cls, parse_sub_classes=True):
    """Parse the `{ ... }` body of a component and fill `cls` with its elements.

    Comments are buffered one at a time in `last_comment_token` so that a
    comment immediately preceding a keyword/component/attribute becomes its
    doc; an orphaned comment is added to the class as a free-standing element.

    Raises QmlParserError if the body does not start with '{'.
    """
    token = reader.consume_wo_comments()
    if token.type != lexer.BLOCK_START:
        raise QmlParserError("Expected '{' after base class name", token)
    last_comment_token = None
    while not reader.at_end():
        token = reader.consume()
        if is_comment_token(token):
            # A new comment displaces the buffered one; the old one was
            # attached to nothing, so emit it as a stand-alone element.
            if last_comment_token:
                cls.add_element(last_comment_token.value)
            last_comment_token = token
        elif token.type == lexer.KEYWORD:
            parse_class_content(reader, cls, token, last_comment_token)
            last_comment_token = None
        elif token.type == lexer.COMPONENT and parse_sub_classes:
            parse_class_component(reader, cls, token, last_comment_token)
            last_comment_token = None
        elif token.type == lexer.ATTRIBUTE:
            parse_class_attribute(reader, cls, token, last_comment_token)
            last_comment_token = None
        elif token.type == lexer.BLOCK_START:
            # Anonymous nested block (e.g. a binding body): not documented.
            skip_block(reader)
        elif token.type == lexer.BLOCK_END:
            break
    if last_comment_token:
        cls.add_element(last_comment_token.value)


def parse_class_content(reader, cls, token, doc_token):
    """Dispatch on a QML keyword (property/function/signal/enum) and add the
    parsed element to `cls`, attaching `doc_token` (if any) as its doc.

    Elements whose name starts with '_' are treated as private and skipped.

    Raises QmlParserError for an unknown keyword.
    """
    keyword = token.value
    if keyword.endswith("property"):
        # Covers "property", "default property", "readonly property", ...
        obj = parse_property(reader, keyword)
    elif keyword == "function":
        obj = parse_function(reader)
    elif keyword == "signal":
        obj = parse_signal(reader)
    elif keyword == "enum":
        obj = parse_enum(reader)
    else:
        raise QmlParserError("Unknown keyword '%s'" % keyword, token)
    if doc_token is not None:
        obj.doc = doc_token.value
        obj.doc_is_inline = (doc_token.type == lexer.ICOMMENT)
    if obj.name.startswith('_'):
        return
    cls.add_element(obj)


def parse_class_component(reader, cls, token, doc_token):
    """Parse a nested component definition and add it to `cls`."""
    obj = QmlComponent(token.value)
    parse_class_definition(reader, obj)

    if doc_token is not None:
        obj.comment = doc_token.value

    cls.add_element(obj)


def parse_class_attribute(reader, cls, token, doc_token):
    """Parse an `name: value` attribute and add it to `cls`.

    Block or array values (`{...}` / `[...]`) are skipped rather than stored.
    Note: adds to `cls` and returns None (fixed: the previous return
    annotation claimed a QmlAttribute was returned).
    """
    obj = QmlAttribute()
    obj.name = token.value

    # Should be colon
    token = reader.consume_expecting(lexer.CHAR)
    token = reader.consume()
    if token.type == lexer.BLOCK_START or token.type == lexer.ARRAY_START:
        skip_block(reader)
    else:
        obj.value = token.value

    if doc_token is not None:
        obj.doc = doc_token.value

    cls.add_element(obj)


def parse_property(reader, property_token_value) -> QmlProperty:
    """Parse `[default|readonly] property <type> <name>` and return it."""
    prop = QmlProperty()
    prop.is_default = property_token_value.startswith("default")
    prop.is_readonly = property_token_value.startswith("readonly")

    token = reader.consume_expecting(lexer.ELEMENT)
    prop.type = token.value

    token = reader.consume_expecting(lexer.ELEMENT)
    prop.name = token.value
    return prop


def parse_function(reader) -> QmlFunction:
    """Parse `function <name>(<args>)` and return the QmlFunction."""
    obj = QmlFunction()
    token = reader.consume_expecting(lexer.ELEMENT)
    obj.name = token.value

    reader.consume_expecting(lexer.CHAR, "(")
    obj.args = parse_arguments(reader)
    return obj


def parse_enum(reader) -> QmlEnum:
    """Parse `enum <Name> { ... }` and return the QmlEnum.

    An inline comment before the first enumerator documents the enum itself;
    afterwards it documents the preceding enumerator. Block comments are
    buffered and attached to the next enumerator.
    """
    obj = QmlEnum()
    token = reader.consume_expecting(lexer.ELEMENT)
    obj.name = token.value

    reader.consume_expecting(lexer.BLOCK_START)
    prev_comment_token = None
    prev_enumerator = None

    while not reader.at_end():
        token = reader.consume()
        if is_comment_token(token):
            if token.type == lexer.ICOMMENT:
                if prev_enumerator is None:
                    # this is still for the enum itself
                    obj.doc = token.value
                    obj.doc_is_inline = True
                else:
                    # for last enum
                    prev_enumerator.doc = token.value
                    prev_enumerator.doc_is_inline = True
            else:
                prev_comment_token = token
        elif token.type == lexer.BLOCK_END:
            break
        elif token.type == lexer.ELEMENT:
            # Flush the previous enumerator before starting a new one so a
            # later inline comment can no longer reattach to it.
            if prev_enumerator:
                obj.enumerators.append(prev_enumerator)
            prev_enumerator, block_end = parse_enumerator(reader, token.value)
            if prev_comment_token:
                prev_enumerator.doc = prev_comment_token.value
                prev_comment_token = None
            if block_end:
                # parse_enumerator already consumed the closing '}'.
                break
            continue
        elif token.type == lexer.CHAR and token.value == ",":
            continue
        else:
            raise QmlParserUnexpectedTokenError(token)

    if prev_enumerator:
        prev_enumerator.is_last = True
        obj.enumerators.append(prev_enumerator)
    return obj


def parse_enumerator(reader, name):
    """Parse one enumerator (`Name [= Value]`) starting after its name.

    Returns (QmlEnumerator, block_end) where block_end is True when this
    enumerator was terminated by the enum's closing '}' rather than a comma.
    """
    obj = QmlEnumerator(name)

    block_end = False

    while not reader.at_end():
        token = reader.consume()
        if is_comment_token(token):
            if token.type == lexer.ICOMMENT:
                # we could catch the inline comment for the last item here
                obj.doc = token.value
                obj.doc_is_inline = True
        elif token.type == lexer.BLOCK_END:
            block_end = True
            break
        elif token.type == lexer.CHAR:
            if token.value == ",":
                break
            elif token.value == "=":
                token = reader.consume_expecting(lexer.ELEMENT)
                obj.initializer = token.value
                continue
        else:
            raise QmlParserUnexpectedTokenError(token)

    return obj, block_end


def parse_signal(reader):
    """Parse `signal <name>[(<typed args>)]` and return the QmlSignal.

    The argument list is optional; if the next token is not '(' the reader
    position is rewound so the token is reprocessed by the caller.
    """
    obj = QmlSignal()
    token = reader.consume_expecting(lexer.ELEMENT)
    obj.name = token.value

    idx = reader.idx
    token = reader.consume_wo_comments()
    if token.type == lexer.CHAR and token.value == "(":
        obj.args = parse_arguments(reader, typed=True)
    else:
        # No argument list: un-consume by restoring the reader position.
        reader.idx = idx
    return obj


def parse_arguments(reader, typed=False):
    """Parse an argument list up to and including the closing ')'.

    When `typed` is True each argument is `<type> <name>` (signal style);
    otherwise arguments are plain names that may carry a `...` spread prefix,
    a `= default` initializer, or a `: Type` annotation (function style).

    Returns a list of QmlArgument. Raises QmlParserUnexpectedTokenError on
    malformed input.
    """
    token = reader.consume_wo_comments()
    spread = False
    if token.type == lexer.CHAR and token.value == ")":
        return []
    elif token.type == lexer.ELLIPSES:
        token = reader.consume_expecting(lexer.ELEMENT)
        spread = True
    elif token.type != lexer.ELEMENT:
        raise QmlParserUnexpectedTokenError(token)

    args = []
    while True:
        if typed:
            arg_type = token.value
            token = reader.consume_expecting(lexer.ELEMENT)
            arg = QmlArgument(token.value)
            arg.type = arg_type
        elif spread:
            arg = QmlArgument(token.value)
            arg.spread = True
            spread = False
        else:
            arg = QmlArgument(token.value)

        token = reader.consume_expecting(lexer.CHAR)

        if token.value == "=":
            # Collect the default value; nested `{...}`/`[...]` literals are
            # collapsed to "{}"/"[]" since their contents are not documented.
            default_value = ""
            while True:
                token = reader.consume_expecting(
                    [lexer.ELEMENT, lexer.CHAR, lexer.STRING, lexer.BLOCK_START, lexer.ARRAY_START]
                )
                if token.value in (")", ","):
                    break
                if token.value == "{":
                    token = reader.consume_expecting(lexer.BLOCK_END)
                    default_value += "{}"
                elif token.value == "[":
                    token = reader.consume_expecting(lexer.ARRAY_END)
                    default_value += "[]"
                else:
                    default_value += token.value
            arg.default_value = default_value
        args.append(arg)

        if token.value == ":":
            token = reader.consume_expecting(lexer.ELEMENT)
            arg.type = token.value
            token = reader.consume_expecting(lexer.CHAR)

        if token.value == ")":
            return args
        elif token.value != ",":
            raise QmlParserUnexpectedTokenError(token)

        token = reader.consume_expecting([lexer.ELEMENT, lexer.ELLIPSES])

        if token.type == lexer.ELLIPSES:
            token = reader.consume_expecting(lexer.ELEMENT)
            spread = True


def skip_block(reader):
    """Consume tokens until the current block's matching BLOCK_END."""
    count = 1
    while True:
        token = reader.consume_wo_comments()
        if token.type == lexer.BLOCK_START:
            count += 1
        elif token.type == lexer.BLOCK_END:
            count -= 1
            if count == 0:
                return


def parse_header(reader, cls):
    """Consume header comments, imports and pragmas up to the root component.

    Stops (returning) once the root component name has been recorded in
    `cls.base_name`. Raises QmlParserUnexpectedTokenError otherwise.
    """
    while not reader.at_end():
        token = reader.consume()
        if is_comment_token(token):
            cls.add_header_comment(token.value)
        elif token.type == lexer.IMPORT:
            cls.add_import(token.value)
        elif token.type == lexer.PRAGMA:
            cls.add_pragma(token.value)
        elif token.type == lexer.COMPONENT:
            cls.base_name = token.value
            return
        else:
            raise QmlParserUnexpectedTokenError(token)


def parse_footer(reader, cls):
    """Consume trailing comments after the root component's closing brace."""
    while not reader.at_end():
        token = reader.consume()
        if is_comment_token(token):
            cls.add_footer_comment(token.value)
        else:
            raise QmlParserUnexpectedTokenError(token)


def is_comment_token(token):
    """Return True for block (COMMENT) and inline (ICOMMENT) comment tokens."""
    return token.type in (lexer.COMMENT, lexer.ICOMMENT)


class TokenReader(object):
    """Sequential cursor over a token list; `idx` may be saved and restored
    by callers to implement backtracking (see parse_signal)."""

    def __init__(self, tokens):
        self.tokens = tokens
        self.idx = 0

    def consume(self):
        """Return the next token and advance. IndexError past the end."""
        token = self.tokens[self.idx]
        self.idx += 1
        return token

    def consume_wo_comments(self):
        """Return the next non-comment token, discarding comments."""
        while True:
            token = self.consume()
            if not is_comment_token(token):
                return token

    def consume_expecting(self, expected_types, value=None):
        """Return the next non-comment token, checking its type (one type or
        a list of acceptable types) and optionally its exact value.

        Raises QmlParserError on a mismatch.
        """
        token = self.consume_wo_comments()
        if type(expected_types) is list:
            matches = token.type in expected_types
        else:
            matches = token.type == expected_types
        if not matches:
            raise QmlParserError(
                "Expected token of type '%s', got '%s' instead" % (expected_types, token.type), token)
        if value is not None and token.value != value:
            raise QmlParserError("Expected token with value '%s', got '%s' instead" % (
                value, token.value), token)
        return token

    def at_end(self):
        """Return True once every token has been consumed."""
        return self.idx == len(self.tokens)


def parse(tokens, cls, parse_sub_classes=True):
    """Parse a full token stream into `cls`: header, class body, footer."""
    reader = TokenReader(tokens)
    parse_header(reader, cls)
    parse_class_definition(reader, cls, parse_sub_classes)
    parse_footer(reader, cls)