Funktionierender Prototyp des Serious Games zur Vermittlung von Wissen zu Software-Engineering-Arbeitsmodellen.
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

c_parser.py 72KB

1 year ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
7177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831183218331834183518361837183818391840184118421843184418451846184718481849185018511852185318541855185618571858185918601861186218631864186518661867186818691870187118721873187418751876187718781879188018811882188318841885188618871888188918901891189218931894189518961897189818991900190119021903190419051906190719081909191019111912191319141915191619171918191919201921192219231924192519261927192819291930193119321933193419351936
  1. #------------------------------------------------------------------------------
  2. # pycparser: c_parser.py
  3. #
  4. # CParser class: Parser and AST builder for the C language
  5. #
  6. # Eli Bendersky [https://eli.thegreenplace.net/]
  7. # License: BSD
  8. #------------------------------------------------------------------------------
  9. from .ply import yacc
  10. from . import c_ast
  11. from .c_lexer import CLexer
  12. from .plyparser import PLYParser, ParseError, parameterized, template
  13. from .ast_transforms import fix_switch_cases, fix_atomic_specifiers
  14. @template
  15. class CParser(PLYParser):
    def __init__(
            self,
            lex_optimize=True,
            lexer=CLexer,
            lextab='pycparser.lextab',
            yacc_optimize=True,
            yacctab='pycparser.yacctab',
            yacc_debug=False,
            taboutputdir=''):
        """ Create a new CParser.

            Some arguments for controlling the debug/optimization
            level of the parser are provided. The defaults are
            tuned for release/performance mode.
            The simple rules for using them are:
            *) When tweaking CParser/CLexer, set these to False
            *) When releasing a stable parser, set to True

            lex_optimize:
                Set to False when you're modifying the lexer.
                Otherwise, changes in the lexer won't be used, if
                some lextab.py file exists.
                When releasing with a stable lexer, set to True
                to save the re-generation of the lexer table on
                each run.

            lexer:
                Set this parameter to define the lexer to use if
                you're not using the default CLexer.

            lextab:
                Points to the lex table that's used for optimized
                mode. Only if you're modifying the lexer and want
                some tests to avoid re-generating the table, make
                this point to a local lex table file (that's been
                earlier generated with lex_optimize=True)

            yacc_optimize:
                Set to False when you're modifying the parser.
                Otherwise, changes in the parser won't be used, if
                some parsetab.py file exists.
                When releasing with a stable parser, set to True
                to save the re-generation of the parser table on
                each run.

            yacctab:
                Points to the yacc table that's used for optimized
                mode. Only if you're modifying the parser, make
                this point to a local yacc table file

            yacc_debug:
                Generate a parser.out file that explains how yacc
                built the parsing table from the grammar.

            taboutputdir:
                Set this parameter to control the location of generated
                lextab and yacctab files.
        """
        # The lexer calls back into the parser: for error reporting, for
        # scope push/pop on braces, and to decide whether an identifier
        # is a typedef-name (TYPEID).
        self.clex = lexer(
            error_func=self._lex_error_func,
            on_lbrace_func=self._lex_on_lbrace_func,
            on_rbrace_func=self._lex_on_rbrace_func,
            type_lookup_func=self._lex_type_lookup_func)

        self.clex.build(
            optimize=lex_optimize,
            lextab=lextab,
            outputdir=taboutputdir)
        self.tokens = self.clex.tokens

        # Rules that also need an auto-generated "<rule>_opt" variant
        # (matching either the rule or 'empty').
        rules_with_opt = [
            'abstract_declarator',
            'assignment_expression',
            'declaration_list',
            'declaration_specifiers_no_type',
            'designation',
            'expression',
            'identifier_list',
            'init_declarator_list',
            'id_init_declarator_list',
            'initializer_list',
            'parameter_type_list',
            'block_item_list',
            'type_qualifier_list',
            'struct_declarator_list'
        ]

        for rule in rules_with_opt:
            self._create_opt_rule(rule)

        self.cparser = yacc.yacc(
            module=self,
            start='translation_unit_or_empty',
            debug=yacc_debug,
            optimize=yacc_optimize,
            tabmodule=yacctab,
            outputdir=taboutputdir)

        # Stack of scopes for keeping track of symbols. _scope_stack[-1] is
        # the current (topmost) scope. Each scope is a dictionary that
        # specifies whether a name is a type. If _scope_stack[n][name] is
        # True, 'name' is currently a type in the scope. If it's False,
        # 'name' is used in the scope but not as a type (for instance, if we
        # saw: int name;
        # If 'name' is not a key in _scope_stack[n] then 'name' was not defined
        # in this scope at all.
        self._scope_stack = [dict()]

        # Keeps track of the last token given to yacc (the lookahead token)
        self._last_yielded_token = None
    def parse(self, text, filename='', debug=False):
        """ Parses C code and returns an AST.

            text:
                A string containing the C source code

            filename:
                Name of the file being parsed (for meaningful
                error messages)

            debug:
                Debug flag to YACC
        """
        self.clex.filename = filename
        self.clex.reset_lineno()
        # Reset per-parse state so one CParser instance can be reused for
        # multiple inputs without typedefs leaking between parses.
        self._scope_stack = [dict()]
        self._last_yielded_token = None
        return self.cparser.parse(
            input=text,
            lexer=self.clex,
            debug=debug)
  130. ######################-- PRIVATE --######################
  131. def _push_scope(self):
  132. self._scope_stack.append(dict())
    def _pop_scope(self):
        # The global (outermost) scope must never be popped.
        assert len(self._scope_stack) > 1
        self._scope_stack.pop()
  136. def _add_typedef_name(self, name, coord):
  137. """ Add a new typedef name (ie a TYPEID) to the current scope
  138. """
  139. if not self._scope_stack[-1].get(name, True):
  140. self._parse_error(
  141. "Typedef %r previously declared as non-typedef "
  142. "in this scope" % name, coord)
  143. self._scope_stack[-1][name] = True
  144. def _add_identifier(self, name, coord):
  145. """ Add a new object, function, or enum member name (ie an ID) to the
  146. current scope
  147. """
  148. if self._scope_stack[-1].get(name, False):
  149. self._parse_error(
  150. "Non-typedef %r previously declared as typedef "
  151. "in this scope" % name, coord)
  152. self._scope_stack[-1][name] = False
  153. def _is_type_in_scope(self, name):
  154. """ Is *name* a typedef-name in the current scope?
  155. """
  156. for scope in reversed(self._scope_stack):
  157. # If name is an identifier in this scope it shadows typedefs in
  158. # higher scopes.
  159. in_scope = scope.get(name)
  160. if in_scope is not None: return in_scope
  161. return False
    def _lex_error_func(self, msg, line, column):
        # Lexer error callback: re-raise as a parse error with coordinates.
        self._parse_error(msg, self._coord(line, column))
    def _lex_on_lbrace_func(self):
        # Lexer callback on '{': open a new scope.
        self._push_scope()
    def _lex_on_rbrace_func(self):
        # Lexer callback on '}': close the current scope.
        self._pop_scope()
  168. def _lex_type_lookup_func(self, name):
  169. """ Looks up types that were previously defined with
  170. typedef.
  171. Passed to the lexer for recognizing identifiers that
  172. are types.
  173. """
  174. is_type = self._is_type_in_scope(name)
  175. return is_type
    def _get_yacc_lookahead_token(self):
        """ We need access to yacc's lookahead token in certain cases.
            This is the last token yacc requested from the lexer, so we
            ask the lexer.
        """
        return self.clex.last_token
  182. # To understand what's going on here, read sections A.8.5 and
  183. # A.8.6 of K&R2 very carefully.
  184. #
  185. # A C type consists of a basic type declaration, with a list
  186. # of modifiers. For example:
  187. #
  188. # int *c[5];
  189. #
  190. # The basic declaration here is 'int c', and the pointer and
  191. # the array are the modifiers.
  192. #
  193. # Basic declarations are represented by TypeDecl (from module c_ast) and the
  194. # modifiers are FuncDecl, PtrDecl and ArrayDecl.
  195. #
  196. # The standard states that whenever a new modifier is parsed, it should be
  197. # added to the end of the list of modifiers. For example:
  198. #
  199. # K&R2 A.8.6.2: Array Declarators
  200. #
  201. # In a declaration T D where D has the form
  202. # D1 [constant-expression-opt]
  203. # and the type of the identifier in the declaration T D1 is
  204. # "type-modifier T", the type of the
  205. # identifier of D is "type-modifier array of T"
  206. #
  207. # This is what this method does. The declarator it receives
  208. # can be a list of declarators ending with TypeDecl. It
  209. # tacks the modifier to the end of this list, just before
  210. # the TypeDecl.
  211. #
  212. # Additionally, the modifier may be a list itself. This is
  213. # useful for pointers, that can come as a chain from the rule
  214. # p_pointer. In this case, the whole modifier list is spliced
  215. # into the new location.
    def _type_modify_decl(self, decl, modifier):
        """ Tacks a type modifier on a declarator, and returns
            the modified declarator.

            Note: the declarator and modifier may be modified
        """
        modifier_head = modifier
        modifier_tail = modifier

        # The modifier may be a nested list. Reach its tail.
        while modifier_tail.type:
            modifier_tail = modifier_tail.type

        # If the decl is a basic type, just tack the modifier onto it.
        if isinstance(decl, c_ast.TypeDecl):
            modifier_tail.type = decl
            return modifier
        else:
            # Otherwise, the decl is a list of modifiers. Reach
            # its tail and splice the modifier onto the tail,
            # pointing to the underlying basic type.
            decl_tail = decl

            while not isinstance(decl_tail.type, c_ast.TypeDecl):
                decl_tail = decl_tail.type

            modifier_tail.type = decl_tail.type
            decl_tail.type = modifier_head
            return decl
  244. # Due to the order in which declarators are constructed,
  245. # they have to be fixed in order to look like a normal AST.
  246. #
  247. # When a declaration arrives from syntax construction, it has
  248. # these problems:
  249. # * The innermost TypeDecl has no type (because the basic
  250. # type is only known at the uppermost declaration level)
  251. # * The declaration has no variable name, since that is saved
  252. # in the innermost TypeDecl
  253. # * The typename of the declaration is a list of type
  254. # specifiers, and not a node. Here, basic identifier types
  255. # should be separated from more complex types like enums
  256. # and structs.
  257. #
  258. # This method fixes these problems.
  259. def _fix_decl_name_type(self, decl, typename):
  260. """ Fixes a declaration. Modifies decl.
  261. """
  262. # Reach the underlying basic type
  263. #
  264. type = decl
  265. while not isinstance(type, c_ast.TypeDecl):
  266. type = type.type
  267. decl.name = type.declname
  268. type.quals = decl.quals[:]
  269. # The typename is a list of types. If any type in this
  270. # list isn't an IdentifierType, it must be the only
  271. # type in the list (it's illegal to declare "int enum ..")
  272. # If all the types are basic, they're collected in the
  273. # IdentifierType holder.
  274. for tn in typename:
  275. if not isinstance(tn, c_ast.IdentifierType):
  276. if len(typename) > 1:
  277. self._parse_error(
  278. "Invalid multiple types specified", tn.coord)
  279. else:
  280. type.type = tn
  281. return decl
  282. if not typename:
  283. # Functions default to returning int
  284. #
  285. if not isinstance(decl.type, c_ast.FuncDecl):
  286. self._parse_error(
  287. "Missing type in declaration", decl.coord)
  288. type.type = c_ast.IdentifierType(
  289. ['int'],
  290. coord=decl.coord)
  291. else:
  292. # At this point, we know that typename is a list of IdentifierType
  293. # nodes. Concatenate all the names into a single list.
  294. #
  295. type.type = c_ast.IdentifierType(
  296. [name for id in typename for name in id.names],
  297. coord=typename[0].coord)
  298. return decl
  299. def _add_declaration_specifier(self, declspec, newspec, kind, append=False):
  300. """ Declaration specifiers are represented by a dictionary
  301. with the entries:
  302. * qual: a list of type qualifiers
  303. * storage: a list of storage type qualifiers
  304. * type: a list of type specifiers
  305. * function: a list of function specifiers
  306. * alignment: a list of alignment specifiers
  307. This method is given a declaration specifier, and a
  308. new specifier of a given kind.
  309. If `append` is True, the new specifier is added to the end of
  310. the specifiers list, otherwise it's added at the beginning.
  311. Returns the declaration specifier, with the new
  312. specifier incorporated.
  313. """
  314. spec = declspec or dict(qual=[], storage=[], type=[], function=[], alignment=[])
  315. if append:
  316. spec[kind].append(newspec)
  317. else:
  318. spec[kind].insert(0, newspec)
  319. return spec
    def _build_declarations(self, spec, decls, typedef_namespace=False):
        """ Builds a list of declarations all sharing the given specifiers.
            If typedef_namespace is true, each declared name is added
            to the "typedef namespace", which also includes objects,
            functions, and enum constants.
        """
        is_typedef = 'typedef' in spec['storage']
        declarations = []

        # Bit-fields are allowed to be unnamed.
        if decls[0].get('bitsize') is not None:
            pass

        # When redeclaring typedef names as identifiers in inner scopes, a
        # problem can occur where the identifier gets grouped into
        # spec['type'], leaving decl as None. This can only occur for the
        # first declarator.
        elif decls[0]['decl'] is None:
            if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \
                    not self._is_type_in_scope(spec['type'][-1].names[0]):
                # Pick the first type entry that carries a coordinate for a
                # meaningful error location.
                coord = '?'
                for t in spec['type']:
                    if hasattr(t, 'coord'):
                        coord = t.coord
                        break
                self._parse_error('Invalid declaration', coord)

            # Make this look as if it came from "direct_declarator:ID"
            decls[0]['decl'] = c_ast.TypeDecl(
                declname=spec['type'][-1].names[0],
                type=None,
                quals=None,
                align=spec['alignment'],
                coord=spec['type'][-1].coord)
            # Remove the "new" type's name from the end of spec['type']
            del spec['type'][-1]

        # A similar problem can occur where the declaration ends up looking
        # like an abstract declarator. Give it a name if this is the case.
        elif not isinstance(decls[0]['decl'], (
                c_ast.Enum, c_ast.Struct, c_ast.Union, c_ast.IdentifierType)):
            decls_0_tail = decls[0]['decl']
            while not isinstance(decls_0_tail, c_ast.TypeDecl):
                decls_0_tail = decls_0_tail.type
            if decls_0_tail.declname is None:
                decls_0_tail.declname = spec['type'][-1].names[0]
                del spec['type'][-1]

        for decl in decls:
            assert decl['decl'] is not None
            if is_typedef:
                declaration = c_ast.Typedef(
                    name=None,
                    quals=spec['qual'],
                    storage=spec['storage'],
                    type=decl['decl'],
                    coord=decl['decl'].coord)
            else:
                declaration = c_ast.Decl(
                    name=None,
                    quals=spec['qual'],
                    align=spec['alignment'],
                    storage=spec['storage'],
                    funcspec=spec['function'],
                    type=decl['decl'],
                    init=decl.get('init'),
                    bitsize=decl.get('bitsize'),
                    coord=decl['decl'].coord)

            if isinstance(declaration.type, (
                    c_ast.Enum, c_ast.Struct, c_ast.Union,
                    c_ast.IdentifierType)):
                fixed_decl = declaration
            else:
                fixed_decl = self._fix_decl_name_type(declaration, spec['type'])

            # Add the type name defined by typedef to a
            # symbol table (for usage in the lexer)
            if typedef_namespace:
                if is_typedef:
                    self._add_typedef_name(fixed_decl.name, fixed_decl.coord)
                else:
                    self._add_identifier(fixed_decl.name, fixed_decl.coord)

            fixed_decl = fix_atomic_specifiers(fixed_decl)
            declarations.append(fixed_decl)

        return declarations
  399. def _build_function_definition(self, spec, decl, param_decls, body):
  400. """ Builds a function definition.
  401. """
  402. if 'typedef' in spec['storage']:
  403. self._parse_error("Invalid typedef", decl.coord)
  404. declaration = self._build_declarations(
  405. spec=spec,
  406. decls=[dict(decl=decl, init=None)],
  407. typedef_namespace=True)[0]
  408. return c_ast.FuncDef(
  409. decl=declaration,
  410. param_decls=param_decls,
  411. body=body,
  412. coord=decl.coord)
  413. def _select_struct_union_class(self, token):
  414. """ Given a token (either STRUCT or UNION), selects the
  415. appropriate AST class.
  416. """
  417. if token == 'struct':
  418. return c_ast.Struct
  419. else:
  420. return c_ast.Union
    ##
    ## Precedence and associativity of operators
    ##
    # If this changes, c_generator.CGenerator.precedence_map needs to change as
    # well
    # (PLY convention: entries are listed from lowest to highest precedence.)
    precedence = (
        ('left', 'LOR'),
        ('left', 'LAND'),
        ('left', 'OR'),
        ('left', 'XOR'),
        ('left', 'AND'),
        ('left', 'EQ', 'NE'),
        ('left', 'GT', 'GE', 'LT', 'LE'),
        ('left', 'RSHIFT', 'LSHIFT'),
        ('left', 'PLUS', 'MINUS'),
        ('left', 'TIMES', 'DIVIDE', 'MOD')
    )
  438. ##
  439. ## Grammar productions
  440. ## Implementation of the BNF defined in K&R2 A.13
  441. ##
  442. # Wrapper around a translation unit, to allow for empty input.
  443. # Not strictly part of the C99 Grammar, but useful in practice.
    def p_translation_unit_or_empty(self, p):
        """ translation_unit_or_empty : translation_unit
                                      | empty
        """
        # p[1] is None for the 'empty' alternative: produce an empty AST.
        if p[1] is None:
            p[0] = c_ast.FileAST([])
        else:
            p[0] = c_ast.FileAST(p[1])
    def p_translation_unit_1(self, p):
        """ translation_unit : external_declaration
        """
        # Note: external_declaration is already a list
        p[0] = p[1]
    def p_translation_unit_2(self, p):
        """ translation_unit : translation_unit external_declaration
        """
        # Accumulate declarations into the single translation-unit list.
        p[1].extend(p[2])
        p[0] = p[1]
  462. # Declarations always come as lists (because they can be
  463. # several in one line), so we wrap the function definition
  464. # into a list as well, to make the return value of
  465. # external_declaration homogeneous.
    def p_external_declaration_1(self, p):
        """ external_declaration : function_definition
        """
        # Wrap in a list to keep external_declaration's return homogeneous.
        p[0] = [p[1]]
    def p_external_declaration_2(self, p):
        """ external_declaration : declaration
        """
        # A declaration is already a list of Decl nodes.
        p[0] = p[1]
    def p_external_declaration_3(self, p):
        """ external_declaration : pp_directive
                                 | pppragma_directive
        """
        p[0] = [p[1]]
    def p_external_declaration_4(self, p):
        """ external_declaration : SEMI
        """
        # A stray semicolon at file scope contributes nothing to the AST.
        p[0] = []
    def p_external_declaration_5(self, p):
        """ external_declaration : static_assert
        """
        p[0] = p[1]
    def p_static_assert_declaration(self, p):
        """ static_assert : _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN
                          | _STATIC_ASSERT LPAREN constant_expression RPAREN
        """
        # len(p) == 5 is the short form without a message string literal.
        if len(p) == 5:
            p[0] = [c_ast.StaticAssert(p[3], None, self._token_coord(p, 1))]
        else:
            p[0] = [c_ast.StaticAssert(p[3], p[5], self._token_coord(p, 1))]
    def p_pp_directive(self, p):
        """ pp_directive : PPHASH
        """
        # Preprocessor directives are expected to be resolved before
        # parsing; encountering one here is an error.
        self._parse_error('Directives not supported yet',
                          self._token_coord(p, 1))
    def p_pppragma_directive(self, p):
        """ pppragma_directive : PPPRAGMA
                               | PPPRAGMA PPPRAGMASTR
        """
        # len(p) == 3 means a pragma string followed the PPPRAGMA token.
        if len(p) == 3:
            p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
        else:
            p[0] = c_ast.Pragma("", self._token_coord(p, 1))
  508. # In function definitions, the declarator can be followed by
    # a declaration list, for old "K&R style" function definitions.
    def p_function_definition_1(self, p):
        """ function_definition : id_declarator declaration_list_opt compound_statement
        """
        # no declaration specifiers - 'int' becomes the default type
        spec = dict(
            qual=[],
            alignment=[],
            storage=[],
            type=[c_ast.IdentifierType(['int'],
                                       coord=self._token_coord(p, 1))],
            function=[])

        p[0] = self._build_function_definition(
            spec=spec,
            decl=p[1],
            param_decls=p[2],
            body=p[3])
    def p_function_definition_2(self, p):
        """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement
        """
        spec = p[1]

        p[0] = self._build_function_definition(
            spec=spec,
            decl=p[2],
            param_decls=p[3],
            body=p[4])
  535. # Note, according to C18 A.2.2 6.7.10 static_assert-declaration _Static_assert
  536. # is a declaration, not a statement. We additionally recognise it as a statement
  537. # to fix parsing of _Static_assert inside the functions.
  538. #
    def p_statement(self, p):
        """ statement : labeled_statement
                      | expression_statement
                      | compound_statement
                      | selection_statement
                      | iteration_statement
                      | jump_statement
                      | pppragma_directive
                      | static_assert
        """
        p[0] = p[1]
  550. # A pragma is generally considered a decorator rather than an actual
  551. # statement. Still, for the purposes of analyzing an abstract syntax tree of
  552. # C code, pragma's should not be ignored and were previously treated as a
  553. # statement. This presents a problem for constructs that take a statement
  554. # such as labeled_statements, selection_statements, and
  555. # iteration_statements, causing a misleading structure in the AST. For
  556. # example, consider the following C code.
  557. #
  558. # for (int i = 0; i < 3; i++)
  559. # #pragma omp critical
  560. # sum += 1;
  561. #
  562. # This code will compile and execute "sum += 1;" as the body of the for
  563. # loop. Previous implementations of PyCParser would render the AST for this
  564. # block of code as follows:
  565. #
  566. # For:
  567. # DeclList:
  568. # Decl: i, [], [], []
  569. # TypeDecl: i, []
  570. # IdentifierType: ['int']
  571. # Constant: int, 0
  572. # BinaryOp: <
  573. # ID: i
  574. # Constant: int, 3
  575. # UnaryOp: p++
  576. # ID: i
  577. # Pragma: omp critical
  578. # Assignment: +=
  579. # ID: sum
  580. # Constant: int, 1
  581. #
  582. # This AST misleadingly takes the Pragma as the body of the loop and the
  583. # assignment then becomes a sibling of the loop.
  584. #
  585. # To solve edge cases like these, the pragmacomp_or_statement rule groups
  586. # a pragma and its following statement (which would otherwise be orphaned)
  587. # using a compound block, effectively turning the above code into:
  588. #
  589. # for (int i = 0; i < 3; i++) {
  590. # #pragma omp critical
  591. # sum += 1;
  592. # }
    def p_pragmacomp_or_statement(self, p):
        """ pragmacomp_or_statement : pppragma_directive statement
                                    | statement
        """
        # Group "#pragma + statement" in a synthetic Compound so the pragma
        # does not displace the real statement as the body of its parent
        # construct (see the long comment above).
        if isinstance(p[1], c_ast.Pragma) and len(p) == 3:
            p[0] = c_ast.Compound(
                block_items=[p[1], p[2]],
                coord=self._token_coord(p, 1))
        else:
            p[0] = p[1]
  603. # In C, declarations can come several in a line:
  604. # int x, *px, romulo = 5;
  605. #
  606. # However, for the AST, we will split them to separate Decl
  607. # nodes.
  608. #
  609. # This rule splits its declarations and always returns a list
  610. # of Decl nodes, even if it's one element long.
  611. #
    def p_decl_body(self, p):
        """ decl_body : declaration_specifiers init_declarator_list_opt
                      | declaration_specifiers_no_type id_init_declarator_list_opt
        """
        spec = p[1]

        # p[2] (init_declarator_list_opt) is either a list or None
        #
        if p[2] is None:
            # By the standard, you must have at least one declarator unless
            # declaring a structure tag, a union tag, or the members of an
            # enumeration.
            #
            ty = spec['type']
            s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum)
            if len(ty) == 1 and isinstance(ty[0], s_u_or_e):
                decls = [c_ast.Decl(
                    name=None,
                    quals=spec['qual'],
                    align=spec['alignment'],
                    storage=spec['storage'],
                    funcspec=spec['function'],
                    type=ty[0],
                    init=None,
                    bitsize=None,
                    coord=ty[0].coord)]

            # However, this case can also occur on redeclared identifiers in
            # an inner scope. The trouble is that the redeclared type's name
            # gets grouped into declaration_specifiers; _build_declarations
            # compensates for this.
            #
            else:
                decls = self._build_declarations(
                    spec=spec,
                    decls=[dict(decl=None, init=None)],
                    typedef_namespace=True)
        else:
            decls = self._build_declarations(
                spec=spec,
                decls=p[2],
                typedef_namespace=True)

        p[0] = decls
  653. # The declaration has been split to a decl_body sub-rule and
  654. # SEMI, because having them in a single rule created a problem
  655. # for defining typedefs.
  656. #
  657. # If a typedef line was directly followed by a line using the
  658. # type defined with the typedef, the type would not be
  659. # recognized. This is because to reduce the declaration rule,
  660. # the parser's lookahead asked for the token after SEMI, which
  661. # was the type from the next line, and the lexer had no chance
  662. # to see the updated type symbol table.
  663. #
  664. # Splitting solves this problem, because after seeing SEMI,
  665. # the parser reduces decl_body, which actually adds the new
  666. # type into the table to be seen by the lexer before the next
  667. # line is reached.
  668. def p_declaration(self, p):
  669. """ declaration : decl_body SEMI
  670. """
  671. p[0] = p[1]
  672. # Since each declaration is a list of declarations, this
  673. # rule will combine all the declarations and return a single
  674. # list
  675. #
  676. def p_declaration_list(self, p):
  677. """ declaration_list : declaration
  678. | declaration_list declaration
  679. """
  680. p[0] = p[1] if len(p) == 2 else p[1] + p[2]
  681. # To know when declaration-specifiers end and declarators begin,
  682. # we require declaration-specifiers to have at least one
  683. # type-specifier, and disallow typedef-names after we've seen any
  684. # type-specifier. These are both required by the spec.
  685. #
  686. def p_declaration_specifiers_no_type_1(self, p):
  687. """ declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt
  688. """
  689. p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')
  690. def p_declaration_specifiers_no_type_2(self, p):
  691. """ declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt
  692. """
  693. p[0] = self._add_declaration_specifier(p[2], p[1], 'storage')
  694. def p_declaration_specifiers_no_type_3(self, p):
  695. """ declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt
  696. """
  697. p[0] = self._add_declaration_specifier(p[2], p[1], 'function')
  698. # Without this, `typedef _Atomic(T) U` will parse incorrectly because the
  699. # _Atomic qualifier will match, instead of the specifier.
  700. def p_declaration_specifiers_no_type_4(self, p):
  701. """ declaration_specifiers_no_type : atomic_specifier declaration_specifiers_no_type_opt
  702. """
  703. p[0] = self._add_declaration_specifier(p[2], p[1], 'type')
  704. def p_declaration_specifiers_no_type_5(self, p):
  705. """ declaration_specifiers_no_type : alignment_specifier declaration_specifiers_no_type_opt
  706. """
  707. p[0] = self._add_declaration_specifier(p[2], p[1], 'alignment')
  708. def p_declaration_specifiers_1(self, p):
  709. """ declaration_specifiers : declaration_specifiers type_qualifier
  710. """
  711. p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
  712. def p_declaration_specifiers_2(self, p):
  713. """ declaration_specifiers : declaration_specifiers storage_class_specifier
  714. """
  715. p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True)
  716. def p_declaration_specifiers_3(self, p):
  717. """ declaration_specifiers : declaration_specifiers function_specifier
  718. """
  719. p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True)
  720. def p_declaration_specifiers_4(self, p):
  721. """ declaration_specifiers : declaration_specifiers type_specifier_no_typeid
  722. """
  723. p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
  724. def p_declaration_specifiers_5(self, p):
  725. """ declaration_specifiers : type_specifier
  726. """
  727. p[0] = self._add_declaration_specifier(None, p[1], 'type')
  728. def p_declaration_specifiers_6(self, p):
  729. """ declaration_specifiers : declaration_specifiers_no_type type_specifier
  730. """
  731. p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
  732. def p_declaration_specifiers_7(self, p):
  733. """ declaration_specifiers : declaration_specifiers alignment_specifier
  734. """
  735. p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment', append=True)
  736. def p_storage_class_specifier(self, p):
  737. """ storage_class_specifier : AUTO
  738. | REGISTER
  739. | STATIC
  740. | EXTERN
  741. | TYPEDEF
  742. | _THREAD_LOCAL
  743. """
  744. p[0] = p[1]
  745. def p_function_specifier(self, p):
  746. """ function_specifier : INLINE
  747. | _NORETURN
  748. """
  749. p[0] = p[1]
  750. def p_type_specifier_no_typeid(self, p):
  751. """ type_specifier_no_typeid : VOID
  752. | _BOOL
  753. | CHAR
  754. | SHORT
  755. | INT
  756. | LONG
  757. | FLOAT
  758. | DOUBLE
  759. | _COMPLEX
  760. | SIGNED
  761. | UNSIGNED
  762. | __INT128
  763. """
  764. p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
  765. def p_type_specifier(self, p):
  766. """ type_specifier : typedef_name
  767. | enum_specifier
  768. | struct_or_union_specifier
  769. | type_specifier_no_typeid
  770. | atomic_specifier
  771. """
  772. p[0] = p[1]
  773. # See section 6.7.2.4 of the C11 standard.
  774. def p_atomic_specifier(self, p):
  775. """ atomic_specifier : _ATOMIC LPAREN type_name RPAREN
  776. """
  777. typ = p[3]
  778. typ.quals.append('_Atomic')
  779. p[0] = typ
  780. def p_type_qualifier(self, p):
  781. """ type_qualifier : CONST
  782. | RESTRICT
  783. | VOLATILE
  784. | _ATOMIC
  785. """
  786. p[0] = p[1]
  787. def p_init_declarator_list(self, p):
  788. """ init_declarator_list : init_declarator
  789. | init_declarator_list COMMA init_declarator
  790. """
  791. p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
  792. # Returns a {decl=<declarator> : init=<initializer>} dictionary
  793. # If there's no initializer, uses None
  794. #
  795. def p_init_declarator(self, p):
  796. """ init_declarator : declarator
  797. | declarator EQUALS initializer
  798. """
  799. p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
  800. def p_id_init_declarator_list(self, p):
  801. """ id_init_declarator_list : id_init_declarator
  802. | id_init_declarator_list COMMA init_declarator
  803. """
  804. p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
  805. def p_id_init_declarator(self, p):
  806. """ id_init_declarator : id_declarator
  807. | id_declarator EQUALS initializer
  808. """
  809. p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
  810. # Require at least one type specifier in a specifier-qualifier-list
  811. #
  812. def p_specifier_qualifier_list_1(self, p):
  813. """ specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid
  814. """
  815. p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
  816. def p_specifier_qualifier_list_2(self, p):
  817. """ specifier_qualifier_list : specifier_qualifier_list type_qualifier
  818. """
  819. p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
  820. def p_specifier_qualifier_list_3(self, p):
  821. """ specifier_qualifier_list : type_specifier
  822. """
  823. p[0] = self._add_declaration_specifier(None, p[1], 'type')
  824. def p_specifier_qualifier_list_4(self, p):
  825. """ specifier_qualifier_list : type_qualifier_list type_specifier
  826. """
  827. p[0] = dict(qual=p[1], alignment=[], storage=[], type=[p[2]], function=[])
  828. def p_specifier_qualifier_list_5(self, p):
  829. """ specifier_qualifier_list : alignment_specifier
  830. """
  831. p[0] = dict(qual=[], alignment=[p[1]], storage=[], type=[], function=[])
  832. def p_specifier_qualifier_list_6(self, p):
  833. """ specifier_qualifier_list : specifier_qualifier_list alignment_specifier
  834. """
  835. p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment')
  836. # TYPEID is allowed here (and in other struct/enum related tag names), because
  837. # struct/enum tags reside in their own namespace and can be named the same as types
  838. #
  839. def p_struct_or_union_specifier_1(self, p):
  840. """ struct_or_union_specifier : struct_or_union ID
  841. | struct_or_union TYPEID
  842. """
  843. klass = self._select_struct_union_class(p[1])
  844. # None means no list of members
  845. p[0] = klass(
  846. name=p[2],
  847. decls=None,
  848. coord=self._token_coord(p, 2))
  849. def p_struct_or_union_specifier_2(self, p):
  850. """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
  851. | struct_or_union brace_open brace_close
  852. """
  853. klass = self._select_struct_union_class(p[1])
  854. if len(p) == 4:
  855. # Empty sequence means an empty list of members
  856. p[0] = klass(
  857. name=None,
  858. decls=[],
  859. coord=self._token_coord(p, 2))
  860. else:
  861. p[0] = klass(
  862. name=None,
  863. decls=p[3],
  864. coord=self._token_coord(p, 2))
  865. def p_struct_or_union_specifier_3(self, p):
  866. """ struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close
  867. | struct_or_union ID brace_open brace_close
  868. | struct_or_union TYPEID brace_open struct_declaration_list brace_close
  869. | struct_or_union TYPEID brace_open brace_close
  870. """
  871. klass = self._select_struct_union_class(p[1])
  872. if len(p) == 5:
  873. # Empty sequence means an empty list of members
  874. p[0] = klass(
  875. name=p[2],
  876. decls=[],
  877. coord=self._token_coord(p, 2))
  878. else:
  879. p[0] = klass(
  880. name=p[2],
  881. decls=p[4],
  882. coord=self._token_coord(p, 2))
  883. def p_struct_or_union(self, p):
  884. """ struct_or_union : STRUCT
  885. | UNION
  886. """
  887. p[0] = p[1]
  888. # Combine all declarations into a single list
  889. #
  890. def p_struct_declaration_list(self, p):
  891. """ struct_declaration_list : struct_declaration
  892. | struct_declaration_list struct_declaration
  893. """
  894. if len(p) == 2:
  895. p[0] = p[1] or []
  896. else:
  897. p[0] = p[1] + (p[2] or [])
  898. def p_struct_declaration_1(self, p):
  899. """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI
  900. """
  901. spec = p[1]
  902. assert 'typedef' not in spec['storage']
  903. if p[2] is not None:
  904. decls = self._build_declarations(
  905. spec=spec,
  906. decls=p[2])
  907. elif len(spec['type']) == 1:
  908. # Anonymous struct/union, gcc extension, C1x feature.
  909. # Although the standard only allows structs/unions here, I see no
  910. # reason to disallow other types since some compilers have typedefs
  911. # here, and pycparser isn't about rejecting all invalid code.
  912. #
  913. node = spec['type'][0]
  914. if isinstance(node, c_ast.Node):
  915. decl_type = node
  916. else:
  917. decl_type = c_ast.IdentifierType(node)
  918. decls = self._build_declarations(
  919. spec=spec,
  920. decls=[dict(decl=decl_type)])
  921. else:
  922. # Structure/union members can have the same names as typedefs.
  923. # The trouble is that the member's name gets grouped into
  924. # specifier_qualifier_list; _build_declarations compensates.
  925. #
  926. decls = self._build_declarations(
  927. spec=spec,
  928. decls=[dict(decl=None, init=None)])
  929. p[0] = decls
  930. def p_struct_declaration_2(self, p):
  931. """ struct_declaration : SEMI
  932. """
  933. p[0] = None
  934. def p_struct_declaration_3(self, p):
  935. """ struct_declaration : pppragma_directive
  936. """
  937. p[0] = [p[1]]
  938. def p_struct_declarator_list(self, p):
  939. """ struct_declarator_list : struct_declarator
  940. | struct_declarator_list COMMA struct_declarator
  941. """
  942. p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
  943. # struct_declarator passes up a dict with the keys: decl (for
  944. # the underlying declarator) and bitsize (for the bitsize)
  945. #
  946. def p_struct_declarator_1(self, p):
  947. """ struct_declarator : declarator
  948. """
  949. p[0] = {'decl': p[1], 'bitsize': None}
  950. def p_struct_declarator_2(self, p):
  951. """ struct_declarator : declarator COLON constant_expression
  952. | COLON constant_expression
  953. """
  954. if len(p) > 3:
  955. p[0] = {'decl': p[1], 'bitsize': p[3]}
  956. else:
  957. p[0] = {'decl': c_ast.TypeDecl(None, None, None, None), 'bitsize': p[2]}
  958. def p_enum_specifier_1(self, p):
  959. """ enum_specifier : ENUM ID
  960. | ENUM TYPEID
  961. """
  962. p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1))
  963. def p_enum_specifier_2(self, p):
  964. """ enum_specifier : ENUM brace_open enumerator_list brace_close
  965. """
  966. p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1))
  967. def p_enum_specifier_3(self, p):
  968. """ enum_specifier : ENUM ID brace_open enumerator_list brace_close
  969. | ENUM TYPEID brace_open enumerator_list brace_close
  970. """
  971. p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1))
  972. def p_enumerator_list(self, p):
  973. """ enumerator_list : enumerator
  974. | enumerator_list COMMA
  975. | enumerator_list COMMA enumerator
  976. """
  977. if len(p) == 2:
  978. p[0] = c_ast.EnumeratorList([p[1]], p[1].coord)
  979. elif len(p) == 3:
  980. p[0] = p[1]
  981. else:
  982. p[1].enumerators.append(p[3])
  983. p[0] = p[1]
  984. def p_alignment_specifier(self, p):
  985. """ alignment_specifier : _ALIGNAS LPAREN type_name RPAREN
  986. | _ALIGNAS LPAREN constant_expression RPAREN
  987. """
  988. p[0] = c_ast.Alignas(p[3], self._token_coord(p, 1))
  989. def p_enumerator(self, p):
  990. """ enumerator : ID
  991. | ID EQUALS constant_expression
  992. """
  993. if len(p) == 2:
  994. enumerator = c_ast.Enumerator(
  995. p[1], None,
  996. self._token_coord(p, 1))
  997. else:
  998. enumerator = c_ast.Enumerator(
  999. p[1], p[3],
  1000. self._token_coord(p, 1))
  1001. self._add_identifier(enumerator.name, enumerator.coord)
  1002. p[0] = enumerator
  1003. def p_declarator(self, p):
  1004. """ declarator : id_declarator
  1005. | typeid_declarator
  1006. """
  1007. p[0] = p[1]
  1008. @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
  1009. def p_xxx_declarator_1(self, p):
  1010. """ xxx_declarator : direct_xxx_declarator
  1011. """
  1012. p[0] = p[1]
  1013. @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
  1014. def p_xxx_declarator_2(self, p):
  1015. """ xxx_declarator : pointer direct_xxx_declarator
  1016. """
  1017. p[0] = self._type_modify_decl(p[2], p[1])
  1018. @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
  1019. def p_direct_xxx_declarator_1(self, p):
  1020. """ direct_xxx_declarator : yyy
  1021. """
  1022. p[0] = c_ast.TypeDecl(
  1023. declname=p[1],
  1024. type=None,
  1025. quals=None,
  1026. align=None,
  1027. coord=self._token_coord(p, 1))
  1028. @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
  1029. def p_direct_xxx_declarator_2(self, p):
  1030. """ direct_xxx_declarator : LPAREN xxx_declarator RPAREN
  1031. """
  1032. p[0] = p[2]
  1033. @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
  1034. def p_direct_xxx_declarator_3(self, p):
  1035. """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
  1036. """
  1037. quals = (p[3] if len(p) > 5 else []) or []
  1038. # Accept dimension qualifiers
  1039. # Per C99 6.7.5.3 p7
  1040. arr = c_ast.ArrayDecl(
  1041. type=None,
  1042. dim=p[4] if len(p) > 5 else p[3],
  1043. dim_quals=quals,
  1044. coord=p[1].coord)
  1045. p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
  1046. @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
  1047. def p_direct_xxx_declarator_4(self, p):
  1048. """ direct_xxx_declarator : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
  1049. | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
  1050. """
  1051. # Using slice notation for PLY objects doesn't work in Python 3 for the
  1052. # version of PLY embedded with pycparser; see PLY Google Code issue 30.
  1053. # Work around that here by listing the two elements separately.
  1054. listed_quals = [item if isinstance(item, list) else [item]
  1055. for item in [p[3],p[4]]]
  1056. dim_quals = [qual for sublist in listed_quals for qual in sublist
  1057. if qual is not None]
  1058. arr = c_ast.ArrayDecl(
  1059. type=None,
  1060. dim=p[5],
  1061. dim_quals=dim_quals,
  1062. coord=p[1].coord)
  1063. p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
  1064. # Special for VLAs
  1065. #
  1066. @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
  1067. def p_direct_xxx_declarator_5(self, p):
  1068. """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
  1069. """
  1070. arr = c_ast.ArrayDecl(
  1071. type=None,
  1072. dim=c_ast.ID(p[4], self._token_coord(p, 4)),
  1073. dim_quals=p[3] if p[3] is not None else [],
  1074. coord=p[1].coord)
  1075. p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
  1076. @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
  1077. def p_direct_xxx_declarator_6(self, p):
  1078. """ direct_xxx_declarator : direct_xxx_declarator LPAREN parameter_type_list RPAREN
  1079. | direct_xxx_declarator LPAREN identifier_list_opt RPAREN
  1080. """
  1081. func = c_ast.FuncDecl(
  1082. args=p[3],
  1083. type=None,
  1084. coord=p[1].coord)
  1085. # To see why _get_yacc_lookahead_token is needed, consider:
  1086. # typedef char TT;
  1087. # void foo(int TT) { TT = 10; }
  1088. # Outside the function, TT is a typedef, but inside (starting and
  1089. # ending with the braces) it's a parameter. The trouble begins with
  1090. # yacc's lookahead token. We don't know if we're declaring or
  1091. # defining a function until we see LBRACE, but if we wait for yacc to
  1092. # trigger a rule on that token, then TT will have already been read
  1093. # and incorrectly interpreted as TYPEID. We need to add the
  1094. # parameters to the scope the moment the lexer sees LBRACE.
  1095. #
  1096. if self._get_yacc_lookahead_token().type == "LBRACE":
  1097. if func.args is not None:
  1098. for param in func.args.params:
  1099. if isinstance(param, c_ast.EllipsisParam): break
  1100. self._add_identifier(param.name, param.coord)
  1101. p[0] = self._type_modify_decl(decl=p[1], modifier=func)
  1102. def p_pointer(self, p):
  1103. """ pointer : TIMES type_qualifier_list_opt
  1104. | TIMES type_qualifier_list_opt pointer
  1105. """
  1106. coord = self._token_coord(p, 1)
  1107. # Pointer decls nest from inside out. This is important when different
  1108. # levels have different qualifiers. For example:
  1109. #
  1110. # char * const * p;
  1111. #
  1112. # Means "pointer to const pointer to char"
  1113. #
  1114. # While:
  1115. #
  1116. # char ** const p;
  1117. #
  1118. # Means "const pointer to pointer to char"
  1119. #
  1120. # So when we construct PtrDecl nestings, the leftmost pointer goes in
  1121. # as the most nested type.
  1122. nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord)
  1123. if len(p) > 3:
  1124. tail_type = p[3]
  1125. while tail_type.type is not None:
  1126. tail_type = tail_type.type
  1127. tail_type.type = nested_type
  1128. p[0] = p[3]
  1129. else:
  1130. p[0] = nested_type
  1131. def p_type_qualifier_list(self, p):
  1132. """ type_qualifier_list : type_qualifier
  1133. | type_qualifier_list type_qualifier
  1134. """
  1135. p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
  1136. def p_parameter_type_list(self, p):
  1137. """ parameter_type_list : parameter_list
  1138. | parameter_list COMMA ELLIPSIS
  1139. """
  1140. if len(p) > 2:
  1141. p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3)))
  1142. p[0] = p[1]
  1143. def p_parameter_list(self, p):
  1144. """ parameter_list : parameter_declaration
  1145. | parameter_list COMMA parameter_declaration
  1146. """
  1147. if len(p) == 2: # single parameter
  1148. p[0] = c_ast.ParamList([p[1]], p[1].coord)
  1149. else:
  1150. p[1].params.append(p[3])
  1151. p[0] = p[1]
  1152. # From ISO/IEC 9899:TC2, 6.7.5.3.11:
  1153. # "If, in a parameter declaration, an identifier can be treated either
  1154. # as a typedef name or as a parameter name, it shall be taken as a
  1155. # typedef name."
  1156. #
  1157. # Inside a parameter declaration, once we've reduced declaration specifiers,
  1158. # if we shift in an LPAREN and see a TYPEID, it could be either an abstract
  1159. # declarator or a declarator nested inside parens. This rule tells us to
  1160. # always treat it as an abstract declarator. Therefore, we only accept
  1161. # `id_declarator`s and `typeid_noparen_declarator`s.
  1162. def p_parameter_declaration_1(self, p):
  1163. """ parameter_declaration : declaration_specifiers id_declarator
  1164. | declaration_specifiers typeid_noparen_declarator
  1165. """
  1166. spec = p[1]
  1167. if not spec['type']:
  1168. spec['type'] = [c_ast.IdentifierType(['int'],
  1169. coord=self._token_coord(p, 1))]
  1170. p[0] = self._build_declarations(
  1171. spec=spec,
  1172. decls=[dict(decl=p[2])])[0]
  1173. def p_parameter_declaration_2(self, p):
  1174. """ parameter_declaration : declaration_specifiers abstract_declarator_opt
  1175. """
  1176. spec = p[1]
  1177. if not spec['type']:
  1178. spec['type'] = [c_ast.IdentifierType(['int'],
  1179. coord=self._token_coord(p, 1))]
  1180. # Parameters can have the same names as typedefs. The trouble is that
  1181. # the parameter's name gets grouped into declaration_specifiers, making
  1182. # it look like an old-style declaration; compensate.
  1183. #
  1184. if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \
  1185. self._is_type_in_scope(spec['type'][-1].names[0]):
  1186. decl = self._build_declarations(
  1187. spec=spec,
  1188. decls=[dict(decl=p[2], init=None)])[0]
  1189. # This truly is an old-style parameter declaration
  1190. #
  1191. else:
  1192. decl = c_ast.Typename(
  1193. name='',
  1194. quals=spec['qual'],
  1195. align=None,
  1196. type=p[2] or c_ast.TypeDecl(None, None, None, None),
  1197. coord=self._token_coord(p, 2))
  1198. typename = spec['type']
  1199. decl = self._fix_decl_name_type(decl, typename)
  1200. p[0] = decl
  1201. def p_identifier_list(self, p):
  1202. """ identifier_list : identifier
  1203. | identifier_list COMMA identifier
  1204. """
  1205. if len(p) == 2: # single parameter
  1206. p[0] = c_ast.ParamList([p[1]], p[1].coord)
  1207. else:
  1208. p[1].params.append(p[3])
  1209. p[0] = p[1]
  1210. def p_initializer_1(self, p):
  1211. """ initializer : assignment_expression
  1212. """
  1213. p[0] = p[1]
  1214. def p_initializer_2(self, p):
  1215. """ initializer : brace_open initializer_list_opt brace_close
  1216. | brace_open initializer_list COMMA brace_close
  1217. """
  1218. if p[2] is None:
  1219. p[0] = c_ast.InitList([], self._token_coord(p, 1))
  1220. else:
  1221. p[0] = p[2]
  1222. def p_initializer_list(self, p):
  1223. """ initializer_list : designation_opt initializer
  1224. | initializer_list COMMA designation_opt initializer
  1225. """
  1226. if len(p) == 3: # single initializer
  1227. init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2])
  1228. p[0] = c_ast.InitList([init], p[2].coord)
  1229. else:
  1230. init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4])
  1231. p[1].exprs.append(init)
  1232. p[0] = p[1]
  1233. def p_designation(self, p):
  1234. """ designation : designator_list EQUALS
  1235. """
  1236. p[0] = p[1]
  1237. # Designators are represented as a list of nodes, in the order in which
  1238. # they're written in the code.
  1239. #
  1240. def p_designator_list(self, p):
  1241. """ designator_list : designator
  1242. | designator_list designator
  1243. """
  1244. p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]]
  1245. def p_designator(self, p):
  1246. """ designator : LBRACKET constant_expression RBRACKET
  1247. | PERIOD identifier
  1248. """
  1249. p[0] = p[2]
  1250. def p_type_name(self, p):
  1251. """ type_name : specifier_qualifier_list abstract_declarator_opt
  1252. """
  1253. typename = c_ast.Typename(
  1254. name='',
  1255. quals=p[1]['qual'][:],
  1256. align=None,
  1257. type=p[2] or c_ast.TypeDecl(None, None, None, None),
  1258. coord=self._token_coord(p, 2))
  1259. p[0] = self._fix_decl_name_type(typename, p[1]['type'])
  1260. def p_abstract_declarator_1(self, p):
  1261. """ abstract_declarator : pointer
  1262. """
  1263. dummytype = c_ast.TypeDecl(None, None, None, None)
  1264. p[0] = self._type_modify_decl(
  1265. decl=dummytype,
  1266. modifier=p[1])
  1267. def p_abstract_declarator_2(self, p):
  1268. """ abstract_declarator : pointer direct_abstract_declarator
  1269. """
  1270. p[0] = self._type_modify_decl(p[2], p[1])
  1271. def p_abstract_declarator_3(self, p):
  1272. """ abstract_declarator : direct_abstract_declarator
  1273. """
  1274. p[0] = p[1]
  1275. # Creating and using direct_abstract_declarator_opt here
  1276. # instead of listing both direct_abstract_declarator and the
  1277. # lack of it in the beginning of _1 and _2 caused two
  1278. # shift/reduce errors.
  1279. #
  1280. def p_direct_abstract_declarator_1(self, p):
  1281. """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """
  1282. p[0] = p[2]
  1283. def p_direct_abstract_declarator_2(self, p):
  1284. """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET
  1285. """
  1286. arr = c_ast.ArrayDecl(
  1287. type=None,
  1288. dim=p[3],
  1289. dim_quals=[],
  1290. coord=p[1].coord)
  1291. p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
  1292. def p_direct_abstract_declarator_3(self, p):
  1293. """ direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
  1294. """
  1295. quals = (p[2] if len(p) > 4 else []) or []
  1296. p[0] = c_ast.ArrayDecl(
  1297. type=c_ast.TypeDecl(None, None, None, None),
  1298. dim=p[3] if len(p) > 4 else p[2],
  1299. dim_quals=quals,
  1300. coord=self._token_coord(p, 1))
  1301. def p_direct_abstract_declarator_4(self, p):
  1302. """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET
  1303. """
  1304. arr = c_ast.ArrayDecl(
  1305. type=None,
  1306. dim=c_ast.ID(p[3], self._token_coord(p, 3)),
  1307. dim_quals=[],
  1308. coord=p[1].coord)
  1309. p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
  1310. def p_direct_abstract_declarator_5(self, p):
  1311. """ direct_abstract_declarator : LBRACKET TIMES RBRACKET
  1312. """
  1313. p[0] = c_ast.ArrayDecl(
  1314. type=c_ast.TypeDecl(None, None, None, None),
  1315. dim=c_ast.ID(p[3], self._token_coord(p, 3)),
  1316. dim_quals=[],
  1317. coord=self._token_coord(p, 1))
  1318. def p_direct_abstract_declarator_6(self, p):
  1319. """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN
  1320. """
  1321. func = c_ast.FuncDecl(
  1322. args=p[3],
  1323. type=None,
  1324. coord=p[1].coord)
  1325. p[0] = self._type_modify_decl(decl=p[1], modifier=func)
  1326. def p_direct_abstract_declarator_7(self, p):
  1327. """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN
  1328. """
  1329. p[0] = c_ast.FuncDecl(
  1330. args=p[2],
  1331. type=c_ast.TypeDecl(None, None, None, None),
  1332. coord=self._token_coord(p, 1))
  1333. # declaration is a list, statement isn't. To make it consistent, block_item
  1334. # will always be a list
  1335. #
  1336. def p_block_item(self, p):
  1337. """ block_item : declaration
  1338. | statement
  1339. """
  1340. p[0] = p[1] if isinstance(p[1], list) else [p[1]]
  1341. # Since we made block_item a list, this just combines lists
  1342. #
  1343. def p_block_item_list(self, p):
  1344. """ block_item_list : block_item
  1345. | block_item_list block_item
  1346. """
  1347. # Empty block items (plain ';') produce [None], so ignore them
  1348. p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2]
  1349. def p_compound_statement_1(self, p):
  1350. """ compound_statement : brace_open block_item_list_opt brace_close """
  1351. p[0] = c_ast.Compound(
  1352. block_items=p[2],
  1353. coord=self._token_coord(p, 1))
  1354. def p_labeled_statement_1(self, p):
  1355. """ labeled_statement : ID COLON pragmacomp_or_statement """
  1356. p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1))
  1357. def p_labeled_statement_2(self, p):
  1358. """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """
  1359. p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1))
  1360. def p_labeled_statement_3(self, p):
  1361. """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """
  1362. p[0] = c_ast.Default([p[3]], self._token_coord(p, 1))
  1363. def p_selection_statement_1(self, p):
  1364. """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """
  1365. p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1))
  1366. def p_selection_statement_2(self, p):
  1367. """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """
  1368. p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1))
  1369. def p_selection_statement_3(self, p):
  1370. """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """
  1371. p[0] = fix_switch_cases(
  1372. c_ast.Switch(p[3], p[5], self._token_coord(p, 1)))
  1373. def p_iteration_statement_1(self, p):
  1374. """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """
  1375. p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1))
  1376. def p_iteration_statement_2(self, p):
  1377. """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """
  1378. p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1))
  1379. def p_iteration_statement_3(self, p):
  1380. """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
  1381. p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1))
  1382. def p_iteration_statement_4(self, p):
  1383. """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """
  1384. p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)),
  1385. p[4], p[6], p[8], self._token_coord(p, 1))
  1386. def p_jump_statement_1(self, p):
  1387. """ jump_statement : GOTO ID SEMI """
  1388. p[0] = c_ast.Goto(p[2], self._token_coord(p, 1))
  1389. def p_jump_statement_2(self, p):
  1390. """ jump_statement : BREAK SEMI """
  1391. p[0] = c_ast.Break(self._token_coord(p, 1))
  1392. def p_jump_statement_3(self, p):
  1393. """ jump_statement : CONTINUE SEMI """
  1394. p[0] = c_ast.Continue(self._token_coord(p, 1))
  1395. def p_jump_statement_4(self, p):
  1396. """ jump_statement : RETURN expression SEMI
  1397. | RETURN SEMI
  1398. """
  1399. p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1))
  1400. def p_expression_statement(self, p):
  1401. """ expression_statement : expression_opt SEMI """
  1402. if p[1] is None:
  1403. p[0] = c_ast.EmptyStatement(self._token_coord(p, 2))
  1404. else:
  1405. p[0] = p[1]
  1406. def p_expression(self, p):
  1407. """ expression : assignment_expression
  1408. | expression COMMA assignment_expression
  1409. """
  1410. if len(p) == 2:
  1411. p[0] = p[1]
  1412. else:
  1413. if not isinstance(p[1], c_ast.ExprList):
  1414. p[1] = c_ast.ExprList([p[1]], p[1].coord)
  1415. p[1].exprs.append(p[3])
  1416. p[0] = p[1]
  1417. def p_parenthesized_compound_expression(self, p):
  1418. """ assignment_expression : LPAREN compound_statement RPAREN """
  1419. p[0] = p[2]
  1420. def p_typedef_name(self, p):
  1421. """ typedef_name : TYPEID """
  1422. p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
  1423. def p_assignment_expression(self, p):
  1424. """ assignment_expression : conditional_expression
  1425. | unary_expression assignment_operator assignment_expression
  1426. """
  1427. if len(p) == 2:
  1428. p[0] = p[1]
  1429. else:
  1430. p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord)
# K&R2 defines these as many separate rules, to encode
# precedence and associativity. Why work hard? I'll just use
# the built-in precedence/associativity specification feature
# of PLY. (see precedence declaration above)
#
  1436. def p_assignment_operator(self, p):
  1437. """ assignment_operator : EQUALS
  1438. | XOREQUAL
  1439. | TIMESEQUAL
  1440. | DIVEQUAL
  1441. | MODEQUAL
  1442. | PLUSEQUAL
  1443. | MINUSEQUAL
  1444. | LSHIFTEQUAL
  1445. | RSHIFTEQUAL
  1446. | ANDEQUAL
  1447. | OREQUAL
  1448. """
  1449. p[0] = p[1]
  1450. def p_constant_expression(self, p):
  1451. """ constant_expression : conditional_expression """
  1452. p[0] = p[1]
  1453. def p_conditional_expression(self, p):
  1454. """ conditional_expression : binary_expression
  1455. | binary_expression CONDOP expression COLON conditional_expression
  1456. """
  1457. if len(p) == 2:
  1458. p[0] = p[1]
  1459. else:
  1460. p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord)
  1461. def p_binary_expression(self, p):
  1462. """ binary_expression : cast_expression
  1463. | binary_expression TIMES binary_expression
  1464. | binary_expression DIVIDE binary_expression
  1465. | binary_expression MOD binary_expression
  1466. | binary_expression PLUS binary_expression
  1467. | binary_expression MINUS binary_expression
  1468. | binary_expression RSHIFT binary_expression
  1469. | binary_expression LSHIFT binary_expression
  1470. | binary_expression LT binary_expression
  1471. | binary_expression LE binary_expression
  1472. | binary_expression GE binary_expression
  1473. | binary_expression GT binary_expression
  1474. | binary_expression EQ binary_expression
  1475. | binary_expression NE binary_expression
  1476. | binary_expression AND binary_expression
  1477. | binary_expression OR binary_expression
  1478. | binary_expression XOR binary_expression
  1479. | binary_expression LAND binary_expression
  1480. | binary_expression LOR binary_expression
  1481. """
  1482. if len(p) == 2:
  1483. p[0] = p[1]
  1484. else:
  1485. p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord)
  1486. def p_cast_expression_1(self, p):
  1487. """ cast_expression : unary_expression """
  1488. p[0] = p[1]
  1489. def p_cast_expression_2(self, p):
  1490. """ cast_expression : LPAREN type_name RPAREN cast_expression """
  1491. p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1))
  1492. def p_unary_expression_1(self, p):
  1493. """ unary_expression : postfix_expression """
  1494. p[0] = p[1]
  1495. def p_unary_expression_2(self, p):
  1496. """ unary_expression : PLUSPLUS unary_expression
  1497. | MINUSMINUS unary_expression
  1498. | unary_operator cast_expression
  1499. """
  1500. p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord)
  1501. def p_unary_expression_3(self, p):
  1502. """ unary_expression : SIZEOF unary_expression
  1503. | SIZEOF LPAREN type_name RPAREN
  1504. | _ALIGNOF LPAREN type_name RPAREN
  1505. """
  1506. p[0] = c_ast.UnaryOp(
  1507. p[1],
  1508. p[2] if len(p) == 3 else p[3],
  1509. self._token_coord(p, 1))
  1510. def p_unary_operator(self, p):
  1511. """ unary_operator : AND
  1512. | TIMES
  1513. | PLUS
  1514. | MINUS
  1515. | NOT
  1516. | LNOT
  1517. """
  1518. p[0] = p[1]
  1519. def p_postfix_expression_1(self, p):
  1520. """ postfix_expression : primary_expression """
  1521. p[0] = p[1]
  1522. def p_postfix_expression_2(self, p):
  1523. """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """
  1524. p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
  1525. def p_postfix_expression_3(self, p):
  1526. """ postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN
  1527. | postfix_expression LPAREN RPAREN
  1528. """
  1529. p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord)
  1530. def p_postfix_expression_4(self, p):
  1531. """ postfix_expression : postfix_expression PERIOD ID
  1532. | postfix_expression PERIOD TYPEID
  1533. | postfix_expression ARROW ID
  1534. | postfix_expression ARROW TYPEID
  1535. """
  1536. field = c_ast.ID(p[3], self._token_coord(p, 3))
  1537. p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
  1538. def p_postfix_expression_5(self, p):
  1539. """ postfix_expression : postfix_expression PLUSPLUS
  1540. | postfix_expression MINUSMINUS
  1541. """
  1542. p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord)
  1543. def p_postfix_expression_6(self, p):
  1544. """ postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close
  1545. | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close
  1546. """
  1547. p[0] = c_ast.CompoundLiteral(p[2], p[5])
  1548. def p_primary_expression_1(self, p):
  1549. """ primary_expression : identifier """
  1550. p[0] = p[1]
  1551. def p_primary_expression_2(self, p):
  1552. """ primary_expression : constant """
  1553. p[0] = p[1]
  1554. def p_primary_expression_3(self, p):
  1555. """ primary_expression : unified_string_literal
  1556. | unified_wstring_literal
  1557. """
  1558. p[0] = p[1]
  1559. def p_primary_expression_4(self, p):
  1560. """ primary_expression : LPAREN expression RPAREN """
  1561. p[0] = p[2]
  1562. def p_primary_expression_5(self, p):
  1563. """ primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN
  1564. """
  1565. coord = self._token_coord(p, 1)
  1566. p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord),
  1567. c_ast.ExprList([p[3], p[5]], coord),
  1568. coord)
def p_offsetof_member_designator(self, p):
    """ offsetof_member_designator : identifier
                                   | offsetof_member_designator PERIOD identifier
                                   | offsetof_member_designator LBRACKET expression RBRACKET
    """
    # Builds the member-designator argument of offsetof(): either a bare
    # identifier, a '.' member access chain, or an array subscript, keyed
    # off the production length.
    if len(p) == 2:
        # bare identifier
        p[0] = p[1]
    elif len(p) == 4:
        # designator '.' identifier -- p[2] is the '.' token text
        p[0] = c_ast.StructRef(p[1], p[2], p[3], p[1].coord)
    elif len(p) == 5:
        # designator '[' expression ']'
        p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
    else:
        # Defensive: no grammar alternative above can produce another length.
        raise NotImplementedError("Unexpected parsing state. len(p): %u" % len(p))
  1582. def p_argument_expression_list(self, p):
  1583. """ argument_expression_list : assignment_expression
  1584. | argument_expression_list COMMA assignment_expression
  1585. """
  1586. if len(p) == 2: # single expr
  1587. p[0] = c_ast.ExprList([p[1]], p[1].coord)
  1588. else:
  1589. p[1].exprs.append(p[3])
  1590. p[0] = p[1]
  1591. def p_identifier(self, p):
  1592. """ identifier : ID """
  1593. p[0] = c_ast.ID(p[1], self._token_coord(p, 1))
  1594. def p_constant_1(self, p):
  1595. """ constant : INT_CONST_DEC
  1596. | INT_CONST_OCT
  1597. | INT_CONST_HEX
  1598. | INT_CONST_BIN
  1599. | INT_CONST_CHAR
  1600. """
  1601. uCount = 0
  1602. lCount = 0
  1603. for x in p[1][-3:]:
  1604. if x in ('l', 'L'):
  1605. lCount += 1
  1606. elif x in ('u', 'U'):
  1607. uCount += 1
  1608. t = ''
  1609. if uCount > 1:
  1610. raise ValueError('Constant cannot have more than one u/U suffix.')
  1611. elif lCount > 2:
  1612. raise ValueError('Constant cannot have more than two l/L suffix.')
  1613. prefix = 'unsigned ' * uCount + 'long ' * lCount
  1614. p[0] = c_ast.Constant(
  1615. prefix + 'int', p[1], self._token_coord(p, 1))
  1616. def p_constant_2(self, p):
  1617. """ constant : FLOAT_CONST
  1618. | HEX_FLOAT_CONST
  1619. """
  1620. if 'x' in p[1].lower():
  1621. t = 'float'
  1622. else:
  1623. if p[1][-1] in ('f', 'F'):
  1624. t = 'float'
  1625. elif p[1][-1] in ('l', 'L'):
  1626. t = 'long double'
  1627. else:
  1628. t = 'double'
  1629. p[0] = c_ast.Constant(
  1630. t, p[1], self._token_coord(p, 1))
  1631. def p_constant_3(self, p):
  1632. """ constant : CHAR_CONST
  1633. | WCHAR_CONST
  1634. | U8CHAR_CONST
  1635. | U16CHAR_CONST
  1636. | U32CHAR_CONST
  1637. """
  1638. p[0] = c_ast.Constant(
  1639. 'char', p[1], self._token_coord(p, 1))
  1640. # The "unified" string and wstring literal rules are for supporting
  1641. # concatenation of adjacent string literals.
  1642. # I.e. "hello " "world" is seen by the C compiler as a single string literal
  1643. # with the value "hello world"
  1644. #
  1645. def p_unified_string_literal(self, p):
  1646. """ unified_string_literal : STRING_LITERAL
  1647. | unified_string_literal STRING_LITERAL
  1648. """
  1649. if len(p) == 2: # single literal
  1650. p[0] = c_ast.Constant(
  1651. 'string', p[1], self._token_coord(p, 1))
  1652. else:
  1653. p[1].value = p[1].value[:-1] + p[2][1:]
  1654. p[0] = p[1]
  1655. def p_unified_wstring_literal(self, p):
  1656. """ unified_wstring_literal : WSTRING_LITERAL
  1657. | U8STRING_LITERAL
  1658. | U16STRING_LITERAL
  1659. | U32STRING_LITERAL
  1660. | unified_wstring_literal WSTRING_LITERAL
  1661. | unified_wstring_literal U8STRING_LITERAL
  1662. | unified_wstring_literal U16STRING_LITERAL
  1663. | unified_wstring_literal U32STRING_LITERAL
  1664. """
  1665. if len(p) == 2: # single literal
  1666. p[0] = c_ast.Constant(
  1667. 'string', p[1], self._token_coord(p, 1))
  1668. else:
  1669. p[1].value = p[1].value.rstrip()[:-1] + p[2][2:]
  1670. p[0] = p[1]
def p_brace_open(self, p):
    """ brace_open : LBRACE
    """
    p[0] = p[1]
    # Copy the brace token's line number onto the production result (slot 0)
    # so rules containing brace_open can recover an accurate coordinate.
    p.set_lineno(0, p.lineno(1))
def p_brace_close(self, p):
    """ brace_close : RBRACE
    """
    p[0] = p[1]
    # Copy the brace token's line number onto the production result (slot 0)
    # so rules containing brace_close can recover an accurate coordinate.
    p.set_lineno(0, p.lineno(1))
  1681. def p_empty(self, p):
  1682. 'empty : '
  1683. p[0] = None
def p_error(self, p):
    # PLY's error hook: called with the offending token, or None at EOF.
    #
    # If error recovery is added here in the future, make sure
    # _get_yacc_lookahead_token still works!
    #
    if p:
        # Report the token's text and its computed line/column position.
        self._parse_error(
            'before: %s' % p.value,
            self._coord(lineno=p.lineno,
                        column=self.clex.find_tok_column(p)))
    else:
        # No lookahead token: the input ended unexpectedly.
        self._parse_error('At end of input', self.clex.filename)