/* Header file for SSA iterators.
   Copyright (C) 2013-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_SSA_ITERATORS_H
#define GCC_SSA_ITERATORS_H
/* Immediate use lists are used to directly access all uses for an SSA
   name and get pointers to the statement for each use.

   The structure ssa_use_operand_t consists of PREV and NEXT pointers
   to maintain the list, a USE pointer, which points to the address
   where the use is located, and a LOC pointer, which points to the
   statement where the use is located or, in the case of the root
   node, to the SSA name itself.

   The list is anchored by an occurrence of ssa_operand_d *in* the
   ssa_name node itself (named 'imm_uses').  This node is uniquely
   identified by having a NULL USE pointer and a LOC pointer pointing
   back to the ssa_name node itself.  This node forms the base for a
   circular list, and initially this is the only node in the list.

   Fast iteration allows each use to be examined, but does not allow
   any modifications to the uses or stmts.

   Normal iteration allows insertion, deletion, and modification.  The
   iterator manages this by inserting a marker node into the list
   immediately before the node currently being examined in the list.
   This marker node is uniquely identified by having a NULL stmt *and*
   a NULL use pointer.

   When iterating to the next use, the iteration routines check to see
   if the node after the marker has changed.  If it has, then the node
   following the marker is now the next one to be visited.  If not, the
   marker node is moved past that node in the list (visualize it as
   bumping the marker node through the list).  This continues until
   the marker node is moved to the original anchor position.  The
   marker node is then removed from the list.

   If iteration is halted early, the marker node must be removed from
   the list before continuing.  */
struct imm_use_iterator
{
  /* This is the current use the iterator is processing.  */
  ssa_use_operand_t *imm_use;
  /* This marks the last use in the list (use node from SSA_NAME).  */
  ssa_use_operand_t *end_p;
  /* This node is inserted and used to mark the end of the uses for a
     stmt.  */
  ssa_use_operand_t iter_node;
  /* This is the next SSA name to visit.  IMM_USE may get removed before
     the next one is traversed to, so it must be cached early.  */
  ssa_use_operand_t *next_imm_name;
};
/* Use this iterator when simply looking at stmts.  Adding, deleting or
   modifying stmts will cause this iterator to malfunction.  */

#define FOR_EACH_IMM_USE_FAST(DEST, ITER, SSAVAR)		\
  for ((DEST) = first_readonly_imm_use (&(ITER), (SSAVAR));	\
       !end_readonly_imm_use_p (&(ITER));			\
       (void) ((DEST) = next_readonly_imm_use (&(ITER))))
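
/* For instance, a read-only walk that merely inspects each use -- here,
   counting the nondebug uses of SSAVAR -- might look like the following
   minimal sketch (illustrative only, not part of this interface):

     imm_use_iterator iter;
     use_operand_p use_p;
     unsigned count = 0;

     FOR_EACH_IMM_USE_FAST (use_p, iter, ssavar)
       if (!is_gimple_debug (USE_STMT (use_p)))
	 count++;
   */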
/* Use this iterator to visit each stmt which has a use of SSAVAR.  */

#define FOR_EACH_IMM_USE_STMT(STMT, ITER, SSAVAR)		\
  for ((STMT) = first_imm_use_stmt (&(ITER), (SSAVAR));	\
       !end_imm_use_stmt_p (&(ITER));				\
       (void) ((STMT) = next_imm_use_stmt (&(ITER))))

/* Use this to terminate the FOR_EACH_IMM_USE_STMT loop early.  Failure to
   do so will result in leaving an iterator marker node in the immediate
   use list, and nothing good will come from that.  */
#define BREAK_FROM_IMM_USE_STMT(ITER)				\
  {								\
    end_imm_use_stmt_traverse (&(ITER));			\
    break;							\
  }

/* Similarly for return.  */
#define RETURN_FROM_IMM_USE_STMT(ITER, VAL)			\
  {								\
    end_imm_use_stmt_traverse (&(ITER));			\
    return (VAL);						\
  }
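
/* A typical use of the above: scan the statements that use SSAVAR and stop
   at the first interesting one, taking care to remove the marker node via
   BREAK_FROM_IMM_USE_STMT.  A minimal sketch ('interesting_p' is a
   hypothetical predicate, not part of this interface):

     imm_use_iterator iter;
     gimple *use_stmt, *found = NULL;

     FOR_EACH_IMM_USE_STMT (use_stmt, iter, ssavar)
       if (interesting_p (use_stmt))
	 {
	   found = use_stmt;
	   BREAK_FROM_IMM_USE_STMT (iter);
	 }
   */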
/* Use this iterator in combination with FOR_EACH_IMM_USE_STMT to
   get access to each occurrence of ssavar on the stmt returned by
   that iterator.  For instance:

     FOR_EACH_IMM_USE_STMT (stmt, iter, ssavar)
       {
	 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	   {
	     SET_USE (use_p, blah);
	   }
	 update_stmt (stmt);
       }  */

#define FOR_EACH_IMM_USE_ON_STMT(DEST, ITER)			\
  for ((DEST) = first_imm_use_on_stmt (&(ITER));		\
       !end_imm_use_on_stmt_p (&(ITER));			\
       (void) ((DEST) = next_imm_use_on_stmt (&(ITER))))
extern bool single_imm_use_1 (const ssa_use_operand_t *head,
			      use_operand_p *use_p, gimple **stmt);


enum ssa_op_iter_type {
  ssa_op_iter_none = 0,
  ssa_op_iter_tree,
  ssa_op_iter_use,
  ssa_op_iter_def
};

/* This structure is used in the operand iterator loops.  It contains the
   items required to determine which operand is retrieved next.  During
   optimization, this structure is scalarized, and any unused fields are
   optimized away, resulting in little overhead.  */

struct ssa_op_iter
{
  enum ssa_op_iter_type iter_type;
  bool done;
  int flags;
  unsigned i;
  unsigned numops;
  use_optype_p uses;
  gimple *stmt;
};
/* NOTE: Keep these in sync with doc/tree-ssa.texi.  */
/* These flags are used to determine which operands are returned during
   execution of the loop.  */
#define SSA_OP_USE		0x01	/* Real USE operands.  */
#define SSA_OP_DEF		0x02	/* Real DEF operands.  */
#define SSA_OP_VUSE		0x04	/* VUSE operands.  */
#define SSA_OP_VDEF		0x08	/* VDEF operands.  */

/* These are commonly grouped operand flags.  */
#define SSA_OP_VIRTUAL_USES	(SSA_OP_VUSE)
#define SSA_OP_VIRTUAL_DEFS	(SSA_OP_VDEF)
#define SSA_OP_ALL_VIRTUALS	(SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_DEFS)
#define SSA_OP_ALL_USES		(SSA_OP_VIRTUAL_USES | SSA_OP_USE)
#define SSA_OP_ALL_DEFS		(SSA_OP_VIRTUAL_DEFS | SSA_OP_DEF)
#define SSA_OP_ALL_OPERANDS	(SSA_OP_ALL_USES | SSA_OP_ALL_DEFS)
/* This macro executes a loop over the operands of STMT specified in FLAGS,
   returning each operand as a 'tree' in the variable TREEVAR.  ITER is an
   ssa_op_iter structure used to control the loop.  */
#define FOR_EACH_SSA_TREE_OPERAND(TREEVAR, STMT, ITER, FLAGS)	\
  for (TREEVAR = op_iter_init_tree (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       (void) (TREEVAR = op_iter_next_tree (&(ITER))))
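
/* For example, all operands of a (non-PHI) statement that match
   SSA_OP_ALL_OPERANDS could be dumped with a loop such as the minimal
   sketch below (illustrative only; assumes dump_file is the active dump
   stream):

     tree op;
     ssa_op_iter iter;

     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
       print_generic_expr (dump_file, op, TDF_SLIM);
   */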
/* This macro executes a loop over the operands of STMT specified in FLAGS,
   returning each operand as a 'use_operand_p' in the variable USEVAR.
   ITER is an ssa_op_iter structure used to control the loop.  */
#define FOR_EACH_SSA_USE_OPERAND(USEVAR, STMT, ITER, FLAGS)	\
  for (USEVAR = op_iter_init_use (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       USEVAR = op_iter_next_use (&(ITER)))

/* This macro executes a loop over the operands of STMT specified in FLAGS,
   returning each operand as a 'def_operand_p' in the variable DEFVAR.
   ITER is an ssa_op_iter structure used to control the loop.  */
#define FOR_EACH_SSA_DEF_OPERAND(DEFVAR, STMT, ITER, FLAGS)	\
  for (DEFVAR = op_iter_init_def (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       DEFVAR = op_iter_next_def (&(ITER)))
/* This macro will execute a loop over all the arguments of a PHI which
   match FLAGS.  A use_operand_p is always returned via USEVAR.  FLAGS
   can be either SSA_OP_USE or SSA_OP_VIRTUAL_USES or SSA_OP_ALL_USES.  */
#define FOR_EACH_PHI_ARG(USEVAR, STMT, ITER, FLAGS)		\
  for ((USEVAR) = op_iter_init_phiuse (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       (USEVAR) = op_iter_next_use (&(ITER)))
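
/* For example, walking the real (non-virtual) arguments of a PHI node might
   look like this minimal sketch (illustrative only):

     use_operand_p arg_p;
     ssa_op_iter iter;

     FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE)
       {
	 tree arg = USE_FROM_PTR (arg_p);
	 ... inspect ARG ...
       }
   */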
/* This macro will execute a loop over a stmt, regardless of whether it is
   a real stmt or a PHI node, looking at the USE nodes matching FLAGS.  */
#define FOR_EACH_PHI_OR_STMT_USE(USEVAR, STMT, ITER, FLAGS)	\
  for ((USEVAR) = (gimple_code (STMT) == GIMPLE_PHI		\
		   ? op_iter_init_phiuse (&(ITER),		\
					  as_a <gphi *> (STMT),	\
					  FLAGS)		\
		   : op_iter_init_use (&(ITER), STMT, FLAGS));	\
       !op_iter_done (&(ITER));					\
       (USEVAR) = op_iter_next_use (&(ITER)))

/* This macro will execute a loop over a stmt, regardless of whether it is
   a real stmt or a PHI node, looking at the DEF nodes matching FLAGS.  */
#define FOR_EACH_PHI_OR_STMT_DEF(DEFVAR, STMT, ITER, FLAGS)	\
  for ((DEFVAR) = (gimple_code (STMT) == GIMPLE_PHI		\
		   ? op_iter_init_phidef (&(ITER),		\
					  as_a <gphi *> (STMT),	\
					  FLAGS)		\
		   : op_iter_init_def (&(ITER), STMT, FLAGS));	\
       !op_iter_done (&(ITER));					\
       (DEFVAR) = op_iter_next_def (&(ITER)))
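
/* These two macros make it possible to treat PHI nodes and ordinary
   statements uniformly.  For instance, replacing every use of OLD_NAME with
   NEW_NAME in a statement of either kind could be sketched as follows
   (illustrative only; OLD_NAME and NEW_NAME are hypothetical SSA names.
   SET_USE relinks the immediate-use lists directly; for ordinary statements
   the convention is to call update_stmt afterwards, as in the
   FOR_EACH_IMM_USE_ON_STMT example above):

     use_operand_p use_p;
     ssa_op_iter iter;

     FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
       if (USE_FROM_PTR (use_p) == old_name)
	 SET_USE (use_p, new_name);
     if (gimple_code (stmt) != GIMPLE_PHI)
       update_stmt (stmt);
   */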
/* This macro returns an operand in STMT as a tree if it is the ONLY
   operand matching FLAGS.  If there are 0 or more than 1 operand matching
   FLAGS, then NULL_TREE is returned.  */
#define SINGLE_SSA_TREE_OPERAND(STMT, FLAGS)		\
  single_ssa_tree_operand (STMT, FLAGS)

/* This macro returns an operand in STMT as a use_operand_p if it is the ONLY
   operand matching FLAGS.  If there are 0 or more than 1 operand matching
   FLAGS, then NULL_USE_OPERAND_P is returned.  */
#define SINGLE_SSA_USE_OPERAND(STMT, FLAGS)		\
  single_ssa_use_operand (STMT, FLAGS)

/* This macro returns an operand in STMT as a def_operand_p if it is the ONLY
   operand matching FLAGS.  If there are 0 or more than 1 operand matching
   FLAGS, then NULL_DEF_OPERAND_P is returned.  */
#define SINGLE_SSA_DEF_OPERAND(STMT, FLAGS)		\
  single_ssa_def_operand (STMT, FLAGS)

/* This macro returns TRUE if there are no operands matching FLAGS in STMT.  */
#define ZERO_SSA_OPERANDS(STMT, FLAGS)	zero_ssa_operands (STMT, FLAGS)

/* This macro counts the number of operands in STMT matching FLAGS.  */
#define NUM_SSA_OPERANDS(STMT, FLAGS)	num_ssa_operands (STMT, FLAGS)
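
/* For example, for a non-PHI statement a pass can quickly test whether it
   defines anything at all, or count how many uses (real plus virtual) it
   has, along the lines of this minimal sketch (illustrative only):

     bool no_defs = ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_DEFS);
     int nuses = NUM_SSA_OPERANDS (stmt, SSA_OP_ALL_USES);
   */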
/* Delink an immediate_uses node from its chain.  */
static inline void
delink_imm_use (ssa_use_operand_t *linknode)
{
  /* Return if this node is not in a list.  */
  if (linknode->prev == NULL)
    return;

  linknode->prev->next = linknode->next;
  linknode->next->prev = linknode->prev;
  linknode->prev = NULL;
  linknode->next = NULL;
}

/* Link ssa_imm_use node LINKNODE into the chain for LIST.  */
static inline void
link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
{
  /* Link the new node at the head of the list.  If we are in the process of
     traversing the list, we won't visit any new nodes added to it.  */
  linknode->prev = list;
  linknode->next = list->next;
  list->next->prev = linknode;
  list->next = linknode;
}
/* Link ssa_imm_use node LINKNODE into the chain for DEF.  */
static inline void
link_imm_use (ssa_use_operand_t *linknode, tree def)
{
  ssa_use_operand_t *root;

  if (!def || TREE_CODE (def) != SSA_NAME)
    linknode->prev = NULL;
  else
    {
      root = &(SSA_NAME_IMM_USE_NODE (def));
      if (linknode->use)
	gcc_checking_assert (*(linknode->use) == def);
      link_imm_use_to_list (linknode, root);
    }
}

/* Set the value of a use pointed to by USE to VAL.  */
static inline void
set_ssa_use_from_ptr (use_operand_p use, tree val)
{
  delink_imm_use (use);
  *(use->use) = val;
  link_imm_use (use, val);
}

/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
   in STMT.  */
static inline void
link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple *stmt)
{
  if (stmt)
    link_imm_use (linknode, def);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}
/* Relink a new node in place of an old node in the list.  */
static inline void
relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
{
  /* The new node and the old one had better refer to the same SSA name.  */
  gcc_checking_assert (*(old->use) == *(node->use));
  node->prev = old->prev;
  node->next = old->next;
  if (old->prev)
    {
      old->prev->next = node;
      old->next->prev = node;
      /* Remove the old node from the list.  */
      old->prev = NULL;
    }
}

/* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
   in STMT.  */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
		     gimple *stmt)
{
  if (stmt)
    relink_imm_use (linknode, old);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}
/* Return true if IMM has reached the end of the immediate use list.  */
static inline bool
end_readonly_imm_use_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Initialize iterator IMM to process the list for VAR.  */
static inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->iter_node.next = imm->imm_use->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}

/* Bump IMM to the next use in the list.  */
static inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  if (flag_checking)
    {
      gcc_assert (imm->iter_node.next == old->next);
      imm->iter_node.next = old->next->next;
    }

  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
/* Return true if VAR has no nondebug uses.  */
static inline bool
has_zero_uses (const_tree var)
{
  const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (var));
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if VAR has a single nondebug use.  */
static inline bool
has_single_use (const_tree var)
{
  const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (var));
  const ssa_use_operand_t *ptr;
  bool single = false;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
      {
	if (single)
	  return false;
	else
	  single = true;
      }

  return single;
}
/* If VAR has only a single immediate nondebug use, return true, and
   set USE_P and STMT to the use pointer and stmt of occurrence.  */
static inline bool
single_imm_use (const_tree var, use_operand_p *use_p, gimple **stmt)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    {
    return_false:
      *use_p = NULL_USE_OPERAND_P;
      *stmt = NULL;
      return false;
    }

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    {
      if (USE_STMT (ptr->next) && !is_gimple_debug (USE_STMT (ptr->next)))
	{
	  *use_p = ptr->next;
	  *stmt = ptr->next->loc.stmt;
	  return true;
	}
      else
	goto return_false;
    }

  return single_imm_use_1 (ptr, use_p, stmt);
}
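
/* A common pattern in the optimizers: check whether an SSA name feeds
   exactly one nondebug statement and, if so, operate on that statement
   directly.  A minimal sketch (illustrative only; the transformation itself
   is left as prose):

     use_operand_p use_p;
     gimple *use_stmt;

     if (single_imm_use (name, &use_p, &use_stmt))
       {
	 ... use_stmt is the sole nondebug user of NAME and use_p points at
	     the operand slot, so the use can be rewritten with SET_USE
	     followed by update_stmt (use_stmt). ...
       }
   */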
/* Return the number of nondebug immediate uses of VAR.  */
static inline unsigned int
num_imm_uses (const_tree var)
{
  const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
  const ssa_use_operand_t *ptr;
  unsigned int num = 0;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    {
      for (ptr = start->next; ptr != start; ptr = ptr->next)
	if (USE_STMT (ptr))
	  num++;
    }
  else
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
	num++;

  return num;
}
/* -----------------------------------------------------------------------  */

/* The following set of routines are used to iterate over the various types
   of SSA operands.  */

/* Return true if PTR is finished iterating.  */
static inline bool
op_iter_done (const ssa_op_iter *ptr)
{
  return ptr->done;
}
/* Get the next iterator use value for PTR.  */
static inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  if (ptr->i < ptr->numops)
    {
      return PHI_ARG_DEF_PTR (ptr->stmt, (ptr->i)++);
    }
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}

/* Get the next iterator def value for PTR.  */
static inline def_operand_p
op_iter_next_def (ssa_op_iter *ptr)
{
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
  if (ptr->flags & SSA_OP_VDEF)
    {
      tree *p;
      ptr->flags &= ~SSA_OP_VDEF;
      p = gimple_vdef_ptr (ptr->stmt);
      if (p && *p)
	return p;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
	{
	  tree *val = gimple_op_ptr (ptr->stmt, ptr->i);
	  ptr->i++;
	  if (*val)
	    {
	      if (TREE_CODE (*val) == TREE_LIST)
		val = &TREE_VALUE (*val);
	      if (TREE_CODE (*val) == SSA_NAME
		  || is_gimple_reg (*val))
		return val;
	    }
	}
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_DEF_OPERAND_P;
}

/* Get the next iterator tree value for PTR.  */
static inline tree
op_iter_next_tree (ssa_op_iter *ptr)
{
  tree val;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
  if (ptr->uses)
    {
      val = USE_OP (ptr->uses);
      ptr->uses = ptr->uses->next;
      return val;
    }
  if (ptr->flags & SSA_OP_VDEF)
    {
      ptr->flags &= ~SSA_OP_VDEF;
      if ((val = gimple_vdef (ptr->stmt)))
	return val;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
	{
	  val = gimple_op (ptr->stmt, ptr->i);
	  ptr->i++;
	  if (val)
	    {
	      if (TREE_CODE (val) == TREE_LIST)
		val = TREE_VALUE (val);
	      if (TREE_CODE (val) == SSA_NAME
		  || is_gimple_reg (val))
		return val;
	    }
	}
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_TREE;
}
/* This function clears the iterator PTR, and marks it done.  This is normally
   used to prevent warnings in the compiler about possibly uninitialized
   components.  */
static inline void
clear_and_done_ssa_iter (ssa_op_iter *ptr)
{
  ptr->i = 0;
  ptr->numops = 0;
  ptr->uses = NULL;
  ptr->iter_type = ssa_op_iter_none;
  ptr->stmt = NULL;
  ptr->done = true;
  ptr->flags = 0;
}
/* Initialize the iterator PTR to iterate over the operands of STMT that
   match FLAGS.  */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  /* PHI nodes require a different iterator initialization path.  We
     do not support iterating over virtual defs or uses without
     iterating over defs or uses at the same time.  */
  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI
		       && (!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
		       && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
  ptr->numops = 0;
  if (flags & (SSA_OP_DEF | SSA_OP_VDEF))
    {
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	case GIMPLE_CALL:
	  ptr->numops = 1;
	  break;
	case GIMPLE_ASM:
	  ptr->numops = gimple_asm_noutputs (as_a <gasm *> (stmt));
	  break;
	case GIMPLE_TRANSACTION:
	  ptr->numops = 0;
	  flags &= ~SSA_OP_DEF;
	  break;
	default:
	  ptr->numops = 0;
	  flags &= ~(SSA_OP_DEF | SSA_OP_VDEF);
	  break;
	}
    }
  ptr->uses = (flags & (SSA_OP_USE | SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
  if (!(flags & SSA_OP_VUSE)
      && ptr->uses
      && gimple_vuse (stmt) != NULL_TREE)
    ptr->uses = ptr->uses->next;
  ptr->done = false;
  ptr->i = 0;

  ptr->stmt = stmt;
  ptr->flags = flags;
}
/* Initialize iterator PTR to the use operands in STMT based on FLAGS.  Return
   the first use.  */
static inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
		       && (flags & SSA_OP_USE));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}

/* Initialize iterator PTR to the def operands in STMT based on FLAGS.  Return
   the first def.  */
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
		       && (flags & SSA_OP_DEF));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_def;
  return op_iter_next_def (ptr);
}

/* Initialize iterator PTR to the operands in STMT based on FLAGS.  Return
   the first operand as a tree.  */
static inline tree
op_iter_init_tree (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_tree;
  return op_iter_next_tree (ptr);
}
/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline tree
single_ssa_tree_operand (gimple *stmt, int flags)
{
  tree var;
  ssa_op_iter iter;

  var = op_iter_init_tree (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_TREE;
  op_iter_next_tree (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_TREE;
}

/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline use_operand_p
single_ssa_use_operand (gimple *stmt, int flags)
{
  use_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_use (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_USE_OPERAND_P;
  op_iter_next_use (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_USE_OPERAND_P;
}

/* Return the single virtual use operand in STMT if present.  Otherwise
   return NULL.  */
static inline use_operand_p
ssa_vuse_operand (gimple *stmt)
{
  if (! gimple_vuse (stmt))
    return NULL_USE_OPERAND_P;
  return USE_OP_PTR (gimple_use_ops (stmt));
}
/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline def_operand_p
single_ssa_def_operand (gimple *stmt, int flags)
{
  def_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_def (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_DEF_OPERAND_P;
  op_iter_next_def (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_DEF_OPERAND_P;
}

/* Return true if there are zero operands in STMT matching the type
   given in FLAGS.  */
static inline bool
zero_ssa_operands (gimple *stmt, int flags)
{
  ssa_op_iter iter;

  op_iter_init_tree (&iter, stmt, flags);
  return op_iter_done (&iter);
}

/* Return the number of operands matching FLAGS in STMT.  */
static inline int
num_ssa_operands (gimple *stmt, int flags)
{
  ssa_op_iter iter;
  tree t;
  int num = 0;

  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI);
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
    num++;
  return num;
}
/* If there is a single DEF in the PHI node which matches FLAGS, return it.
   Otherwise return NULL_TREE.  */
static inline tree
single_phi_def (gphi *stmt, int flags)
{
  tree def = PHI_RESULT (stmt);
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
    return def;
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
    return def;
  return NULL_TREE;
}
/* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS should
   be either SSA_OP_USE or SSA_OP_VIRTUAL_USES.  */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gphi *phi, int flags)
{
  tree phi_def = gimple_phi_result (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  ptr->stmt = phi;
  ptr->numops = gimple_phi_num_args (phi);
  ptr->iter_type = ssa_op_iter_use;
  ptr->flags = flags;
  return op_iter_next_use (ptr);
}

/* Start an iterator for a PHI definition.  */
static inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gphi *phi, int flags)
{
  tree phi_def = PHI_RESULT (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_DEF_OPERAND_P;
    }

  ptr->iter_type = ssa_op_iter_def;
  /* The first call to op_iter_next_def will terminate the iterator since
     all the fields are NULL.  Simply return the result here as the first and
     therefore only result.  */
  return PHI_RESULT_PTR (phi);
}
/* Return true if IMM has reached the end of the immediate use stmt list.  */
static inline bool
end_imm_use_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Finish the traversal of an immediate use stmt list IMM by removing the
   placeholder node from the list.  */
static inline void
end_imm_use_stmt_traverse (imm_use_iterator *imm)
{
  delink_imm_use (&(imm->iter_node));
}
/* Immediate use traversal of uses within a stmt requires that all the
   uses on a stmt be sequentially listed.  This routine is used to build up
   this sequential list by adding USE_P to the end of the current list
   currently delimited by HEAD and LAST_P.  The new LAST_P value is
   returned.  */
static inline use_operand_p
move_use_after_head (use_operand_p use_p, use_operand_p head,
		     use_operand_p last_p)
{
  gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
  /* Skip head when we find it.  */
  if (use_p != head)
    {
      /* If use_p is already linked in after last_p, continue.  */
      if (last_p->next == use_p)
	last_p = use_p;
      else
	{
	  /* Delink from current location, and link in at last_p.  */
	  delink_imm_use (use_p);
	  link_imm_use_to_list (use_p, last_p);
	  last_p = use_p;
	}
    }
  return last_p;
}
/* This routine will relink all uses with the same stmt as HEAD into the list
   immediately following HEAD for iterator IMM.  */
static inline void
link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
  use_operand_p use_p;
  use_operand_p last_p = head;
  gimple *head_stmt = USE_STMT (head);
  tree use = USE_FROM_PTR (head);
  ssa_op_iter op_iter;
  int flag;

  /* Only look at virtual or real uses, depending on the type of HEAD.  */
  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  if (gphi *phi = dyn_cast <gphi *> (head_stmt))
    {
      FOR_EACH_PHI_ARG (use_p, phi, op_iter, flag)
	if (USE_FROM_PTR (use_p) == use)
	  last_p = move_use_after_head (use_p, head, last_p);
    }
  else
    {
      if (flag == SSA_OP_USE)
	{
	  FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
	    if (USE_FROM_PTR (use_p) == use)
	      last_p = move_use_after_head (use_p, head, last_p);
	}
      else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
	{
	  if (USE_FROM_PTR (use_p) == use)
	    last_p = move_use_after_head (use_p, head, last_p);
	}
    }
  /* Link iter node in after last_p.  */
  if (imm->iter_node.prev != NULL)
    delink_imm_use (&imm->iter_node);
  link_imm_use_to_list (&(imm->iter_node), last_p);
}
/* Initialize IMM to traverse over uses of VAR.  Return the first statement.  */
static inline gimple *
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->next_imm_name = NULL_USE_OPERAND_P;

  /* iter_node is used as a marker within the immediate use list to indicate
     where the end of the current stmt's uses are.  Initialize it to NULL
     stmt and use, which indicates a marker node.  */
  imm->iter_node.prev = NULL_USE_OPERAND_P;
  imm->iter_node.next = NULL_USE_OPERAND_P;
  imm->iter_node.loc.stmt = NULL;
  imm->iter_node.use = NULL;

  if (end_imm_use_stmt_p (imm))
    return NULL;

  link_use_stmts_after (imm->imm_use, imm);

  return USE_STMT (imm->imm_use);
}

/* Bump IMM to the next stmt which has a use of var.  */
static inline gimple *
next_imm_use_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->iter_node.next;
  if (end_imm_use_stmt_p (imm))
    {
      if (imm->iter_node.prev != NULL)
	delink_imm_use (&imm->iter_node);
      return NULL;
    }

  link_use_stmts_after (imm->imm_use, imm);
  return USE_STMT (imm->imm_use);
}
/* This routine will return the first use on the stmt IMM currently refers
   to.  */
static inline use_operand_p
first_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->next_imm_name = imm->imm_use->next;
  return imm->imm_use;
}

/* Return TRUE if the last use on the stmt IMM refers to has been visited.  */
static inline bool
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == &(imm->iter_node));
}

/* Bump to the next use on the stmt IMM refers to, return NULL if done.  */
static inline use_operand_p
next_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->next_imm_name;
  if (end_imm_use_on_stmt_p (imm))
    return NULL_USE_OPERAND_P;
  else
    {
      imm->next_imm_name = imm->imm_use->next;
      return imm->imm_use;
    }
}
/* Delink all immediate_use information for STMT.  */
static inline void
delink_stmt_imm_use (gimple *stmt)
{
  ssa_op_iter iter;
  use_operand_p use_p;

  if (ssa_operands_active (cfun))
    FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_ALL_USES)
      delink_imm_use (use_p);
}

#endif /* GCC_SSA_ITERATORS_H  */