gcc/jump.c @ 0:a06113de4d67 (first commit)

author:   kent <kent@cr.ie.u-ryukyu.ac.jp>
date:     Fri, 17 Jul 2009 14:47:48 +0900
parents:  (none)
children: 77e2b8dfacca
/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
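
/* For orientation, a minimal sketch of the RTL shapes these utilities
   operate on (illustrative only, register number assumed):

       (jump_insn (set (pc) (label_ref L)))                 unconditional

       (jump_insn (set (pc) (if_then_else
                              (eq (reg:SI 60) (const_int 0))
                              (label_ref L)
                              (pc))))                       conditional

   In both cases JUMP_LABEL of the jump_insn points at the CODE_LABEL
   for L, and that label's LABEL_NUSES counts such references.  */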

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "diagnostic.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
#include "timevar.h"
#include "tree-pass.h"
#include "target.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void mark_jump_label_1 (rtx, rtx, bool, bool);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);

/* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
   notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
   instructions and jumping insns that have labels as operands
   (e.g. cbranchsi4).  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}

/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */
unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
        {
          prev = prev_nonnote_insn (insn);
          if (BARRIER_P (prev))
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            reorder_insns (insn, insn, prev);
        }
    }
  return 0;
}
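
/* An illustrative sketch (assumed insn chain, not from a real dump) of
   what cleanup_barriers does: given

       insn A
       NOTE
       BARRIER

   the BARRIER is moved to directly after insn A, leaving the NOTE
   behind it, and when two barriers end up adjacent the later one is
   deleted.  */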

struct rtl_opt_pass pass_cleanup_barriers =
{
 {
  RTL_PASS,
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
   for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (LABEL_P (insn))
        LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);

      /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
         sticky and not reset here; that way we won't lose association
         with a label when e.g. the source for a target register
         disappears out of reach for targets that may use jump-target
         registers.  Jump transformations are supposed to transform
         any REG_LABEL_TARGET notes.  The target label reference in a
         branch may disappear from the branch (and from the
         instruction before it) for other reasons, like register
         allocation.  */

      if (INSN_P (insn))
        {
          rtx note, next;

          for (note = REG_NOTES (insn); note; note = next)
            {
              next = XEXP (note, 1);
              if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
                  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
                remove_note (insn, note);
            }
        }
    }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;
  rtx prev_nonjump_insn = NULL;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);

        /* If the previous non-jump insn sets something to a label,
           something that this jump insn uses, make that label the primary
           target of this insn if we don't yet have any.  That previous
           insn must be a single_set and not refer to more than one label.
           The jump insn must not refer to other labels as jump targets
           and must be a plain (set (pc) ...), maybe in a parallel, and
           may refer to the item being set only directly or as one of the
           arms in an IF_THEN_ELSE.  */
        if (! INSN_DELETED_P (insn)
            && JUMP_P (insn)
            && JUMP_LABEL (insn) == NULL)
          {
            rtx label_note = NULL;
            rtx pc = pc_set (insn);
            rtx pc_src = pc != NULL ? SET_SRC (pc) : NULL;

            if (prev_nonjump_insn != NULL)
              label_note
                = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

            if (label_note != NULL && pc_src != NULL)
              {
                rtx label_set = single_set (prev_nonjump_insn);
                rtx label_dest
                  = label_set != NULL ? SET_DEST (label_set) : NULL;

                if (label_set != NULL
                    /* The source must be the direct LABEL_REF, not a
                       PLUS, UNSPEC, IF_THEN_ELSE etc.  */
                    && GET_CODE (SET_SRC (label_set)) == LABEL_REF
                    && (rtx_equal_p (label_dest, pc_src)
                        || (GET_CODE (pc_src) == IF_THEN_ELSE
                            && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
                                || rtx_equal_p (label_dest,
                                                XEXP (pc_src, 2))))))
                  {
                    /* The CODE_LABEL referred to in the note must be the
                       CODE_LABEL in the LABEL_REF of the "set".  We can
                       conveniently use it for the marker function, which
                       requires a LABEL_REF wrapping.  */
                    gcc_assert (XEXP (label_note, 0)
                                == XEXP (SET_SRC (label_set), 0));

                    mark_jump_label_1 (label_set, insn, false, true);
                    gcc_assert (JUMP_LABEL (insn)
                                == XEXP (SET_SRC (label_set), 0));
                  }
              }
          }
        else if (! INSN_DELETED_P (insn))
          prev_nonjump_insn = insn;
      }
    else if (LABEL_P (insn))
      prev_nonjump_insn = NULL;

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      rtx insn;
      FOR_EACH_BB (bb)
        {
          for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }

          for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }
        }
    }
}

/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of the reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may also be returned when we have a CC_MODE compare
   and cannot tell whether its source is a floating point or an integer
   comparison.  The machine description should define the
   REVERSIBLE_CC_MODE and REVERSE_CONDITION macros to help this function
   avoid overhead in these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
                                const_rtx arg1, const_rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
         point.  Similarly the unsigned comparisons are never used for
         floating point so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* If we already see an unordered comparison, we can be sure to
         be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      const_rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      /* These CONST_CASTs are okay because prev_nonnote_insn just
         returns its argument and we assign it to a const_rtx
         variable.  */
      for (prev = prev_nonnote_insn (CONST_CAST_RTX (insn));
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (CONST_CAST_RTX (prev)))
        {
          const_rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for
                 the i387 model of comparisons, which first moves flag
                 registers around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
            }
          /* If the register is clobbered in some way we don't
             understand, give up.  */
          if (set)
            return UNKNOWN;
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}

/* A wrapper around the previous function to take COMPARISON as an rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const_rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}

/* Return a comparison with the reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (const_rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}


/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling nans in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}

/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}
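
/* A few illustrative reversals implied by the two functions above:

       reverse_condition (GT)  == LE     (integer, or NaNs ignored)
       reverse_condition (LTU) == GEU

       reverse_condition_maybe_unordered (GT) == UNLE
         (IEEE-safe: !(a > b) is "a <= b or unordered")  */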

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}
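
/* E.g. swap_condition (GT) == LT, reflecting that (a > b) and (b < a)
   are the same test; EQ, NE and the (un)ordered codes are symmetric in
   their operands and so map to themselves.  */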

/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}
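
/* E.g. unsigned_condition (LT) == LTU and signed_condition (LTU) == LT,
   while EQ and NE pass through both functions unchanged, since equality
   does not depend on signedness.  */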

/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
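
/* For instance, comparison_dominates_p (EQ, GE) is 1 because a == b
   implies a >= b, whereas comparison_dominates_p (GE, EQ) is 0: knowing
   a >= b tells us nothing about equality.  */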

/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (const_rtx insn)
{
  return (JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || GET_CODE (XEXP (x, 1)) == RETURN))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
}

/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}

/* Return set of PC, otherwise NULL.  */

rtx
pc_set (const_rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
          || (a == PC && (b == LABEL_REF || b == RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (const_rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const_rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
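
/* A sketch of the distinction, on hypothetical patterns:

       (set (cc0) (compare (reg a) (reg b)))
           sets_cc0_p == 1, and only_sets_cc0_p holds too

       (parallel [(set (cc0) (compare (reg a) (reg b)))
                  (set (reg c) (reg d))])
           sets_cc0_p == -1: the condition codes are set, but so is
           something else.  */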
#endif

/* Find all CODE_LABELs referred to in X, and increment their use
   counts.  If INSN is a JUMP_INSN and there is at least one
   CODE_LABEL referenced in INSN as a jump target, then store the last
   one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
   for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
   notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
   a JUMP_INSN, and there is at least one CODE_LABEL referenced in
   INSN, add a REG_LABEL_OPERAND note containing that label to INSN.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  mark_jump_label_1 (x, insn, in_mem != 0,
                     (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
}

/* Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
   within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
   jump-target; when the JUMP_LABEL field of INSN should be set or a
   REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
   note.  */

static void
mark_jump_label_1 (rtx x, rtx insn, bool in_mem, bool is_target)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = true;
      break;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
        mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
                         XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
      break;

      /* Handle operands in the condition of an if-then-else as for a
         non-jump insn.  */
    case IF_THEN_ELSE:
      if (!is_target)
        break;
      mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
      mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
      mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
      return;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (is_target
                /* Do not change a previous setting of JUMP_LABEL.  If the
                   JUMP_LABEL slot is occupied by a different label,
                   create a note for this label.  */
                && (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
              JUMP_LABEL (insn) = label;
            else
              {
                enum reg_note kind
                  = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;

                /* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
                   for LABEL unless there already is one.  All uses of
                   a label, except for the primary target of a jump,
                   must have such a note.  */
                if (! find_reg_note (insn, kind, label))
                  add_reg_note (insn, kind, label);
              }
          }
        return;
      }

      /* Do walk the labels in a vector, but not the first operand of an
         ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL_RTX, in_mem,
                               is_target);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  /* The primary target of a tablejump is the label of the ADDR_VEC,
     which is canonically mentioned *last* in the insn.  To get it
     marked as JUMP_LABEL, we iterate over items in reverse order.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
                               is_target);
        }
    }
}
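
/* To illustrate the target/operand distinction handled above (sketch,
   hypothetical insns):

       (jump_insn (set (pc) (label_ref L1)))
           L1 is a jump target: it becomes JUMP_LABEL, or gets a
           REG_LABEL_TARGET note if that slot already holds another
           label.

       (insn (set (reg r) (label_ref L2)))
           L2 is used as data, not as a direct jump target, so it gets
           a REG_LABEL_OPERAND note.  */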


/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead, and a
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        /* This can delete NEXT or PREV,
           either directly if NEXT is JUMP_LABEL (INSN),
           or indirectly through more levels of jumps.  */
        delete_related_insns (lab);
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for any JUMP_P / INSN / CALL_INSN with a
     REG_LABEL_OPERAND or REG_LABEL_TARGET note.  */
  if (INSN_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
           || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  /* I feel a little doubtful about this loop,
     but I see no clean and sure alternative way
     to find the first insn after INSN that is not now deleted.
     I hope this works.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);
  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
        {
          INSN_DELETED_P (insn) = 1;

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            NEXT_INSN (prev) = next;

          if (next)
            PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
        x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  if (code == IF_THEN_ELSE)
    {
      /* Skip the condition of an IF_THEN_ELSE.  We only want to
         change jump destinations, not eventual label comparisons.  */
      redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
      redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
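
/* The change-group protocol used by redirect_jump above, sketched from
   a caller's point of view (hypothetical variables):

       if (redirect_jump_1 (jump, new_label)      <- queue replacements
           && apply_change_group ())              <- commit if still valid
         ...
       else
         cancel_changes (0);                      <- roll everything back

   validate_change (..., 1) only accrues each replacement; nothing is
   final until apply_change_group succeeds.  */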

/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete the insn related to OLABEL if its
   ref count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  gcc_assert (JUMP_LABEL (jump) == olabel);

  /* A negative DELETE_UNUSED used to signal behavior with respect to
     moving the FUNCTION_END note.  Just sanity-check that no caller
     still relies on this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
         be done if this is not an integer comparison, is to reverse
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
         of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
        {
          validate_change (insn, &XEXP (x, 0),
                           gen_rtx_fmt_ee (reversed_code,
                                           GET_MODE (comp), XEXP (comp, 0),
                                           XEXP (comp, 1)),
                           1);
          return 1;
        }

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}


/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                        && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              if (!subreg_offset_representable_p (reg_renumber[reg_x],
                                                  GET_MODE (SUBREG_REG (x)),
                                                  byte_x,
                                                  GET_MODE (x)))
                return 0;
              reg_x = subreg_regno_offset (reg_renumber[reg_x],
                                           GET_MODE (SUBREG_REG (x)),
                                           byte_x,
                                           GET_MODE (x));
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              if (!subreg_offset_representable_p (reg_renumber[reg_y],
                                                  GET_MODE (SUBREG_REG (y)),
                                                  byte_y,
                                                  GET_MODE (y)))
                return 0;
              reg_y = subreg_regno_offset (reg_renumber[reg_y],
                                           GET_MODE (SUBREG_REG (y)),
                                           byte_y,
                                           GET_MODE (y));
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
          && base < FIRST_PSEUDO_REGISTER
          && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
                                            GET_MODE (SUBREG_REG (x)),
                                            SUBREG_BYTE (x), GET_MODE (x)))
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
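
/* Example with assumed numbers: if pseudo 100 has been assigned hard
   register 3 by register allocation, true_regnum of (reg:SI 100) is 3;
   for a pseudo left unassigned it is the pseudo's own number, and for
   anything that is not a REG or a SUBREG of one it is -1.  */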

/* Return regno of the register REG and handle subregs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}