Mercurial > hg > CbC > CbC_gcc
annotate gcc/resource.c @ 66:b362627d71ba
bug-fix: modify tail-call-optimization enforcing rules. (calls.c.)
author | Ryoma SHINYA <shinya@firefly.cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Tue, 14 Dec 2010 03:58:33 +0900 |
parents | 77e2b8dfacca |
children | f6334be47118 |
rev | line source |
---|---|
0 | 1 /* Definitions for computing resource usage of specific insns. |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
36
diff
changeset
|
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, |
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
36
diff
changeset
|
3 2009 Free Software Foundation, Inc. |
0 | 4 |
5 This file is part of GCC. | |
6 | |
7 GCC is free software; you can redistribute it and/or modify it under | |
8 the terms of the GNU General Public License as published by the Free | |
9 Software Foundation; either version 3, or (at your option) any later | |
10 version. | |
11 | |
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 for more details. | |
16 | |
17 You should have received a copy of the GNU General Public License | |
18 along with GCC; see the file COPYING3. If not see | |
19 <http://www.gnu.org/licenses/>. */ | |
20 | |
21 #include "config.h" | |
22 #include "system.h" | |
23 #include "coretypes.h" | |
24 #include "tm.h" | |
25 #include "toplev.h" | |
26 #include "rtl.h" | |
27 #include "tm_p.h" | |
28 #include "hard-reg-set.h" | |
29 #include "function.h" | |
30 #include "regs.h" | |
31 #include "flags.h" | |
32 #include "output.h" | |
33 #include "resource.h" | |
34 #include "except.h" | |
35 #include "insn-attr.h" | |
36 #include "params.h" | |
37 #include "df.h" | |
38 | |
39 /* This structure is used to record liveness information at the targets or | |
40 fallthrough insns of branches. We will most likely need the information | |
41 at targets again, so save them in a hash table rather than recomputing them | |
42 each time. */ | |
43 | |
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

/* Number of buckets in the INSN_UID-keyed hash table below.  */
#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Forward declarations of local helpers.  */
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
				       rtx*, int, struct resources,
				       struct resources);
87 | |
88 /* Utility function called from mark_target_live_regs via note_stores. | |
89 It deadens any CLOBBERed registers and livens any SET registers. */ | |
90 | |
91 static void | |
92 update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED) | |
93 { | |
94 int first_regno, last_regno; | |
95 int i; | |
96 | |
97 if (!REG_P (dest) | |
98 && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest)))) | |
99 return; | |
100 | |
101 if (GET_CODE (dest) == SUBREG) | |
102 { | |
103 first_regno = subreg_regno (dest); | |
104 last_regno = first_regno + subreg_nregs (dest); | |
105 | |
106 } | |
107 else | |
108 { | |
109 first_regno = REGNO (dest); | |
110 last_regno = END_HARD_REGNO (dest); | |
111 } | |
112 | |
113 if (GET_CODE (x) == CLOBBER) | |
114 for (i = first_regno; i < last_regno; i++) | |
115 CLEAR_HARD_REG_BIT (current_live_regs, i); | |
116 else | |
117 for (i = first_regno; i < last_regno; i++) | |
118 { | |
119 SET_HARD_REG_BIT (current_live_regs, i); | |
120 CLEAR_HARD_REG_BIT (pending_dead_regs, i); | |
121 } | |
122 } | |
123 | |
124 /* Find the number of the basic block with correct live register | |
125 information that starts closest to INSN. Return -1 if we couldn't | |
126 find such a basic block or the beginning is more than | |
127 SEARCH_LIMIT instructions before INSN. Use SEARCH_LIMIT = -1 for | |
128 an unlimited search. | |
129 | |
130 The delay slot filling code destroys the control-flow graph so, | |
131 instead of finding the basic block containing INSN, we search | |
132 backwards toward a BARRIER where the live register information is | |
133 correct. */ | |
134 | |
135 static int | |
136 find_basic_block (rtx insn, int search_limit) | |
137 { | |
138 /* Scan backwards to the previous BARRIER. Then see if we can find a | |
139 label that starts a basic block. Return the basic block number. */ | |
140 for (insn = prev_nonnote_insn (insn); | |
141 insn && !BARRIER_P (insn) && search_limit != 0; | |
142 insn = prev_nonnote_insn (insn), --search_limit) | |
143 ; | |
144 | |
145 /* The closest BARRIER is too far away. */ | |
146 if (search_limit == 0) | |
147 return -1; | |
148 | |
149 /* The start of the function. */ | |
150 else if (insn == 0) | |
151 return ENTRY_BLOCK_PTR->next_bb->index; | |
152 | |
153 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach | |
154 anything other than a CODE_LABEL or note, we can't find this code. */ | |
155 for (insn = next_nonnote_insn (insn); | |
156 insn && LABEL_P (insn); | |
157 insn = next_nonnote_insn (insn)) | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
158 if (BLOCK_FOR_INSN (insn)) |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
159 return BLOCK_FOR_INSN (insn)->index; |
0 | 160 |
161 return -1; | |
162 } | |
163 | |
164 /* Similar to next_insn, but ignores insns in the delay slots of | |
165 an annulled branch. */ | |
166 | |
167 static rtx | |
168 next_insn_no_annul (rtx insn) | |
169 { | |
170 if (insn) | |
171 { | |
172 /* If INSN is an annulled branch, skip any insns from the target | |
173 of the branch. */ | |
174 if (INSN_P (insn) | |
175 && INSN_ANNULLED_BRANCH_P (insn) | |
176 && NEXT_INSN (PREV_INSN (insn)) != insn) | |
177 { | |
178 rtx next = NEXT_INSN (insn); | |
179 enum rtx_code code = GET_CODE (next); | |
180 | |
181 while ((code == INSN || code == JUMP_INSN || code == CALL_INSN) | |
182 && INSN_FROM_TARGET_P (next)) | |
183 { | |
184 insn = next; | |
185 next = NEXT_INSN (insn); | |
186 code = GET_CODE (next); | |
187 } | |
188 } | |
189 | |
190 insn = NEXT_INSN (insn); | |
191 if (insn && NONJUMP_INSN_P (insn) | |
192 && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
193 insn = XVECEXP (PATTERN (insn), 0, 0); | |
194 } | |
195 | |
196 return insn; | |
197 } | |
198 | |
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Constants and labels reference no machine resources.  */
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  /* A SUBREG of a hard register references the covered range of
	     hard registers.  */
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      /* Only hard registers exist at this point in compilation.  */
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (MEM_READONLY_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      /* A CLOBBER sets, it does not reference.  */
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    /* Mark argument registers from USEs in CALL_INSN_FUNCTION_USAGE,
	       except those whose loads were moved into this call's delay
	       slots (found by matching SET_DESTs in the SEQUENCE).  */
	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
415 | |
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.

   RES accumulates the live registers; SET and NEEDED track the resources
   set and needed so far on this path.  If JUMP_TARGET is non-null, the
   label of the first unconditional jump followed is stored there.  Returns
   that jump insn, or 0 if the walk ended without following one.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (JUMP_P (this_jump_insn))
		    break;
		}
	    }
	  /* Fall through to the default handling.  */

	default:
	  break;
	}

      if (JUMP_P (this_jump_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
		{
		  /* Follow the unconditional jump: continue the scan at
		     its target label.  */
		  next = JUMP_LABEL (this_jump_insn);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  /* Registers dead on the taken path.  */
		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  /* Registers dead on the fallthrough path.  */
		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
					      &target_res, 0, jump_count,
					      target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  /* A register is live if it is live on either path.  */
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      /* Registers set before being needed are not live at TARGET.  */
      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
598 | |
/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn. If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx link;

	  res->cc = res->memory = 1;

	  IOR_HARD_REG_SET (res->regs, regs_invalidated_by_call);

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */
      /* FALLTHROUGH */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      /* Process the elements of a delay-slot sequence, skipping slots
	 filled from the target of an annulled branch (element 0).  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      /* Auto-modify addressing writes the address register.  */
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory |= MEM_READONLY_P (x);
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      /* The address is referenced, not set.  */
      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
815 | |
816 /* Return TRUE if INSN is a return, possibly with a filled delay slot. */ | |
817 | |
818 static bool | |
819 return_insn_p (const_rtx insn) | |
820 { | |
821 if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN) | |
822 return true; | |
823 | |
824 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
825 return return_insn_p (XVECEXP (PATTERN (insn), 0, 0)); | |
826 | |
827 return false; | |
828 } | |
829 | |
830 /* Set the resources that are live at TARGET. | |
831 | |
832 If TARGET is zero, we refer to the end of the current function and can | |
833 return our precomputed value. | |
834 | |
835 Otherwise, we try to find out what is live by consulting the basic block | |
836 information. This is tricky, because we must consider the actions of | |
837 reload and jump optimization, which occur after the basic block information | |
838 has been computed. | |
839 | |
840 Accordingly, we proceed as follows:: | |
841 | |
842 We find the previous BARRIER and look at all immediately following labels | |
843 (with no intervening active insns) to see if any of them start a basic | |
844 block. If we hit the start of the function first, we use block 0. | |
845 | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
846 Once we have found a basic block and a corresponding first insn, we can |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
847 accurately compute the live status (by starting at a label following a |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
848 BARRIER, we are immune to actions taken by reload and jump.) Then we |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
849 scan all insns between that point and our target. For each CLOBBER (or |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
850 for call-clobbered regs when we pass a CALL_INSN), mark the appropriate |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
851 registers are dead. For a SET, mark them as live. |
0 | 852 |
853 We have to be careful when using REG_DEAD notes because they are not | |
854 updated by such things as find_equiv_reg. So keep track of registers | |
855 marked as dead that haven't been assigned to, and mark them dead at the | |
856 next CODE_LABEL since reload and jump won't propagate values across labels. | |
857 | |
858 If we cannot find the start of a basic block (should be a very rare | |
859 case, if it can happen at all), mark everything as potentially live. | |
860 | |
861 Next, scan forward from TARGET looking for things set or clobbered | |
862 before they are used. These are not live. | |
863 | |
864 Because we can be called many times on the same target, save our results | |
865 in a hash table indexed by INSN_UID. This is only done if the function | |
866 init_resource_info () was invoked before we are called. */ | |
867 | |
868 void | |
869 mark_target_live_regs (rtx insns, rtx target, struct resources *res) | |
870 { | |
871 int b = -1; | |
872 unsigned int i; | |
873 struct target_info *tinfo = NULL; | |
874 rtx insn; | |
875 rtx jump_insn = 0; | |
876 rtx jump_target; | |
877 HARD_REG_SET scratch; | |
878 struct resources set, needed; | |
879 | |
880 /* Handle end of function. */ | |
881 if (target == 0) | |
882 { | |
883 *res = end_of_function_needs; | |
884 return; | |
885 } | |
886 | |
887 /* Handle return insn. */ | |
888 else if (return_insn_p (target)) | |
889 { | |
890 *res = end_of_function_needs; | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
36
diff
changeset
|
891 mark_referenced_resources (target, res, false); |
0 | 892 return; |
893 } | |
894 | |
895 /* We have to assume memory is needed, but the CC isn't. */ | |
896 res->memory = 1; | |
897 res->volatil = res->unch_memory = 0; | |
898 res->cc = 0; | |
899 | |
900 /* See if we have computed this value already. */ | |
901 if (target_hash_table != NULL) | |
902 { | |
903 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME]; | |
904 tinfo; tinfo = tinfo->next) | |
905 if (tinfo->uid == INSN_UID (target)) | |
906 break; | |
907 | |
908 /* Start by getting the basic block number. If we have saved | |
909 information, we can get it from there unless the insn at the | |
910 start of the basic block has been deleted. */ | |
911 if (tinfo && tinfo->block != -1 | |
912 && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block)))) | |
913 b = tinfo->block; | |
914 } | |
915 | |
916 if (b == -1) | |
917 b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH); | |
918 | |
919 if (target_hash_table != NULL) | |
920 { | |
921 if (tinfo) | |
922 { | |
923 /* If the information is up-to-date, use it. Otherwise, we will | |
924 update it below. */ | |
925 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b]) | |
926 { | |
927 COPY_HARD_REG_SET (res->regs, tinfo->live_regs); | |
928 return; | |
929 } | |
930 } | |
931 else | |
932 { | |
933 /* Allocate a place to put our results and chain it into the | |
934 hash table. */ | |
935 tinfo = XNEW (struct target_info); | |
936 tinfo->uid = INSN_UID (target); | |
937 tinfo->block = b; | |
938 tinfo->next | |
939 = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME]; | |
940 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo; | |
941 } | |
942 } | |
943 | |
944 CLEAR_HARD_REG_SET (pending_dead_regs); | |
945 | |
946 /* If we found a basic block, get the live registers from it and update | |
947 them with anything set or killed between its start and the insn before | |
36 | 948 TARGET; this custom life analysis is really about registers so we need |
949 to use the LR problem. Otherwise, we must assume everything is live. */ | |
0 | 950 if (b != -1) |
951 { | |
36 | 952 regset regs_live = DF_LR_IN (BASIC_BLOCK (b)); |
0 | 953 rtx start_insn, stop_insn; |
954 | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
955 /* Compute hard regs live at start of block. */ |
0 | 956 REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live); |
957 | |
958 /* Get starting and ending insn, handling the case where each might | |
959 be a SEQUENCE. */ | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
36
diff
changeset
|
960 start_insn = (b == ENTRY_BLOCK_PTR->next_bb->index ? |
0 | 961 insns : BB_HEAD (BASIC_BLOCK (b))); |
962 stop_insn = target; | |
963 | |
964 if (NONJUMP_INSN_P (start_insn) | |
965 && GET_CODE (PATTERN (start_insn)) == SEQUENCE) | |
966 start_insn = XVECEXP (PATTERN (start_insn), 0, 0); | |
967 | |
968 if (NONJUMP_INSN_P (stop_insn) | |
969 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE) | |
970 stop_insn = next_insn (PREV_INSN (stop_insn)); | |
971 | |
972 for (insn = start_insn; insn != stop_insn; | |
973 insn = next_insn_no_annul (insn)) | |
974 { | |
975 rtx link; | |
976 rtx real_insn = insn; | |
977 enum rtx_code code = GET_CODE (insn); | |
978 | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
36
diff
changeset
|
979 if (DEBUG_INSN_P (insn)) |
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
36
diff
changeset
|
980 continue; |
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
36
diff
changeset
|
981 |
0 | 982 /* If this insn is from the target of a branch, it isn't going to |
983 be used in the sequel. If it is used in both cases, this | |
984 test will not be true. */ | |
985 if ((code == INSN || code == JUMP_INSN || code == CALL_INSN) | |
986 && INSN_FROM_TARGET_P (insn)) | |
987 continue; | |
988 | |
989 /* If this insn is a USE made by update_block, we care about the | |
990 underlying insn. */ | |
991 if (code == INSN && GET_CODE (PATTERN (insn)) == USE | |
992 && INSN_P (XEXP (PATTERN (insn), 0))) | |
993 real_insn = XEXP (PATTERN (insn), 0); | |
994 | |
995 if (CALL_P (real_insn)) | |
996 { | |
997 /* CALL clobbers all call-used regs that aren't fixed except | |
998 sp, ap, and fp. Do this before setting the result of the | |
999 call live. */ | |
1000 AND_COMPL_HARD_REG_SET (current_live_regs, | |
1001 regs_invalidated_by_call); | |
1002 | |
1003 /* A CALL_INSN sets any global register live, since it may | |
1004 have been modified by the call. */ | |
1005 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
1006 if (global_regs[i]) | |
1007 SET_HARD_REG_BIT (current_live_regs, i); | |
1008 } | |
1009 | |
1010 /* Mark anything killed in an insn to be deadened at the next | |
1011 label. Ignore USE insns; the only REG_DEAD notes will be for | |
1012 parameters. But they might be early. A CALL_INSN will usually | |
1013 clobber registers used for parameters. It isn't worth bothering | |
1014 with the unlikely case when it won't. */ | |
1015 if ((NONJUMP_INSN_P (real_insn) | |
1016 && GET_CODE (PATTERN (real_insn)) != USE | |
1017 && GET_CODE (PATTERN (real_insn)) != CLOBBER) | |
1018 || JUMP_P (real_insn) | |
1019 || CALL_P (real_insn)) | |
1020 { | |
1021 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1)) | |
1022 if (REG_NOTE_KIND (link) == REG_DEAD | |
1023 && REG_P (XEXP (link, 0)) | |
1024 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER) | |
1025 add_to_hard_reg_set (&pending_dead_regs, | |
1026 GET_MODE (XEXP (link, 0)), | |
1027 REGNO (XEXP (link, 0))); | |
1028 | |
1029 note_stores (PATTERN (real_insn), update_live_status, NULL); | |
1030 | |
1031 /* If any registers were unused after this insn, kill them. | |
1032 These notes will always be accurate. */ | |
1033 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1)) | |
1034 if (REG_NOTE_KIND (link) == REG_UNUSED | |
1035 && REG_P (XEXP (link, 0)) | |
1036 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER) | |
1037 remove_from_hard_reg_set (¤t_live_regs, | |
1038 GET_MODE (XEXP (link, 0)), | |
1039 REGNO (XEXP (link, 0))); | |
1040 } | |
1041 | |
1042 else if (LABEL_P (real_insn)) | |
1043 { | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1044 basic_block bb; |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1045 |
0 | 1046 /* A label clobbers the pending dead registers since neither |
1047 reload nor jump will propagate a value across a label. */ | |
1048 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs); | |
1049 CLEAR_HARD_REG_SET (pending_dead_regs); | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1050 |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1051 /* We must conservatively assume that all registers that used |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1052 to be live here still are. The fallthrough edge may have |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1053 left a live register uninitialized. */ |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1054 bb = BLOCK_FOR_INSN (real_insn); |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1055 if (bb) |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1056 { |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1057 HARD_REG_SET extra_live; |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1058 |
36 | 1059 REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb)); |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1060 IOR_HARD_REG_SET (current_live_regs, extra_live); |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1061 } |
0 | 1062 } |
1063 | |
1064 /* The beginning of the epilogue corresponds to the end of the | |
1065 RTL chain when there are no epilogue insns. Certain resources | |
1066 are implicitly required at that point. */ | |
1067 else if (NOTE_P (real_insn) | |
1068 && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG) | |
1069 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs); | |
1070 } | |
1071 | |
1072 COPY_HARD_REG_SET (res->regs, current_live_regs); | |
1073 if (tinfo != NULL) | |
1074 { | |
1075 tinfo->block = b; | |
1076 tinfo->bb_tick = bb_ticks[b]; | |
1077 } | |
1078 } | |
1079 else | |
1080 /* We didn't find the start of a basic block. Assume everything | |
1081 in use. This should happen only extremely rarely. */ | |
1082 SET_HARD_REG_SET (res->regs); | |
1083 | |
1084 CLEAR_RESOURCE (&set); | |
1085 CLEAR_RESOURCE (&needed); | |
1086 | |
1087 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0, | |
1088 set, needed); | |
1089 | |
1090 /* If we hit an unconditional branch, we have another way of finding out | |
1091 what is live: we can see what is live at the branch target and include | |
1092 anything used but not set before the branch. We add the live | |
1093 resources found using the test below to those found until now. */ | |
1094 | |
1095 if (jump_insn) | |
1096 { | |
1097 struct resources new_resources; | |
1098 rtx stop_insn = next_active_insn (jump_insn); | |
1099 | |
1100 mark_target_live_regs (insns, next_active_insn (jump_target), | |
1101 &new_resources); | |
1102 CLEAR_RESOURCE (&set); | |
1103 CLEAR_RESOURCE (&needed); | |
1104 | |
1105 /* Include JUMP_INSN in the needed registers. */ | |
1106 for (insn = target; insn != stop_insn; insn = next_active_insn (insn)) | |
1107 { | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
36
diff
changeset
|
1108 mark_referenced_resources (insn, &needed, true); |
0 | 1109 |
1110 COPY_HARD_REG_SET (scratch, needed.regs); | |
1111 AND_COMPL_HARD_REG_SET (scratch, set.regs); | |
1112 IOR_HARD_REG_SET (new_resources.regs, scratch); | |
1113 | |
1114 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL); | |
1115 } | |
1116 | |
1117 IOR_HARD_REG_SET (res->regs, new_resources.regs); | |
1118 } | |
1119 | |
1120 if (tinfo != NULL) | |
1121 { | |
1122 COPY_HARD_REG_SET (tinfo->live_regs, res->regs); | |
1123 } | |
1124 } | |
1125 | |
1126 /* Initialize the resources required by mark_target_live_regs (). | |
1127 This should be invoked before the first call to mark_target_live_regs. */ | |
1128 | |
1129 void | |
1130 init_resource_info (rtx epilogue_insn) | |
1131 { | |
1132 int i; | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1133 basic_block bb; |
0 | 1134 |
1135 /* Indicate what resources are required to be valid at the end of the current | |
1136 function. The condition code never is and memory always is. If the | |
1137 frame pointer is needed, it is and so is the stack pointer unless | |
1138 EXIT_IGNORE_STACK is nonzero. If the frame pointer is not needed, the | |
1139 stack pointer is. Registers used to return the function value are | |
1140 needed. Registers holding global variables are needed. */ | |
1141 | |
1142 end_of_function_needs.cc = 0; | |
1143 end_of_function_needs.memory = 1; | |
1144 end_of_function_needs.unch_memory = 0; | |
1145 CLEAR_HARD_REG_SET (end_of_function_needs.regs); | |
1146 | |
1147 if (frame_pointer_needed) | |
1148 { | |
1149 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM); | |
1150 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
1151 SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM); | |
1152 #endif | |
1153 if (! EXIT_IGNORE_STACK | |
1154 || current_function_sp_is_unchanging) | |
1155 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM); | |
1156 } | |
1157 else | |
1158 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM); | |
1159 | |
1160 if (crtl->return_rtx != 0) | |
1161 mark_referenced_resources (crtl->return_rtx, | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
36
diff
changeset
|
1162 &end_of_function_needs, true); |
0 | 1163 |
1164 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
1165 if (global_regs[i] | |
1166 #ifdef EPILOGUE_USES | |
1167 || EPILOGUE_USES (i) | |
1168 #endif | |
1169 ) | |
1170 SET_HARD_REG_BIT (end_of_function_needs.regs, i); | |
1171 | |
1172 /* The registers required to be live at the end of the function are | |
1173 represented in the flow information as being dead just prior to | |
1174 reaching the end of the function. For example, the return of a value | |
1175 might be represented by a USE of the return register immediately | |
1176 followed by an unconditional jump to the return label where the | |
1177 return label is the end of the RTL chain. The end of the RTL chain | |
1178 is then taken to mean that the return register is live. | |
1179 | |
1180 This sequence is no longer maintained when epilogue instructions are | |
1181 added to the RTL chain. To reconstruct the original meaning, the | |
1182 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the | |
1183 point where these registers become live (start_of_epilogue_needs). | |
1184 If epilogue instructions are present, the registers set by those | |
1185 instructions won't have been processed by flow. Thus, those | |
1186 registers are additionally required at the end of the RTL chain | |
1187 (end_of_function_needs). */ | |
1188 | |
1189 start_of_epilogue_needs = end_of_function_needs; | |
1190 | |
1191 while ((epilogue_insn = next_nonnote_insn (epilogue_insn))) | |
1192 { | |
1193 mark_set_resources (epilogue_insn, &end_of_function_needs, 0, | |
1194 MARK_SRC_DEST_CALL); | |
1195 if (return_insn_p (epilogue_insn)) | |
1196 break; | |
1197 } | |
1198 | |
1199 /* Allocate and initialize the tables used by mark_target_live_regs. */ | |
1200 target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME); | |
1201 bb_ticks = XCNEWVEC (int, last_basic_block); | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1202 |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1203 /* Set the BLOCK_FOR_INSN of each label that starts a basic block. */ |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1204 FOR_EACH_BB (bb) |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1205 if (LABEL_P (BB_HEAD (bb))) |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1206 BLOCK_FOR_INSN (BB_HEAD (bb)) = bb; |
0 | 1207 } |
1208 | |
1209 /* Free up the resources allocated to mark_target_live_regs (). This | |
1210 should be invoked after the last call to mark_target_live_regs (). */ | |
1211 | |
1212 void | |
1213 free_resource_info (void) | |
1214 { | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1215 basic_block bb; |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1216 |
0 | 1217 if (target_hash_table != NULL) |
1218 { | |
1219 int i; | |
1220 | |
1221 for (i = 0; i < TARGET_HASH_PRIME; ++i) | |
1222 { | |
1223 struct target_info *ti = target_hash_table[i]; | |
1224 | |
1225 while (ti) | |
1226 { | |
1227 struct target_info *next = ti->next; | |
1228 free (ti); | |
1229 ti = next; | |
1230 } | |
1231 } | |
1232 | |
1233 free (target_hash_table); | |
1234 target_hash_table = NULL; | |
1235 } | |
1236 | |
1237 if (bb_ticks != NULL) | |
1238 { | |
1239 free (bb_ticks); | |
1240 bb_ticks = NULL; | |
1241 } | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1242 |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1243 FOR_EACH_BB (bb) |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1244 if (LABEL_P (BB_HEAD (bb))) |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1245 BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL; |
0 | 1246 } |
1247 | |
1248 /* Clear any hashed information that we have stored for INSN. */ | |
1249 | |
1250 void | |
1251 clear_hashed_info_for_insn (rtx insn) | |
1252 { | |
1253 struct target_info *tinfo; | |
1254 | |
1255 if (target_hash_table != NULL) | |
1256 { | |
1257 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME]; | |
1258 tinfo; tinfo = tinfo->next) | |
1259 if (tinfo->uid == INSN_UID (insn)) | |
1260 break; | |
1261 | |
1262 if (tinfo) | |
1263 tinfo->block = -1; | |
1264 } | |
1265 } | |
1266 | |
1267 /* Increment the tick count for the basic block that contains INSN. */ | |
1268 | |
1269 void | |
1270 incr_ticks_for_insn (rtx insn) | |
1271 { | |
1272 int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH); | |
1273 | |
1274 if (b != -1) | |
1275 bb_ticks[b]++; | |
1276 } | |
1277 | |
/* Add TRIAL to the set of resources used at the end of the current
   function.  INCLUDE_DELAYED_EFFECTS is forwarded to
   mark_referenced_resources unchanged.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}