Mercurial > hg > CbC > CbC_gcc
annotate gcc/stmt.c @ 22:0eb6cac880f0
add cbc example of quicksort.
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Tue, 13 Oct 2009 17:15:58 +0900 |
parents | 58ad6c70ea60 |
children | 77e2b8dfacca |
rev | line source |
---|---|
0 | 1 /* Expands front end tree to back end RTL for GCC |
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, | |
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 | |
4 Free Software Foundation, Inc. | |
5 | |
6 This file is part of GCC. | |
7 | |
8 GCC is free software; you can redistribute it and/or modify it under | |
9 the terms of the GNU General Public License as published by the Free | |
10 Software Foundation; either version 3, or (at your option) any later | |
11 version. | |
12 | |
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 for more details. | |
17 | |
18 You should have received a copy of the GNU General Public License | |
19 along with GCC; see the file COPYING3. If not see | |
20 <http://www.gnu.org/licenses/>. */ | |
21 | |
22 /* This file handles the generation of rtl code from tree structure | |
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c. | |
24 The functions whose names start with `expand_' are called by the | |
25 expander to generate RTL instructions for various kinds of constructs. */ | |
26 | |
27 #include "config.h" | |
28 #include "system.h" | |
29 #include "coretypes.h" | |
30 #include "tm.h" | |
31 | |
32 #include "rtl.h" | |
33 #include "hard-reg-set.h" | |
34 #include "tree.h" | |
35 #include "tm_p.h" | |
36 #include "flags.h" | |
37 #include "except.h" | |
38 #include "function.h" | |
39 #include "insn-config.h" | |
40 #include "expr.h" | |
41 #include "libfuncs.h" | |
42 #include "recog.h" | |
43 #include "machmode.h" | |
44 #include "toplev.h" | |
45 #include "output.h" | |
46 #include "ggc.h" | |
47 #include "langhooks.h" | |
48 #include "predict.h" | |
49 #include "optabs.h" | |
50 #include "target.h" | |
51 #include "regs.h" | |
52 #include "alloc-pool.h" | |
53 | |
54 /* Functions and data structures for expanding case statements. */ | |
55 | |
56 /* Case label structure, used to hold info on labels within case | |
57 statements. We handle "range" labels; for a single-value label | |
58 as in C, the high and low limits are the same. | |
59 | |
60 We start with a vector of case nodes sorted in ascending order, and | |
61 the default label as the last element in the vector. Before expanding | |
62 to RTL, we transform this vector into a list linked via the RIGHT | |
63 fields in the case_node struct. Nodes with higher case values are | |
64 later in the list. | |
65 | |
66 Switch statements can be output in three forms. A branch table is | |
67 used if there are more than a few labels and the labels are dense | |
68 within the range between the smallest and largest case value. If a | |
69 branch table is used, no further manipulations are done with the case | |
70 node chain. | |
71 | |
72 The alternative to the use of a branch table is to generate a series | |
73 of compare and jump insns. When that is done, we use the LEFT, RIGHT, | |
74 and PARENT fields to hold a binary tree. Initially the tree is | |
75 totally unbalanced, with everything on the right. We balance the tree | |
76 with nodes on the left having lower case values than the parent | |
77 and nodes on the right having higher values. We then output the tree | |
78 in order. | |
79 | |
80 For very small, suitable switch statements, we can generate a series | |
81 of simple bit test and branches instead. */ | |
82 | |
83 struct case_node | |
84 { | |
85 struct case_node *left; /* Left son in binary tree */ | |
86 struct case_node *right; /* Right son in binary tree; also node chain */ | |
87 struct case_node *parent; /* Parent of node in binary tree */ | |
88 tree low; /* Lowest index value for this label */ | |
89 tree high; /* Highest index value for this label */ | |
90 tree code_label; /* Label to jump to when node matches */ | |
91 }; | |
92 | |
93 typedef struct case_node case_node; | |
94 typedef struct case_node *case_node_ptr; | |
95 | |
96 /* These are used by estimate_case_costs and balance_case_nodes. */ | |
97 | |
98 /* This must be a signed type, and non-ANSI compilers lack signed char. */ | |
99 static short cost_table_[129]; | |
100 static int use_cost_table; | |
101 static int cost_table_initialized; | |
102 | |
103 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW | |
104 is unsigned. */ | |
105 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)] | |
106 | |
107 static int n_occurrences (int, const char *); | |
108 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *); | |
109 static void expand_nl_goto_receiver (void); | |
110 static bool check_operand_nalternatives (tree, tree); | |
111 static bool check_unique_operand_names (tree, tree); | |
112 static char *resolve_operand_name_1 (char *, tree, tree); | |
113 static void expand_null_return_1 (void); | |
114 static void expand_value_return (rtx); | |
115 static int estimate_case_costs (case_node_ptr); | |
116 static bool lshift_cheap_p (void); | |
117 static int case_bit_test_cmp (const void *, const void *); | |
118 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx); | |
119 static void balance_case_nodes (case_node_ptr *, case_node_ptr); | |
120 static int node_has_low_bound (case_node_ptr, tree); | |
121 static int node_has_high_bound (case_node_ptr, tree); | |
122 static int node_is_bounded (case_node_ptr, tree); | |
123 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree); | |
124 static struct case_node *add_case_node (struct case_node *, tree, | |
125 tree, tree, tree, alloc_pool); | |
126 | |
127 | |
128 /* Return the rtx-label that corresponds to a LABEL_DECL, | |
129 creating it if necessary. */ | |
130 | |
131 rtx | |
132 label_rtx (tree label) | |
133 { | |
134 gcc_assert (TREE_CODE (label) == LABEL_DECL); | |
135 | |
136 if (!DECL_RTL_SET_P (label)) | |
137 { | |
138 rtx r = gen_label_rtx (); | |
139 SET_DECL_RTL (label, r); | |
140 if (FORCED_LABEL (label) || DECL_NONLOCAL (label)) | |
141 LABEL_PRESERVE_P (r) = 1; | |
142 } | |
143 | |
144 return DECL_RTL (label); | |
145 } | |
146 | |
147 /* As above, but also put it on the forced-reference list of the | |
148 function that contains it. */ | |
149 rtx | |
150 force_label_rtx (tree label) | |
151 { | |
152 rtx ref = label_rtx (label); | |
153 tree function = decl_function_context (label); | |
154 | |
155 gcc_assert (function); | |
156 | |
157 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels); | |
158 return ref; | |
159 } | |
160 | |
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  /* Flush any pending stack adjustment first so the jump target sees a
     consistent stack pointer.  */
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  /* Code following an unconditional jump is unreachable; mark that with
     a barrier.  */
  emit_barrier ();
}
170 | |
171 /* Emit code to jump to the address | |
172 specified by the pointer expression EXP. */ | |
173 | |
174 void | |
175 expand_computed_goto (tree exp) | |
176 { | |
177 rtx x = expand_normal (exp); | |
178 | |
179 x = convert_memory_address (Pmode, x); | |
180 | |
181 do_pending_stack_adjust (); | |
182 emit_indirect_jump (x); | |
183 } | |
184 | |
185 /* Handle goto statements and the labels that they can go to. */ | |
186 | |
/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  /* Carry the source-level name over to the RTL label (used for
     assembler/debug output).  */
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  /* A label reachable by nonlocal goto needs receiver code emitted here
     and must be recorded on the nonlocal handler list.  */
  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);
    }

  /* An address-taken label must be kept even if never jumped to.  */
  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}
222 | |
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}
239 | |
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
249 | |
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  /* The front end may hand us &"text"; strip the ADDR_EXPR to reach
     the STRING_CST itself.  */
  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
				ggc_strdup (TREE_STRING_POINTER (string)),
				locus);

  /* Volatility is recorded on the ASM_INPUT rtx via MEM_VOLATILE_P.  */
  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
271 | |
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning (0, "output constraint %qc for operand %d "
		 "is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = XALLOCAVEC (char, c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string, classifying each letter.
     CONSTRAINT_LEN handles multi-character constraint names.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned "
	       "%<+%> or %<=%>");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      /* TARGET_MEM_CONSTRAINT is the target's general memory letter
	 (normally 'm').  */
      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	/* Machine-specific constraint letters: classify via the
	   target's constraint macros.  */
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
417 | |
/* Similar to parse_output_constraint, but for input constraints.
   INPUT_NUM is the index of this input; CONSTRAINTS holds all operand
   constraint strings so that matching ('0'..'9') constraints can be
   resolved to the corresponding output constraint.  On return
   *ALLOWS_MEM / *ALLOWS_REG describe what the constraint permits;
   *CONSTRAINT_P may be replaced by the matched output's constraint.
   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	/* These are only errors in the user-written constraint; they
	   are tolerated in a substituted (matched) output constraint.  */
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains %qc", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p': case 'r':
	*allows_reg = true;
	break;

      case 'g': case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation %qc in constraint", constraint[j]);
	    return false;
	  }
	/* Machine-specific constraint letters: classify via the
	   target's constraint macros.  */
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}
553 | |
554 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL | |
555 can be an asm-declared register. Called via walk_tree. */ | |
556 | |
557 static tree | |
558 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED, | |
559 void *data) | |
560 { | |
561 tree decl = *declp; | |
562 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data; | |
563 | |
564 if (TREE_CODE (decl) == VAR_DECL) | |
565 { | |
566 if (DECL_HARD_REGISTER (decl) | |
567 && REG_P (DECL_RTL (decl)) | |
568 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER) | |
569 { | |
570 rtx reg = DECL_RTL (decl); | |
571 | |
572 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg))) | |
573 return decl; | |
574 } | |
575 walk_subtrees = 0; | |
576 } | |
577 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL) | |
578 walk_subtrees = 0; | |
579 return NULL_TREE; | |
580 } | |
581 | |
/* If there is an overlap between *REGS and DECL, return the first overlap
   found; return NULL_TREE if there is none.  */
tree
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
{
  /* walk_tree visits DECL and its sub-trees; the callback returns the
     first conflicting decl, which walk_tree propagates back to us.  */
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
}
589 | |
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true on
   conflict; return false when T is ok.  (Note: despite the older
   wording, this returns a bool, not the conflicting decl.)  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qs conflicts with asm clobber list",
	     IDENTIFIER_POINTER (DECL_NAME (overlap)));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
614 | |
615 /* Generate RTL for an asm statement with arguments. | |
616 STRING is the instruction template. | |
617 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs. | |
618 Each output or input has an expression in the TREE_VALUE and | |
619 a tree list in TREE_PURPOSE which in turn contains a constraint | |
620 name in TREE_VALUE (or NULL_TREE) and a constraint string | |
621 in TREE_PURPOSE. | |
622 CLOBBERS is a list of STRING_CST nodes each naming a hard register | |
623 that is clobbered by this insn. | |
624 | |
625 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly. | |
626 Some elements of OUTPUTS may be replaced with trees representing temporary | |
627 values. The caller should copy those temporary values to the originally | |
628 specified lvalues. | |
629 | |
630 VOL nonzero means the insn is volatile; don't optimize it. */ | |
631 | |
632 static void | |
633 expand_asm_operands (tree string, tree outputs, tree inputs, | |
634 tree clobbers, int vol, location_t locus) | |
635 { | |
636 rtvec argvec, constraintvec; | |
637 rtx body; | |
638 int ninputs = list_length (inputs); | |
639 int noutputs = list_length (outputs); | |
640 int ninout; | |
641 int nclobbers; | |
642 HARD_REG_SET clobbered_regs; | |
643 int clobber_conflict_found = 0; | |
644 tree tail; | |
645 tree t; | |
646 int i; | |
647 /* Vector of RTX's of evaluated output operands. */ | |
648 rtx *output_rtx = XALLOCAVEC (rtx, noutputs); | |
649 int *inout_opnum = XALLOCAVEC (int, noutputs); | |
650 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs); | |
651 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs); | |
652 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs); | |
653 int old_generating_concat_p = generating_concat_p; | |
654 | |
655 /* An ASM with no outputs needs to be treated as volatile, for now. */ | |
656 if (noutputs == 0) | |
657 vol = 1; | |
658 | |
659 if (! check_operand_nalternatives (outputs, inputs)) | |
660 return; | |
661 | |
662 string = resolve_asm_operand_names (string, outputs, inputs); | |
663 | |
664 /* Collect constraints. */ | |
665 i = 0; | |
666 for (t = outputs; t ; t = TREE_CHAIN (t), i++) | |
667 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t))); | |
668 for (t = inputs; t ; t = TREE_CHAIN (t), i++) | |
669 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t))); | |
670 | |
671 /* Sometimes we wish to automatically clobber registers across an asm. | |
672 Case in point is when the i386 backend moved from cc0 to a hard reg -- | |
673 maintaining source-level compatibility means automatically clobbering | |
674 the flags register. */ | |
675 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers); | |
676 | |
677 /* Count the number of meaningful clobbered registers, ignoring what | |
678 we would ignore later. */ | |
679 nclobbers = 0; | |
680 CLEAR_HARD_REG_SET (clobbered_regs); | |
681 for (tail = clobbers; tail; tail = TREE_CHAIN (tail)) | |
682 { | |
683 const char *regname; | |
684 | |
685 if (TREE_VALUE (tail) == error_mark_node) | |
686 return; | |
687 regname = TREE_STRING_POINTER (TREE_VALUE (tail)); | |
688 | |
689 i = decode_reg_name (regname); | |
690 if (i >= 0 || i == -4) | |
691 ++nclobbers; | |
692 else if (i == -2) | |
693 error ("unknown register name %qs in %<asm%>", regname); | |
694 | |
695 /* Mark clobbered registers. */ | |
696 if (i >= 0) | |
697 { | |
698 /* Clobbering the PIC register is an error. */ | |
699 if (i == (int) PIC_OFFSET_TABLE_REGNUM) | |
700 { | |
701 error ("PIC register %qs clobbered in %<asm%>", regname); | |
702 return; | |
703 } | |
704 | |
705 SET_HARD_REG_BIT (clobbered_regs, i); | |
706 } | |
707 } | |
708 | |
709 /* First pass over inputs and outputs checks validity and sets | |
710 mark_addressable if needed. */ | |
711 | |
712 ninout = 0; | |
713 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++) | |
714 { | |
715 tree val = TREE_VALUE (tail); | |
716 tree type = TREE_TYPE (val); | |
717 const char *constraint; | |
718 bool is_inout; | |
719 bool allows_reg; | |
720 bool allows_mem; | |
721 | |
722 /* If there's an erroneous arg, emit no insn. */ | |
723 if (type == error_mark_node) | |
724 return; | |
725 | |
726 /* Try to parse the output constraint. If that fails, there's | |
727 no point in going further. */ | |
728 constraint = constraints[i]; | |
729 if (!parse_output_constraint (&constraint, i, ninputs, noutputs, | |
730 &allows_mem, &allows_reg, &is_inout)) | |
731 return; | |
732 | |
733 if (! allows_reg | |
734 && (allows_mem | |
735 || is_inout | |
736 || (DECL_P (val) | |
737 && REG_P (DECL_RTL (val)) | |
738 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))) | |
739 lang_hooks.mark_addressable (val); | |
740 | |
741 if (is_inout) | |
742 ninout++; | |
743 } | |
744 | |
745 ninputs += ninout; | |
746 if (ninputs + noutputs > MAX_RECOG_OPERANDS) | |
747 { | |
748 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS); | |
749 return; | |
750 } | |
751 | |
752 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail)) | |
753 { | |
754 bool allows_reg, allows_mem; | |
755 const char *constraint; | |
756 | |
757 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT | |
758 would get VOIDmode and that could cause a crash in reload. */ | |
759 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node) | |
760 return; | |
761 | |
762 constraint = constraints[i + noutputs]; | |
763 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout, | |
764 constraints, &allows_mem, &allows_reg)) | |
765 return; | |
766 | |
767 if (! allows_reg && allows_mem) | |
768 lang_hooks.mark_addressable (TREE_VALUE (tail)); | |
769 } | |
770 | |
771 /* Second pass evaluates arguments. */ | |
772 | |
773 ninout = 0; | |
774 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++) | |
775 { | |
776 tree val = TREE_VALUE (tail); | |
777 tree type = TREE_TYPE (val); | |
778 bool is_inout; | |
779 bool allows_reg; | |
780 bool allows_mem; | |
781 rtx op; | |
782 bool ok; | |
783 | |
784 ok = parse_output_constraint (&constraints[i], i, ninputs, | |
785 noutputs, &allows_mem, &allows_reg, | |
786 &is_inout); | |
787 gcc_assert (ok); | |
788 | |
789 /* If an output operand is not a decl or indirect ref and our constraint | |
790 allows a register, make a temporary to act as an intermediate. | |
791 Make the asm insn write into that, then our caller will copy it to | |
792 the real output operand. Likewise for promoted variables. */ | |
793 | |
794 generating_concat_p = 0; | |
795 | |
796 real_output_rtx[i] = NULL_RTX; | |
797 if ((TREE_CODE (val) == INDIRECT_REF | |
798 && allows_mem) | |
799 || (DECL_P (val) | |
800 && (allows_mem || REG_P (DECL_RTL (val))) | |
801 && ! (REG_P (DECL_RTL (val)) | |
802 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))) | |
803 || ! allows_reg | |
804 || is_inout) | |
805 { | |
806 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE); | |
807 if (MEM_P (op)) | |
808 op = validize_mem (op); | |
809 | |
810 if (! allows_reg && !MEM_P (op)) | |
811 error ("output number %d not directly addressable", i); | |
812 if ((! allows_mem && MEM_P (op)) | |
813 || GET_CODE (op) == CONCAT) | |
814 { | |
815 real_output_rtx[i] = op; | |
816 op = gen_reg_rtx (GET_MODE (op)); | |
817 if (is_inout) | |
818 emit_move_insn (op, real_output_rtx[i]); | |
819 } | |
820 } | |
821 else | |
822 { | |
823 op = assign_temp (type, 0, 0, 1); | |
824 op = validize_mem (op); | |
825 TREE_VALUE (tail) = make_tree (type, op); | |
826 } | |
827 output_rtx[i] = op; | |
828 | |
829 generating_concat_p = old_generating_concat_p; | |
830 | |
831 if (is_inout) | |
832 { | |
833 inout_mode[ninout] = TYPE_MODE (type); | |
834 inout_opnum[ninout++] = i; | |
835 } | |
836 | |
837 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs)) | |
838 clobber_conflict_found = 1; | |
839 } | |
840 | |
841 /* Make vectors for the expression-rtx, constraint strings, | |
842 and named operands. */ | |
843 | |
844 argvec = rtvec_alloc (ninputs); | |
845 constraintvec = rtvec_alloc (ninputs); | |
846 | |
847 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode | |
848 : GET_MODE (output_rtx[0])), | |
849 ggc_strdup (TREE_STRING_POINTER (string)), | |
850 empty_string, 0, argvec, constraintvec, | |
851 locus); | |
852 | |
853 MEM_VOLATILE_P (body) = vol; | |
854 | |
855 /* Eval the inputs and put them into ARGVEC. | |
856 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */ | |
857 | |
858 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i) | |
859 { | |
860 bool allows_reg, allows_mem; | |
861 const char *constraint; | |
862 tree val, type; | |
863 rtx op; | |
864 bool ok; | |
865 | |
866 constraint = constraints[i + noutputs]; | |
867 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout, | |
868 constraints, &allows_mem, &allows_reg); | |
869 gcc_assert (ok); | |
870 | |
871 generating_concat_p = 0; | |
872 | |
873 val = TREE_VALUE (tail); | |
874 type = TREE_TYPE (val); | |
875 /* EXPAND_INITIALIZER will not generate code for valid initializer | |
876 constants, but will still generate code for other types of operand. | |
877 This is the behavior we want for constant constraints. */ | |
878 op = expand_expr (val, NULL_RTX, VOIDmode, | |
879 allows_reg ? EXPAND_NORMAL | |
880 : allows_mem ? EXPAND_MEMORY | |
881 : EXPAND_INITIALIZER); | |
882 | |
883 /* Never pass a CONCAT to an ASM. */ | |
884 if (GET_CODE (op) == CONCAT) | |
885 op = force_reg (GET_MODE (op), op); | |
886 else if (MEM_P (op)) | |
887 op = validize_mem (op); | |
888 | |
889 if (asm_operand_ok (op, constraint, NULL) <= 0) | |
890 { | |
891 if (allows_reg && TYPE_MODE (type) != BLKmode) | |
892 op = force_reg (TYPE_MODE (type), op); | |
893 else if (!allows_mem) | |
894 warning (0, "asm operand %d probably doesn%'t match constraints", | |
895 i + noutputs); | |
896 else if (MEM_P (op)) | |
897 { | |
898 /* We won't recognize either volatile memory or memory | |
899 with a queued address as available a memory_operand | |
900 at this point. Ignore it: clearly this *is* a memory. */ | |
901 } | |
902 else | |
903 { | |
904 warning (0, "use of memory input without lvalue in " | |
905 "asm operand %d is deprecated", i + noutputs); | |
906 | |
907 if (CONSTANT_P (op)) | |
908 { | |
909 rtx mem = force_const_mem (TYPE_MODE (type), op); | |
910 if (mem) | |
911 op = validize_mem (mem); | |
912 else | |
913 op = force_reg (TYPE_MODE (type), op); | |
914 } | |
915 if (REG_P (op) | |
916 || GET_CODE (op) == SUBREG | |
917 || GET_CODE (op) == CONCAT) | |
918 { | |
919 tree qual_type = build_qualified_type (type, | |
920 (TYPE_QUALS (type) | |
921 | TYPE_QUAL_CONST)); | |
922 rtx memloc = assign_temp (qual_type, 1, 1, 1); | |
923 memloc = validize_mem (memloc); | |
924 emit_move_insn (memloc, op); | |
925 op = memloc; | |
926 } | |
927 } | |
928 } | |
929 | |
930 generating_concat_p = old_generating_concat_p; | |
931 ASM_OPERANDS_INPUT (body, i) = op; | |
932 | |
933 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i) | |
934 = gen_rtx_ASM_INPUT (TYPE_MODE (type), | |
935 ggc_strdup (constraints[i + noutputs])); | |
936 | |
937 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs)) | |
938 clobber_conflict_found = 1; | |
939 } | |
940 | |
941 /* Protect all the operands from the queue now that they have all been | |
942 evaluated. */ | |
943 | |
944 generating_concat_p = 0; | |
945 | |
946 /* For in-out operands, copy output rtx to input rtx. */ | |
947 for (i = 0; i < ninout; i++) | |
948 { | |
949 int j = inout_opnum[i]; | |
950 char buffer[16]; | |
951 | |
952 ASM_OPERANDS_INPUT (body, ninputs - ninout + i) | |
953 = output_rtx[j]; | |
954 | |
955 sprintf (buffer, "%d", j); | |
956 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i) | |
957 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer)); | |
958 } | |
959 | |
960 generating_concat_p = old_generating_concat_p; | |
961 | |
962 /* Now, for each output, construct an rtx | |
963 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER | |
964 ARGVEC CONSTRAINTS OPNAMES)) | |
965 If there is more than one, put them inside a PARALLEL. */ | |
966 | |
967 if (noutputs == 1 && nclobbers == 0) | |
968 { | |
969 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]); | |
970 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body)); | |
971 } | |
972 | |
973 else if (noutputs == 0 && nclobbers == 0) | |
974 { | |
975 /* No output operands: put in a raw ASM_OPERANDS rtx. */ | |
976 emit_insn (body); | |
977 } | |
978 | |
979 else | |
980 { | |
981 rtx obody = body; | |
982 int num = noutputs; | |
983 | |
984 if (num == 0) | |
985 num = 1; | |
986 | |
987 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers)); | |
988 | |
989 /* For each output operand, store a SET. */ | |
990 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++) | |
991 { | |
992 XVECEXP (body, 0, i) | |
993 = gen_rtx_SET (VOIDmode, | |
994 output_rtx[i], | |
995 gen_rtx_ASM_OPERANDS | |
996 (GET_MODE (output_rtx[i]), | |
997 ggc_strdup (TREE_STRING_POINTER (string)), | |
998 ggc_strdup (constraints[i]), | |
999 i, argvec, constraintvec, locus)); | |
1000 | |
1001 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol; | |
1002 } | |
1003 | |
1004 /* If there are no outputs (but there are some clobbers) | |
1005 store the bare ASM_OPERANDS into the PARALLEL. */ | |
1006 | |
1007 if (i == 0) | |
1008 XVECEXP (body, 0, i++) = obody; | |
1009 | |
1010 /* Store (clobber REG) for each clobbered register specified. */ | |
1011 | |
1012 for (tail = clobbers; tail; tail = TREE_CHAIN (tail)) | |
1013 { | |
1014 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail)); | |
1015 int j = decode_reg_name (regname); | |
1016 rtx clobbered_reg; | |
1017 | |
1018 if (j < 0) | |
1019 { | |
1020 if (j == -3) /* `cc', which is not a register */ | |
1021 continue; | |
1022 | |
1023 if (j == -4) /* `memory', don't cache memory across asm */ | |
1024 { | |
1025 XVECEXP (body, 0, i++) | |
1026 = gen_rtx_CLOBBER (VOIDmode, | |
1027 gen_rtx_MEM | |
1028 (BLKmode, | |
1029 gen_rtx_SCRATCH (VOIDmode))); | |
1030 continue; | |
1031 } | |
1032 | |
1033 /* Ignore unknown register, error already signaled. */ | |
1034 continue; | |
1035 } | |
1036 | |
1037 /* Use QImode since that's guaranteed to clobber just one reg. */ | |
1038 clobbered_reg = gen_rtx_REG (QImode, j); | |
1039 | |
1040 /* Do sanity check for overlap between clobbers and respectively | |
1041 input and outputs that hasn't been handled. Such overlap | |
1042 should have been detected and reported above. */ | |
1043 if (!clobber_conflict_found) | |
1044 { | |
1045 int opno; | |
1046 | |
1047 /* We test the old body (obody) contents to avoid tripping | |
1048 over the under-construction body. */ | |
1049 for (opno = 0; opno < noutputs; opno++) | |
1050 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno])) | |
1051 internal_error ("asm clobber conflict with output operand"); | |
1052 | |
1053 for (opno = 0; opno < ninputs - ninout; opno++) | |
1054 if (reg_overlap_mentioned_p (clobbered_reg, | |
1055 ASM_OPERANDS_INPUT (obody, opno))) | |
1056 internal_error ("asm clobber conflict with input operand"); | |
1057 } | |
1058 | |
1059 XVECEXP (body, 0, i++) | |
1060 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg); | |
1061 } | |
1062 | |
1063 emit_insn (body); | |
1064 } | |
1065 | |
1066 /* For any outputs that needed reloading into registers, spill them | |
1067 back to where they belong. */ | |
1068 for (i = 0; i < noutputs; ++i) | |
1069 if (real_output_rtx[i]) | |
1070 emit_move_insn (real_output_rtx[i], output_rtx[i]); | |
1071 | |
1072 crtl->has_asm_statement = 1; | |
1073 free_temp_slots (); | |
1074 } | |
1075 | |
1076 void | |
1077 expand_asm_expr (tree exp) | |
1078 { | |
1079 int noutputs, i; | |
1080 tree outputs, tail; | |
1081 tree *o; | |
1082 | |
1083 if (ASM_INPUT_P (exp)) | |
1084 { | |
1085 expand_asm_loc (ASM_STRING (exp), ASM_VOLATILE_P (exp), input_location); | |
1086 return; | |
1087 } | |
1088 | |
1089 outputs = ASM_OUTPUTS (exp); | |
1090 noutputs = list_length (outputs); | |
1091 /* o[I] is the place that output number I should be written. */ | |
1092 o = (tree *) alloca (noutputs * sizeof (tree)); | |
1093 | |
1094 /* Record the contents of OUTPUTS before it is modified. */ | |
1095 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++) | |
1096 o[i] = TREE_VALUE (tail); | |
1097 | |
1098 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of | |
1099 OUTPUTS some trees for where the values were actually stored. */ | |
1100 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp), | |
1101 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp), | |
1102 input_location); | |
1103 | |
1104 /* Copy all the intermediate outputs into the specified outputs. */ | |
1105 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++) | |
1106 { | |
1107 if (o[i] != TREE_VALUE (tail)) | |
1108 { | |
1109 expand_assignment (o[i], TREE_VALUE (tail), false); | |
1110 free_temp_slots (); | |
1111 | |
1112 /* Restore the original value so that it's correct the next | |
1113 time we expand this function. */ | |
1114 TREE_VALUE (tail) = o[i]; | |
1115 } | |
1116 } | |
1117 } | |
1118 | |
1119 /* A subroutine of expand_asm_operands. Check that all operands have | |
1120 the same number of alternatives. Return true if so. */ | |
1121 | |
1122 static bool | |
1123 check_operand_nalternatives (tree outputs, tree inputs) | |
1124 { | |
1125 if (outputs || inputs) | |
1126 { | |
1127 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs); | |
1128 int nalternatives | |
1129 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp))); | |
1130 tree next = inputs; | |
1131 | |
1132 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES) | |
1133 { | |
1134 error ("too many alternatives in %<asm%>"); | |
1135 return false; | |
1136 } | |
1137 | |
1138 tmp = outputs; | |
1139 while (tmp) | |
1140 { | |
1141 const char *constraint | |
1142 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp))); | |
1143 | |
1144 if (n_occurrences (',', constraint) != nalternatives) | |
1145 { | |
1146 error ("operand constraints for %<asm%> differ " | |
1147 "in number of alternatives"); | |
1148 return false; | |
1149 } | |
1150 | |
1151 if (TREE_CHAIN (tmp)) | |
1152 tmp = TREE_CHAIN (tmp); | |
1153 else | |
1154 tmp = next, next = 0; | |
1155 } | |
1156 } | |
1157 | |
1158 return true; | |
1159 } | |
1160 | |
1161 /* A subroutine of expand_asm_operands. Check that all operand names | |
1162 are unique. Return true if so. We rely on the fact that these names | |
1163 are identifiers, and so have been canonicalized by get_identifier, | |
1164 so all we need are pointer comparisons. */ | |
1165 | |
1166 static bool | |
1167 check_unique_operand_names (tree outputs, tree inputs) | |
1168 { | |
1169 tree i, j; | |
1170 | |
1171 for (i = outputs; i ; i = TREE_CHAIN (i)) | |
1172 { | |
1173 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i)); | |
1174 if (! i_name) | |
1175 continue; | |
1176 | |
1177 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j)) | |
1178 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j)))) | |
1179 goto failure; | |
1180 } | |
1181 | |
1182 for (i = inputs; i ; i = TREE_CHAIN (i)) | |
1183 { | |
1184 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i)); | |
1185 if (! i_name) | |
1186 continue; | |
1187 | |
1188 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j)) | |
1189 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j)))) | |
1190 goto failure; | |
1191 for (j = outputs; j ; j = TREE_CHAIN (j)) | |
1192 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j)))) | |
1193 goto failure; | |
1194 } | |
1195 | |
1196 return true; | |
1197 | |
1198 failure: | |
1199 error ("duplicate asm operand name %qs", | |
1200 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i)))); | |
1201 return false; | |
1202 } | |
1203 | |
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  Returns the (possibly
   rebuilt) template STRING.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  /* Diagnose duplicated names first; resolution below picks the first
     match, so duplicates would otherwise be silently accepted.  */
  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  /* Work on a writable copy; resolve_operand_name_1 edits the
	     buffer in place and returns the resume position.  */
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  Only
     "%[name]" and "%<letter>[name]" forms count; a plain '%' is
     skipped.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      /* Re-aim P at the first named reference inside the fresh copy.  */
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  /* P now points at the '['; rewrite "[name]" to a number.  */
	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
1279 | |
/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  len = q - p - 1;

  /* Resolve the name to a number.  Outputs are numbered before inputs,
     so OP keeps counting across both lists.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  /* Name not found: diagnose it (temporarily NUL-terminating the name
     for the message) and substitute operand 0 so parsing can go on.  */
  *q = '\0';
  error ("undefined named operand %qs", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}
1343 | |
1344 /* Generate RTL to evaluate the expression EXP. */ | |
1345 | |
1346 void | |
1347 expand_expr_stmt (tree exp) | |
1348 { | |
1349 rtx value; | |
1350 tree type; | |
1351 | |
1352 value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
1353 type = TREE_TYPE (exp); | |
1354 | |
1355 /* If all we do is reference a volatile value in memory, | |
1356 copy it to a register to be sure it is actually touched. */ | |
1357 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp)) | |
1358 { | |
1359 if (TYPE_MODE (type) == VOIDmode) | |
1360 ; | |
1361 else if (TYPE_MODE (type) != BLKmode) | |
1362 value = copy_to_reg (value); | |
1363 else | |
1364 { | |
1365 rtx lab = gen_label_rtx (); | |
1366 | |
1367 /* Compare the value with itself to reference it. */ | |
1368 emit_cmp_and_jump_insns (value, value, EQ, | |
1369 expand_normal (TYPE_SIZE (type)), | |
1370 BLKmode, 0, lab); | |
1371 emit_label (lab); | |
1372 } | |
1373 } | |
1374 | |
1375 /* Free any temporaries used to evaluate this expression. */ | |
1376 free_temp_slots (); | |
1377 } | |
1378 | |
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression.  */

int
warn_if_unused_value (const_tree exp, location_t locus)
{
 restart:
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Prefer the expression's own location over the caller-supplied one
     when it has one.  */
  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
    case VA_ARG_EXPR:
      /* These all have side effects, so their value being unused is
	 never suspicious.  */
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      /* These wrappers don't change the value; look through them.  */
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
	 case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
	return 0;

    warn:
      warning (OPT_Wunused_value, "%Hvalue computed is not used", &locus);
      return 1;
    }
}
1474 | |
1475 | |
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* The function may have promised a return value that we never set;
     clobber the return registers so stale contents are not treated as
     live through the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
1489 | |
1490 /* Generate RTL to return directly from the current function. | |
1491 (That is, we bypass any return value.) */ | |
1492 | |
1493 void | |
1494 expand_naked_return (void) | |
1495 { | |
1496 rtx end_label; | |
1497 | |
1498 clear_pending_stack_adjust (); | |
1499 do_pending_stack_adjust (); | |
1500 | |
1501 end_label = naked_return_label; | |
1502 if (end_label == 0) | |
1503 end_label = naked_return_label = gen_label_rtx (); | |
1504 | |
1505 emit_jump (end_label); | |
1506 } | |
1507 | |
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location
     unless it's already there.  */

  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      /* If the target promotes small return values to a wider mode,
	 convert VAL to that mode before moving it into place.  */
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
	{
	  int unsignedp = TYPE_UNSIGNED (type);
	  enum machine_mode old_mode
	    = DECL_MODE (DECL_RESULT (current_function_decl));
	  enum machine_mode mode
	    = promote_mode (type, old_mode, &unsignedp, 1);

	  if (mode != old_mode)
	    val = convert_modes (mode, old_mode, val, unsignedp);
	}
      /* A PARALLEL return location describes a value split over several
	 registers; load the pieces as a group.  */
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
1539 | |
/* Output a return with no value.  Drop any pending stack adjustment
   that is unnecessary on the return path, emit whatever adjustment
   remains, then jump to the function's common return label.  The jump
   must come last so it does not bypass the adjustment.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}
1549 | |
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      /* Still evaluate RETVAL for its side effects.  */
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    /* "return <result> = <expr>": the interesting part is the RHS.  */
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_normal (retval_rhs);
      enum machine_mode tmpmode, result_reg_mode;

      /* An empty aggregate: nothing to copy, just return.  */
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      /* Assemble the word-sized pieces in a register wide enough to
	 hold at least one word, then narrow if necessary.  */
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
	 reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
1727 | |
/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  Re-establishes the frame pointer (and, where needed, the
   argument pointer) after control arrives from another function's
   frame.  */
static void
expand_nl_goto_receiver (void)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_clobber (static_chain_rtx);

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments are to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  /* Give the target a chance to emit machine-specific receiver code.  */
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
1795 | |
1796 /* Generate RTL for the automatic variable declaration DECL. | |
1797 (Other kinds of declarations are simply ignored if seen here.) */ | |
1798 | |
1799 void | |
1800 expand_decl (tree decl) | |
1801 { | |
1802 tree type; | |
1803 | |
1804 type = TREE_TYPE (decl); | |
1805 | |
1806 /* For a CONST_DECL, set mode, alignment, and sizes from those of the | |
1807 type in case this node is used in a reference. */ | |
1808 if (TREE_CODE (decl) == CONST_DECL) | |
1809 { | |
1810 DECL_MODE (decl) = TYPE_MODE (type); | |
1811 DECL_ALIGN (decl) = TYPE_ALIGN (type); | |
1812 DECL_SIZE (decl) = TYPE_SIZE (type); | |
1813 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type); | |
1814 return; | |
1815 } | |
1816 | |
1817 /* Otherwise, only automatic variables need any expansion done. Static and | |
1818 external variables, and external functions, will be handled by | |
1819 `assemble_variable' (called from finish_decl). TYPE_DECL requires | |
1820 nothing. PARM_DECLs are handled in `assign_parms'. */ | |
1821 if (TREE_CODE (decl) != VAR_DECL) | |
1822 return; | |
1823 | |
1824 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)) | |
1825 return; | |
1826 | |
1827 /* Create the RTL representation for the variable. */ | |
1828 | |
1829 if (type == error_mark_node) | |
1830 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx)); | |
1831 | |
1832 else if (DECL_SIZE (decl) == 0) | |
1833 { | |
1834 /* Variable with incomplete type. */ | |
1835 rtx x; | |
1836 if (DECL_INITIAL (decl) == 0) | |
1837 /* Error message was already done; now avoid a crash. */ | |
1838 x = gen_rtx_MEM (BLKmode, const0_rtx); | |
1839 else | |
1840 /* An initializer is going to decide the size of this array. | |
1841 Until we know the size, represent its address with a reg. */ | |
1842 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode)); | |
1843 | |
1844 set_mem_attributes (x, decl, 1); | |
1845 SET_DECL_RTL (decl, x); | |
1846 } | |
1847 else if (use_register_for_decl (decl)) | |
1848 { | |
1849 /* Automatic variable that can go in a register. */ | |
1850 int unsignedp = TYPE_UNSIGNED (type); | |
1851 enum machine_mode reg_mode | |
1852 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0); | |
1853 | |
1854 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode)); | |
1855 | |
1856 /* Note if the object is a user variable. */ | |
1857 if (!DECL_ARTIFICIAL (decl)) | |
1858 mark_user_reg (DECL_RTL (decl)); | |
1859 | |
1860 if (POINTER_TYPE_P (type)) | |
1861 mark_reg_pointer (DECL_RTL (decl), | |
1862 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))); | |
1863 } | |
1864 | |
1865 else | |
1866 { | |
1867 rtx oldaddr = 0; | |
1868 rtx addr; | |
1869 rtx x; | |
1870 | |
1871 /* Variable-sized decls are dealt with in the gimplifier. */ | |
1872 gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST); | |
1873 | |
1874 /* If we previously made RTL for this decl, it must be an array | |
1875 whose size was determined by the initializer. | |
1876 The old address was a register; set that register now | |
1877 to the proper address. */ | |
1878 if (DECL_RTL_SET_P (decl)) | |
1879 { | |
1880 gcc_assert (MEM_P (DECL_RTL (decl))); | |
1881 gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0))); | |
1882 oldaddr = XEXP (DECL_RTL (decl), 0); | |
1883 } | |
1884 | |
1885 /* Set alignment we actually gave this decl. */ | |
1886 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT | |
1887 : GET_MODE_BITSIZE (DECL_MODE (decl))); | |
1888 DECL_USER_ALIGN (decl) = 0; | |
1889 | |
1890 x = assign_temp (decl, 1, 1, 1); | |
1891 set_mem_attributes (x, decl, 1); | |
1892 SET_DECL_RTL (decl, x); | |
1893 | |
1894 if (oldaddr) | |
1895 { | |
1896 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr); | |
1897 if (addr != oldaddr) | |
1898 emit_move_insn (oldaddr, addr); | |
1899 } | |
1900 } | |
1901 } | |
1902 | |
1903 /* Emit code to save the current value of stack. */ | |
1904 rtx | |
1905 expand_stack_save (void) | |
1906 { | |
1907 rtx ret = NULL_RTX; | |
1908 | |
1909 do_pending_stack_adjust (); | |
1910 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX); | |
1911 return ret; | |
1912 } | |
1913 | |
1914 /* Emit code to restore the current value of stack. */ | |
1915 void | |
1916 expand_stack_restore (tree var) | |
1917 { | |
1918 rtx sa = expand_normal (var); | |
1919 | |
1920 sa = convert_memory_address (Pmode, sa); | |
1921 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX); | |
1922 } | |
1923 | |
1924 /* Do the insertion of a case label into case_list. The labels are | |
1925 fed to us in descending order from the sorted vector of case labels used | |
1926 in the tree part of the middle end. So the list we construct is | |
1927 sorted in ascending order. The bounds on the case range, LOW and HIGH, | |
1928 are converted to case's index type TYPE. */ | |
1929 | |
1930 static struct case_node * | |
1931 add_case_node (struct case_node *head, tree type, tree low, tree high, | |
1932 tree label, alloc_pool case_node_pool) | |
1933 { | |
1934 tree min_value, max_value; | |
1935 struct case_node *r; | |
1936 | |
1937 gcc_assert (TREE_CODE (low) == INTEGER_CST); | |
1938 gcc_assert (!high || TREE_CODE (high) == INTEGER_CST); | |
1939 | |
1940 min_value = TYPE_MIN_VALUE (type); | |
1941 max_value = TYPE_MAX_VALUE (type); | |
1942 | |
1943 /* If there's no HIGH value, then this is not a case range; it's | |
1944 just a simple case label. But that's just a degenerate case | |
1945 range. | |
1946 If the bounds are equal, turn this into the one-value case. */ | |
1947 if (!high || tree_int_cst_equal (low, high)) | |
1948 { | |
1949 /* If the simple case value is unreachable, ignore it. */ | |
1950 if ((TREE_CODE (min_value) == INTEGER_CST | |
1951 && tree_int_cst_compare (low, min_value) < 0) | |
1952 || (TREE_CODE (max_value) == INTEGER_CST | |
1953 && tree_int_cst_compare (low, max_value) > 0)) | |
1954 return head; | |
1955 low = fold_convert (type, low); | |
1956 high = low; | |
1957 } | |
1958 else | |
1959 { | |
1960 /* If the entire case range is unreachable, ignore it. */ | |
1961 if ((TREE_CODE (min_value) == INTEGER_CST | |
1962 && tree_int_cst_compare (high, min_value) < 0) | |
1963 || (TREE_CODE (max_value) == INTEGER_CST | |
1964 && tree_int_cst_compare (low, max_value) > 0)) | |
1965 return head; | |
1966 | |
1967 /* If the lower bound is less than the index type's minimum | |
1968 value, truncate the range bounds. */ | |
1969 if (TREE_CODE (min_value) == INTEGER_CST | |
1970 && tree_int_cst_compare (low, min_value) < 0) | |
1971 low = min_value; | |
1972 low = fold_convert (type, low); | |
1973 | |
1974 /* If the upper bound is greater than the index type's maximum | |
1975 value, truncate the range bounds. */ | |
1976 if (TREE_CODE (max_value) == INTEGER_CST | |
1977 && tree_int_cst_compare (high, max_value) > 0) | |
1978 high = max_value; | |
1979 high = fold_convert (type, high); | |
1980 } | |
1981 | |
1982 | |
1983 /* Add this label to the chain. Make sure to drop overflow flags. */ | |
1984 r = (struct case_node *) pool_alloc (case_node_pool); | |
1985 r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low), | |
1986 TREE_INT_CST_HIGH (low)); | |
1987 r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high), | |
1988 TREE_INT_CST_HIGH (high)); | |
1989 r->code_label = label; | |
1990 r->parent = r->left = NULL; | |
1991 r->right = head; | |
1992 return r; | |
1993 } | |
1994 | |
/* Maximum number of case bit tests.  Also the maximum number of
   distinct jump targets emit_case_bit_tests can handle.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3, i.e.
   targets that can shift in word_mode.  A target may override this
   by defining CASE_USE_BIT_TESTS itself.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (optab_handler (ashl_optab, word_mode)->insn_code \
			     != CODE_FOR_nothing)
#endif
2003 | |
2004 | |
/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;	/* High word of the constant bitmask.  */
  HOST_WIDE_INT lo;	/* Low word of the constant bitmask.  */
  rtx label;		/* Jump target when a masked bit is set.  */
  int bits;		/* Number of case values covered by this entry.  */
};
2019 | |
2020 /* Determine whether "1 << x" is relatively cheap in word_mode. */ | |
2021 | |
2022 static | |
2023 bool lshift_cheap_p (void) | |
2024 { | |
2025 static bool init = false; | |
2026 static bool cheap = true; | |
2027 | |
2028 if (!init) | |
2029 { | |
2030 rtx reg = gen_rtx_REG (word_mode, 10000); | |
2031 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET, | |
2032 optimize_insn_for_speed_p ()); | |
2033 cheap = cost < COSTS_N_INSNS (3); | |
2034 init = true; | |
2035 } | |
2036 | |
2037 return cheap; | |
2038 } | |
2039 | |
2040 /* Comparison function for qsort to order bit tests by decreasing | |
2041 number of case nodes, i.e. the node with the most cases gets | |
2042 tested first. */ | |
2043 | |
2044 static int | |
2045 case_bit_test_cmp (const void *p1, const void *p2) | |
2046 { | |
2047 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1; | |
2048 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2; | |
2049 | |
2050 if (d2->bits != d1->bits) | |
2051 return d2->bits - d1->bits; | |
2052 | |
2053 /* Stabilize the sort. */ | |
2054 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label); | |
2055 } | |
2056 | |
2057 /* Expand a switch statement by a short sequence of bit-wise | |
2058 comparisons. "switch(x)" is effectively converted into | |
2059 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are | |
2060 integer constants. | |
2061 | |
2062 INDEX_EXPR is the value being switched on, which is of | |
2063 type INDEX_TYPE. MINVAL is the lowest case value of in | |
2064 the case nodes, of INDEX_TYPE type, and RANGE is highest | |
2065 value minus MINVAL, also of type INDEX_TYPE. NODES is | |
2066 the set of case nodes, and DEFAULT_LABEL is the label to | |
2067 branch to should none of the cases match. | |
2068 | |
2069 There *MUST* be MAX_CASE_BIT_TESTS or less unique case | |
2070 node targets. */ | |
2071 | |
2072 static void | |
2073 emit_case_bit_tests (tree index_type, tree index_expr, tree minval, | |
2074 tree range, case_node_ptr nodes, rtx default_label) | |
2075 { | |
2076 struct case_bit_test test[MAX_CASE_BIT_TESTS]; | |
2077 enum machine_mode mode; | |
2078 rtx expr, index, label; | |
2079 unsigned int i,j,lo,hi; | |
2080 struct case_node *n; | |
2081 unsigned int count; | |
2082 | |
2083 count = 0; | |
2084 for (n = nodes; n; n = n->right) | |
2085 { | |
2086 label = label_rtx (n->code_label); | |
2087 for (i = 0; i < count; i++) | |
2088 if (label == test[i].label) | |
2089 break; | |
2090 | |
2091 if (i == count) | |
2092 { | |
2093 gcc_assert (count < MAX_CASE_BIT_TESTS); | |
2094 test[i].hi = 0; | |
2095 test[i].lo = 0; | |
2096 test[i].label = label; | |
2097 test[i].bits = 1; | |
2098 count++; | |
2099 } | |
2100 else | |
2101 test[i].bits++; | |
2102 | |
2103 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, | |
2104 n->low, minval), 1); | |
2105 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, | |
2106 n->high, minval), 1); | |
2107 for (j = lo; j <= hi; j++) | |
2108 if (j >= HOST_BITS_PER_WIDE_INT) | |
2109 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT); | |
2110 else | |
2111 test[i].lo |= (HOST_WIDE_INT) 1 << j; | |
2112 } | |
2113 | |
2114 qsort (test, count, sizeof(*test), case_bit_test_cmp); | |
2115 | |
2116 index_expr = fold_build2 (MINUS_EXPR, index_type, | |
2117 fold_convert (index_type, index_expr), | |
2118 fold_convert (index_type, minval)); | |
2119 index = expand_normal (index_expr); | |
2120 do_pending_stack_adjust (); | |
2121 | |
2122 mode = TYPE_MODE (index_type); | |
2123 expr = expand_normal (range); | |
2124 if (default_label) | |
2125 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1, | |
2126 default_label); | |
2127 | |
2128 index = convert_to_mode (word_mode, index, 0); | |
2129 index = expand_binop (word_mode, ashl_optab, const1_rtx, | |
2130 index, NULL_RTX, 1, OPTAB_WIDEN); | |
2131 | |
2132 for (i = 0; i < count; i++) | |
2133 { | |
2134 expr = immed_double_const (test[i].lo, test[i].hi, word_mode); | |
2135 expr = expand_binop (word_mode, and_optab, index, expr, | |
2136 NULL_RTX, 1, OPTAB_WIDEN); | |
2137 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX, | |
2138 word_mode, 1, test[i].label); | |
2139 } | |
2140 | |
2141 if (default_label) | |
2142 emit_jump (default_label); | |
2143 } | |
2144 | |
/* Fallback definitions so the code below can test these macros
   unconditionally: a target lacking the pattern reports 0.  */
#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif
2152 | |
/* Terminate a case (Pascal/Ada) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.  */

void
expand_case (tree exp)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;

  tree vec = SWITCH_LABELS (exp);
  tree orig_type = TREE_TYPE (exp);
  tree index_expr = SWITCH_COND (exp);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl = NULL_TREE;

  /* Pool from which all case_node entries for this switch are drawn;
     freed wholesale on every exit path below.  */
  alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
                                                 sizeof (struct case_node),
                                                 100);

  /* The switch body is lowered in gimplify.c, we should never have
     switches with a non-NULL SWITCH_BODY here.  */
  gcc_assert (!SWITCH_BODY (exp));
  gcc_assert (SWITCH_LABELS (exp));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;
      int vl = TREE_VEC_LENGTH (vec);

      /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
	 expressions being INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case, if ever taken, is at the end of TREE_VEC.  */
      elt = TREE_VEC_ELT (vec, vl - 1);
      if (!CASE_LOW (elt) && !CASE_HIGH (elt))
	{
	  default_label_decl = CASE_LABEL (elt);
	  --vl;
	}

      /* Walk the label vector backwards so add_case_node, which
	 prepends, builds the list in ascending order.  */
      for (i = vl - 1; i >= 0; --i)
	{
	  tree low, high;
	  elt = TREE_VEC_ELT (vec, i);

	  low = CASE_LOW (elt);
	  gcc_assert (low);
	  high = CASE_HIGH (elt);

	  /* Discard empty ranges.  */
	  if (high && tree_int_cst_lt (high, low))
	    continue;

	  case_list = add_case_node (case_list, index_type, low, high,
				     CASE_LABEL (elt), case_node_pool);
	}


      before_case = start = get_last_insn ();
      if (default_label_decl)
	default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  Also count the
	 cases (COUNT, ranges counting double) and the distinct jump
	 targets (UNIQ); these drive the dispatch strategy choice.  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
	{
	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (tree_int_cst_lt (n->low, minval))
		minval = n->low;
	      if (tree_int_cst_lt (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* If we have not seen this label yet, then increase the
	     number of unique case node targets seen.  */
	  lab = label_rtx (n->code_label);
	  if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
	    {
	      bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
	      uniq++;
	    }
	}

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
	 destination, such as one with a default case only.  However,
	 it doesn't remove cases that are out of range for the switch
	 type, so we may still get a zero here.  */
      if (count == 0)
	{
	  if (default_label)
	    emit_jump (default_label);
	  free_alloc_pool (case_node_pool);
	  return;
	}

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      if (CASE_USE_BIT_TESTS
	  && ! TREE_CONSTANT (index_expr)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0
	  && lshift_cheap_p ()
	  && ((uniq == 1 && count >= 3)
	      || (uniq == 2 && count >= 5)
	      || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = build_int_cst (index_type, 0);
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       case_list, default_label);
	  /* Control falls through to the insn-reordering code below.  */
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || !flag_jump_tables
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_normal (index_expr);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  if (MEM_P (index))
	    index = copy_to_reg (index);

	  /* We generate a binary decision tree to select the
	     appropriate target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */

	  use_cost_table
	    = (TREE_CODE (orig_type) != ENUMERAL_TYPE
	       && estimate_case_costs (case_list));
	  balance_case_nodes (&case_list, NULL);
	  emit_case_nodes (index, case_list, default_label, index_type);
	  if (default_label)
	    emit_jump (default_label);
	}
      else
	{
	  /* Dispatch via a jump table, preferring the casesi pattern
	     when the target provides one.  */
	  rtx fallback_label = label_rtx (case_list->code_label);
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label, fallback_label))
	    {
	      bool ok;

	      /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
	      if (optimize_insn_for_speed_p ()
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = build_int_cst (index_type, 0);
		  range = maxval;
		}

	      ok = try_tablejump (index_type, index_expr, minval, range,
				  table_label, default_label);
	      gcc_assert (ok);
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = XALLOCAVEC (rtx, ncases);
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->low, minval), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->high, minval), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  We may have gaps at
	     the beginning if we tried to avoid the minval subtraction,
	     so substitute some label even if the default label was
	     deemed unreachable.  */
	  if (!default_label)
	    default_label = fallback_label;
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* Record no drop-through after the table.  */
	  emit_barrier ();
	}

      /* Move the dispatch code just emitted to sit after START.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
  free_alloc_pool (case_node_pool);
}
2467 | |
2468 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */ | |
2469 | |
2470 static void | |
2471 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label, | |
2472 int unsignedp) | |
2473 { | |
2474 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode, | |
2475 NULL_RTX, NULL_RTX, label); | |
2476 } | |
2477 | |
2478 /* Not all case values are encountered equally. This function | |
2479 uses a heuristic to weight case labels, in cases where that | |
2480 looks like a reasonable thing to do. | |
2481 | |
2482 Right now, all we try to guess is text, and we establish the | |
2483 following weights: | |
2484 | |
2485 chars above space: 16 | |
2486 digits: 16 | |
2487 default: 12 | |
2488 space, punct: 8 | |
2489 tab: 4 | |
2490 newline: 2 | |
2491 other "\" chars: 1 | |
2492 remaining chars: 0 | |
2493 | |
2494 If we find any cases in the switch that are not either -1 or in the range | |
2495 of valid ASCII characters, or are control characters other than those | |
2496 commonly used with "\", don't treat this switch scanning text. | |
2497 | |
2498 Return 1 if these nodes are suitable for cost estimation, otherwise | |
2499 return 0. */ | |
2500 | |
2501 static int | |
2502 estimate_case_costs (case_node_ptr node) | |
2503 { | |
2504 tree min_ascii = integer_minus_one_node; | |
2505 tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127); | |
2506 case_node_ptr n; | |
2507 int i; | |
2508 | |
2509 /* If we haven't already made the cost table, make it now. Note that the | |
2510 lower bound of the table is -1, not zero. */ | |
2511 | |
2512 if (! cost_table_initialized) | |
2513 { | |
2514 cost_table_initialized = 1; | |
2515 | |
2516 for (i = 0; i < 128; i++) | |
2517 { | |
2518 if (ISALNUM (i)) | |
2519 COST_TABLE (i) = 16; | |
2520 else if (ISPUNCT (i)) | |
2521 COST_TABLE (i) = 8; | |
2522 else if (ISCNTRL (i)) | |
2523 COST_TABLE (i) = -1; | |
2524 } | |
2525 | |
2526 COST_TABLE (' ') = 8; | |
2527 COST_TABLE ('\t') = 4; | |
2528 COST_TABLE ('\0') = 4; | |
2529 COST_TABLE ('\n') = 2; | |
2530 COST_TABLE ('\f') = 1; | |
2531 COST_TABLE ('\v') = 1; | |
2532 COST_TABLE ('\b') = 1; | |
2533 } | |
2534 | |
2535 /* See if all the case expressions look like text. It is text if the | |
2536 constant is >= -1 and the highest constant is <= 127. Do all comparisons | |
2537 as signed arithmetic since we don't want to ever access cost_table with a | |
2538 value less than -1. Also check that none of the constants in a range | |
2539 are strange control characters. */ | |
2540 | |
2541 for (n = node; n; n = n->right) | |
2542 { | |
2543 if (tree_int_cst_lt (n->low, min_ascii) | |
2544 || tree_int_cst_lt (max_ascii, n->high)) | |
2545 return 0; | |
2546 | |
2547 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low); | |
2548 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++) | |
2549 if (COST_TABLE (i) < 0) | |
2550 return 0; | |
2551 } | |
2552 | |
2553 /* All interesting values are within the range of interesting | |
2554 ASCII characters. */ | |
2555 return 1; | |
2556 } | |
2557 | |
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.
	     NPP will be left pointing at the link to the pivot node;
	     everything before it becomes the left branch.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.
	     Note: this ELSE pairs with the use_cost_table IF above, so
	     the two cost-free strategies below run only when the cost
	     table is not in use.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* Detach the pivot: NP becomes the subtree root, the nodes
	     before it become its left branch, those after it stay as
	     its right branch.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
2674 | |
2675 /* Search the parent sections of the case node tree | |
2676 to see if a test for the lower bound of NODE would be redundant. | |
2677 INDEX_TYPE is the type of the index expression. | |
2678 | |
2679 The instructions to generate the case decision tree are | |
2680 output in the same order as nodes are processed so it is | |
2681 known that if a parent node checks the range of the current | |
2682 node minus one that the current node is bounded at its lower | |
2683 span. Thus the test would be redundant. */ | |
2684 | |
2685 static int | |
2686 node_has_low_bound (case_node_ptr node, tree index_type) | |
2687 { | |
2688 tree low_minus_one; | |
2689 case_node_ptr pnode; | |
2690 | |
2691 /* If the lower bound of this node is the lowest value in the index type, | |
2692 we need not test it. */ | |
2693 | |
2694 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type))) | |
2695 return 1; | |
2696 | |
2697 /* If this node has a left branch, the value at the left must be less | |
2698 than that at this node, so it cannot be bounded at the bottom and | |
2699 we need not bother testing any further. */ | |
2700 | |
2701 if (node->left) | |
2702 return 0; | |
2703 | |
2704 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low), | |
2705 node->low, | |
2706 build_int_cst (TREE_TYPE (node->low), 1)); | |
2707 | |
2708 /* If the subtraction above overflowed, we can't verify anything. | |
2709 Otherwise, look for a parent that tests our value - 1. */ | |
2710 | |
2711 if (! tree_int_cst_lt (low_minus_one, node->low)) | |
2712 return 0; | |
2713 | |
2714 for (pnode = node->parent; pnode; pnode = pnode->parent) | |
2715 if (tree_int_cst_equal (low_minus_one, pnode->high)) | |
2716 return 1; | |
2717 | |
2718 return 0; | |
2719 } | |
2720 | |
2721 /* Search the parent sections of the case node tree | |
2722 to see if a test for the upper bound of NODE would be redundant. | |
2723 INDEX_TYPE is the type of the index expression. | |
2724 | |
2725 The instructions to generate the case decision tree are | |
2726 output in the same order as nodes are processed so it is | |
2727 known that if a parent node checks the range of the current | |
2728 node plus one that the current node is bounded at its upper | |
2729 span. Thus the test would be redundant. */ | |
2730 | |
2731 static int | |
2732 node_has_high_bound (case_node_ptr node, tree index_type) | |
2733 { | |
2734 tree high_plus_one; | |
2735 case_node_ptr pnode; | |
2736 | |
2737 /* If there is no upper bound, obviously no test is needed. */ | |
2738 | |
2739 if (TYPE_MAX_VALUE (index_type) == NULL) | |
2740 return 1; | |
2741 | |
2742 /* If the upper bound of this node is the highest value in the type | |
2743 of the index expression, we need not test against it. */ | |
2744 | |
2745 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type))) | |
2746 return 1; | |
2747 | |
2748 /* If this node has a right branch, the value at the right must be greater | |
2749 than that at this node, so it cannot be bounded at the top and | |
2750 we need not bother testing any further. */ | |
2751 | |
2752 if (node->right) | |
2753 return 0; | |
2754 | |
2755 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high), | |
2756 node->high, | |
2757 build_int_cst (TREE_TYPE (node->high), 1)); | |
2758 | |
2759 /* If the addition above overflowed, we can't verify anything. | |
2760 Otherwise, look for a parent that tests our value + 1. */ | |
2761 | |
2762 if (! tree_int_cst_lt (node->high, high_plus_one)) | |
2763 return 0; | |
2764 | |
2765 for (pnode = node->parent; pnode; pnode = pnode->parent) | |
2766 if (tree_int_cst_equal (high_plus_one, pnode->low)) | |
2767 return 1; | |
2768 | |
2769 return 0; | |
2770 } | |
2771 | |
2772 /* Search the parent sections of the | |
2773 case node tree to see if both tests for the upper and lower | |
2774 bounds of NODE would be redundant. */ | |
2775 | |
2776 static int | |
2777 node_is_bounded (case_node_ptr node, tree index_type) | |
2778 { | |
2779 return (node_has_low_bound (node, index_type) | |
2780 && node_has_high_bound (node, index_type)); | |
2781 } | |
2782 | |
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.
   DEFAULT_LABEL may be 0 when the switch has no reachable default.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion:
     a constant INDEX has VOIDmode, so take the mode of the type.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (mode, index,
			convert_modes (mode, imode,
				       expand_normal (node->low),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      /* Right subtree is fully bounded: branch straight to its
		 code for INDEX > HIGH, then fall into the left side.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      /* Mirror image: branch straight to the bounded left
		 subtree for INDEX < HIGH (i.e. below this single value),
		 then fall into the right side.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Both children are single-valued leaves: just emit one
		 equality test per child, no ordered comparison needed.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->right->low),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->left->low),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      if (default_label)
		emit_jump (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if the right child
	     does not have any children and is single valued; it would
	     cost too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  /* Values below this node go to default.  */
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->right->low),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  /* Values above this node go to default.  */
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->left->low),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_normal (node->high),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      if (default_label)
		emit_jump (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->high),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      /* Only the upper bound needs checking.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      /* Only the lower bound needs checking.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
	      low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
						    high, low),
				       NULL_RTX, mode, EXPAND_NORMAL);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}