CbC_gcc: gcc/stmt.c @ 0:a06113de4d67

first commit

author:   kent <kent@cr.ie.u-ryukyu.ac.jp>
date:     Fri, 17 Jul 2009 14:47:48 +0900
children: 58ad6c70ea60
/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   The functions whose names start with `expand_' are called by the
   expander to generate RTL instructions for various kinds of constructs.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"
#include "alloc-pool.h"

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */

struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]

static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static int estimate_case_costs (case_node_ptr);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *add_case_node (struct case_node *, tree,
                                        tree, tree, tree, alloc_pool);

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
        LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);

  gcc_assert (function);

  forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
        = gen_rtx_EXPR_LIST (VOIDmode, label_r,
                             nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
                                ggc_strdup (TREE_STRING_POINTER (string)),
                                locus);

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
                         int ninputs, int noutputs, bool *allows_mem,
                         bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
        warning (0, "output constraint %qc for operand %d "
                 "is not at the beginning",
                 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = XALLOCAVEC (char, c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
         it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
        error ("operand constraint contains incorrectly positioned "
               "%<+%> or %<=%>");
        return false;

      case '%':
        if (operand_num + 1 == ninputs + noutputs)
          {
            error ("%<%%%> constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
        *allows_mem = true;
        break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
        error ("matching constraint not valid in output operand");
        return false;

      case '<':  case '>':
        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
           excepting those that expand_call created.  So match memory
           and hope.  */
        *allows_mem = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      case 'p': case 'r':
        *allows_reg = true;
        break;

      default:
        if (!ISALPHA (*p))
          break;
        if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}

/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
                        int ninputs, int noutputs, int ninout,
                        const char * const * constraints,
                        bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
        if (constraint == orig_constraint)
          {
            error ("input operand constraint contains %qc", constraint[j]);
            return false;
          }
        break;

      case '%':
        if (constraint == orig_constraint
            && input_num + 1 == ninputs - ninout)
          {
            error ("%<%%%> constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
        *allows_mem = true;
        break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

        /* Whether or not a numeric constraint allows a register is
           decided by the matching constraint, and so there is no need
           to do anything special with them.  We must handle them in
           the default case, so that we don't unnecessarily force
           operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
        {
          char *end;
          unsigned long match;

          saw_match = true;

          match = strtoul (constraint + j, &end, 10);
          if (match >= (unsigned long) noutputs)
            {
              error ("matching constraint references invalid operand number");
              return false;
            }

          /* Try and find the real constraint for this dup.  Only do this
             if the matching constraint is the only alternative.  */
          if (*end == '\0'
              && (j == 0 || (j == 1 && constraint[0] == '%')))
            {
              constraint = constraints[match];
              *constraint_p = constraint;
              c_len = strlen (constraint);
              j = 0;
              /* ??? At the end of the loop, we will skip the first part of
                 the matched constraint.  This assumes not only that the
                 other constraint is an output constraint, but also that
                 the '=' or '+' come first.  */
              break;
            }
          else
            j = end - constraint;
          /* Anticipate increment at end of loop.  */
          j--;
        }
        /* Fall through.  */

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      default:
        if (! ISALPHA (constraint[j]))
          {
            error ("invalid punctuation %qc in constraint", constraint[j]);
            return false;
          }
        if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
            != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}

/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
   can be an asm-declared register.  Called via walk_tree.  */

static tree
decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
                              void *data)
{
  tree decl = *declp;
  const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;

  if (TREE_CODE (decl) == VAR_DECL)
    {
      if (DECL_HARD_REGISTER (decl)
          && REG_P (DECL_RTL (decl))
          && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
        {
          rtx reg = DECL_RTL (decl);

          if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
            return decl;
        }
      walk_subtrees = 0;
    }
  else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
    walk_subtrees = 0;
  return NULL_TREE;
}

/* If there is an overlap between *REGS and DECL, return the first overlap
   found.  */
tree
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
{
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
}

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit error and return the register
   variable definition for error, NULL_TREE for ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qs conflicts with asm clobber list",
             IDENTIFIER_POINTER (DECL_NAME (overlap)));

      /* Reset registerness to stop multiple errors emitted for a single
         variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_operands (tree string, tree outputs, tree inputs,
                     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
  int *inout_opnum = XALLOCAVEC (int, noutputs);
  rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
  enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
  const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname;

      if (TREE_VALUE (tail) == error_mark_node)
        return;
      regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name %qs in %<asm%>", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
        {
          /* Clobbering the PIC register is an error.  */
          if (i == (int) PIC_OFFSET_TABLE_REGNUM)
            {
              error ("PIC register %qs clobbered in %<asm%>", regname);
              return;
            }

          SET_HARD_REG_BIT (clobbered_regs, i);
        }
    }

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
        return;

      /* Try to parse the output constraint.  If that fails, there's
         no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
                                    &allows_mem, &allows_reg, &is_inout))
        return;

      if (! allows_reg
          && (allows_mem
              || is_inout
              || (DECL_P (val)
                  && REG_P (DECL_RTL (val))
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
        lang_hooks.mark_addressable (val);

      if (is_inout)
        ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
         would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
                                    constraints, &allows_mem, &allows_reg))
        return;

      if (! allows_reg && allows_mem)
        lang_hooks.mark_addressable (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;
      bool ok;

      ok = parse_output_constraint (&constraints[i], i, ninputs,
                                    noutputs, &allows_mem, &allows_reg,
                                    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (DECL_P (val)
              && (allows_mem || REG_P (DECL_RTL (val)))
              && ! (REG_P (DECL_RTL (val))
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
          if (MEM_P (op))
            op = validize_mem (op);

          if (! allows_reg && !MEM_P (op))
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && MEM_P (op))
              || GET_CODE (op) == CONCAT)
            {
              real_output_rtx[i] = op;
              op = gen_reg_rtx (GET_MODE (op));
              if (is_inout)
                emit_move_insn (op, real_output_rtx[i]);
            }
        }
      else
        {
          op = assign_temp (type, 0, 0, 1);
          op = validize_mem (op);
          TREE_VALUE (tail) = make_tree (type, op);
        }
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (type);
          inout_opnum[ninout++] = i;
        }

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
        clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                : GET_MODE (output_rtx[0])),
                               ggc_strdup (TREE_STRING_POINTER (string)),
                               empty_string, 0, argvec, constraintvec,
                               locus);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;
      bool ok;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
                                   constraints, &allows_mem, &allows_reg);
      gcc_assert (ok);

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      /* EXPAND_INITIALIZER will not generate code for valid initializer
         constants, but will still generate code for other types of operand.
         This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
                        allows_reg ? EXPAND_NORMAL
                        : allows_mem ? EXPAND_MEMORY
                        : EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
        op = validize_mem (op);

      if (asm_operand_ok (op, constraint, NULL) <= 0)
        {
          if (allows_reg && TYPE_MODE (type) != BLKmode)
            op = force_reg (TYPE_MODE (type), op);
          else if (!allows_mem)
            warning (0, "asm operand %d probably doesn%'t match constraints",
                     i + noutputs);
          else if (MEM_P (op))
            {
              /* We won't recognize either volatile memory or memory
                 with a queued address as available a memory_operand
                 at this point.  Ignore it: clearly this *is* a memory.  */
            }
          else
            {
              warning (0, "use of memory input without lvalue in "
                       "asm operand %d is deprecated", i + noutputs);

              if (CONSTANT_P (op))
                {
                  rtx mem = force_const_mem (TYPE_MODE (type), op);
                  if (mem)
                    op = validize_mem (mem);
                  else
                    op = force_reg (TYPE_MODE (type), op);
                }
              if (REG_P (op)
                  || GET_CODE (op) == SUBREG
                  || GET_CODE (op) == CONCAT)
                {
                  tree qual_type = build_qualified_type (type,
                                                         (TYPE_QUALS (type)
                                                          | TYPE_QUAL_CONST));
                  rtx memloc = assign_temp (qual_type, 1, 1, 1);
                  memloc = validize_mem (memloc);
                  emit_move_insn (memloc, op);
                  op = memloc;
                }
            }
        }

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
        = gen_rtx_ASM_INPUT (TYPE_MODE (type),
                             ggc_strdup (constraints[i + noutputs]));

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
        clobber_conflict_found = 1;
    }

  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
        = output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
        = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }

  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
                  ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }

  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }

  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
        num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx_SET (VOIDmode,
                           output_rtx[i],
                           gen_rtx_ASM_OPERANDS
                           (GET_MODE (output_rtx[i]),
                            ggc_strdup (TREE_STRING_POINTER (string)),
                            ggc_strdup (constraints[i]),
                            i, argvec, constraintvec, locus));

          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);
          rtx clobbered_reg;

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++)
                    = gen_rtx_CLOBBER (VOIDmode,
                                       gen_rtx_MEM
                                       (BLKmode,
                                        gen_rtx_SCRATCH (VOIDmode)));
                  continue;
                }

              /* Ignore unknown register, error already signaled.  */
              continue;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          clobbered_reg = gen_rtx_REG (QImode, j);

          /* Do sanity check for overlap between clobbers and respectively
             input and outputs that hasn't been handled.  Such overlap
             should have been detected and reported above.  */
          if (!clobber_conflict_found)
            {
              int opno;

              /* We test the old body (obody) contents to avoid tripping
                 over the under-construction body.  */
              for (opno = 0; opno < noutputs; opno++)
                if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
                  internal_error ("asm clobber conflict with output operand");

              for (opno = 0; opno < ninputs - ninout; opno++)
                if (reg_overlap_mentioned_p (clobbered_reg,
                                             ASM_OPERANDS_INPUT (obody, opno)))
                  internal_error ("asm clobber conflict with input operand");
            }

          XVECEXP (body, 0, i++)
            = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
        }

      emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  crtl->has_asm_statement = 1;
  free_temp_slots ();
}

void
expand_asm_expr (tree exp)
{
  int noutputs, i;
  tree outputs, tail;
  tree *o;

  if (ASM_INPUT_P (exp))
    {
      expand_asm_loc (ASM_STRING (exp), ASM_VOLATILE_P (exp), input_location);
      return;
    }

  outputs = ASM_OUTPUTS (exp);
  noutputs = list_length (outputs);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
                       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
                       input_location);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
        {
          expand_assignment (o[i], TREE_VALUE (tail), false);
          free_temp_slots ();

          /* Restore the original value so that it's correct the next
             time we expand this function.  */
          TREE_VALUE (tail) = o[i];
        }
    }
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
        = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in %<asm%>");
          return false;
        }

      tmp = outputs;
      while (tmp)
        {
          const char *constraint
            = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

          if (n_occurrences (',', constraint) != nalternatives)
            {
              error ("operand constraints for %<asm%> differ "
                     "in number of alternatives");
              return false;
            }

          if (TREE_CHAIN (tmp))
            tmp = TREE_CHAIN (tmp);
          else
            tmp = next, next = 0;
        }
    }

  return true;
}

/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.  */

static bool
check_unique_operand_names (tree outputs, tree inputs)
{
  tree i, j;

  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
        continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
          goto failure;
    }

  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
        continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
          goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
        if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
          goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name %qs",
         TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}

/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
        {
          p = buffer = xstrdup (c);
          while ((p = strchr (p, '[')) != NULL)
            p = resolve_operand_name_1 (p, outputs, inputs);
          TREE_VALUE (TREE_PURPOSE (t))
            = build_string (strlen (buffer), buffer);
          free (buffer);
        }
    }

  /* Now check for any needed substitutions in the template.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
        break;
      else if (ISALPHA (c[1]) && c[2] == '[')
        break;
      else
        {
          c += 1;
          continue;
        }
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
         Assume that we will not need extra space--we get to remove '['
         and ']', which means we cannot have a problem until we have more
         than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
        {
          if (p[1] == '[')
            p += 1;
          else if (ISALPHA (p[1]) && p[2] == '[')
            p += 2;
          else
            {
              p += 1;
              continue;
            }

          p = resolve_operand_name_1 (p, outputs, inputs);
        }

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}

/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  len = q - p - 1;

  /* Resolve the name to a number.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
        {
          const char *c = TREE_STRING_POINTER (name);
          if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
            goto found;
        }
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
        {
          const char *c = TREE_STRING_POINTER (name);
          if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
            goto found;
        }
    }

  *q = '\0';
  error ("undefined named operand %qs", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}

/* Generate RTL to evaluate the expression EXP.  */

void
expand_expr_stmt (tree exp)
{
  rtx value;
  tree type;

  value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
  type = TREE_TYPE (exp);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (type) == VOIDmode)
        ;
      else if (TYPE_MODE (type) != BLKmode)
        value = copy_to_reg (value);
      else
        {
          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_and_jump_insns (value, value, EQ,
                                   expand_normal (TYPE_SIZE (type)),
                                   BLKmode, 0, lab);
          emit_label (lab);
        }
    }

  /* Free any temporaries used to evaluate this expression.  */
  free_temp_slots ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression.  */

int
warn_if_unused_value (const_tree exp, location_t locus)
{
 restart:
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
    case VA_ARG_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
        return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
        return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
         case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
        return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
         the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
        {
          exp = TREE_OPERAND (exp, 0);
          goto restart;
        }
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
          && TREE_THIS_VOLATILE (exp))
        return 0;

      /* If this is an expression which has no operands, there is no value
         to be unused.  There are no such language-independent codes,
         but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
        return 0;

    warn:
      warning (OPT_Wunused_value, "%Hvalue computed is not used", &locus);
      return 1;
    }
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.)  */

void
expand_naked_return (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  end_label = naked_return_label;
  if (end_label == 0)
    end_label = naked_return_label = gen_label_rtx ();

  emit_jump (end_label);
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location
     unless it's already there.  */

  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
        {
          int unsignedp = TYPE_UNSIGNED (type);
          enum machine_mode old_mode
            = DECL_MODE (DECL_RESULT (current_function_decl));
          enum machine_mode mode
            = promote_mode (type, old_mode, &unsignedp, 1);

          if (mode != old_mode)
            val = convert_modes (mode, old_mode, val, unsignedp);
        }
      if (GET_CODE (return_reg) == PARALLEL)
        emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
        emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
         returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
            || TREE_CODE (retval) == INIT_EXPR)
           && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
           && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
           && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
        = int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
        = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_normal (retval_rhs);
      enum machine_mode tmpmode, result_reg_mode;

      if (bytes == 0)
        {
          expand_null_return ();
          return;
        }

      /* If the structure doesn't take up a whole number of words, see
         whether the register value should be padded on the left or on
         the right.  Set PADDING_CORRECTION to the number of padding
         bits needed on the left side.

         In most ABIs, the structure will be returned at the least end of
         the register, which translates to right padding on little-endian
         targets and left padding on big-endian targets.  The opposite
         holds if the structure is returned at the most significant
         end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
          && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
              ? !BYTES_BIG_ENDIAN
              : BYTES_BIG_ENDIAN))
        padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                               * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
           bitpos < bytes * BITS_PER_UNIT;
           bitpos += bitsize, xbitpos += bitsize)
        {
          /* We need a new destination pseudo each time xbitpos is
             on a word boundary and when xbitpos == padding_correction
             (the first time through).  */
          if (xbitpos % BITS_PER_WORD == 0
              || xbitpos == padding_correction)
            {
              /* Generate an appropriate register.  */
              dst = gen_reg_rtx (word_mode);
              result_pseudos[xbitpos / BITS_PER_WORD] = dst;

              /* Clear the destination before we move anything into it.  */
              emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
            }

          /* We need a new source operand each time bitpos is on a word
             boundary.  */
          if (bitpos % BITS_PER_WORD == 0)
            src = operand_subword_force (result_val,
                                         bitpos / BITS_PER_WORD,
                                         BLKmode);

          /* Use bitpos for the source extraction (left justified) and
             xbitpos for the destination store (right justified).  */
          store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
                           extract_bit_field (src, bitsize,
                                              bitpos % BITS_PER_WORD, 1,
                                              NULL_RTX, word_mode, word_mode));
        }

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
        {
          /* Find the smallest integer mode large enough to hold the
             entire structure and use that mode instead of BLKmode
             on the USE insn for the return register.  */
          for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmpmode != VOIDmode;
               tmpmode = GET_MODE_WIDER_MODE (tmpmode))
            /* Have we found a large enough mode?  */
            if (GET_MODE_SIZE (tmpmode) >= bytes)
              break;

          /* A suitable mode should have been found.  */
          gcc_assert (tmpmode != VOIDmode);

          PUT_MODE (result_rtl, tmpmode);
        }

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
        result_reg_mode = word_mode;
      else
        result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
        emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
                        result_pseudos[i]);

      if (tmpmode != result_reg_mode)
        result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
           && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
           && (REG_P (result_rtl)
               || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
         reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}

1727 /* Emit code to restore vital registers at the beginning of a nonlocal goto | |
1728 handler. */ | |
1729 static void | |
1730 expand_nl_goto_receiver (void) | |
1731 { | |
1732 /* The nonlocal goto clobbers the FP by the time we get here, so we | |
1733 have to make sure it's marked as used by this function. */ | |
1734 emit_use (hard_frame_pointer_rtx); | |
1735 | |
1736 /* Mark the static chain as clobbered here so life information | |
1737 doesn't get messed up for it. */ | |
1738 emit_clobber (static_chain_rtx); | |
1739 | |
1740 #ifdef HAVE_nonlocal_goto | |
1741 if (! HAVE_nonlocal_goto) | |
1742 #endif | |
1743 /* First adjust our frame pointer to its actual value. It was | |
1744 previously set to the start of the virtual area corresponding to | |
1745 the stacked variables when we branched here and now needs to be | |
1746 adjusted to the actual hardware fp value. | |
1747 | |
1748 Assignments to virtual registers are converted by | |
1749 instantiate_virtual_regs into the corresponding assignment | |
1750 to the underlying register (fp in this case) that makes | |
1751 the original assignment true. | |
1752 So the following insn will actually be | |
1753 decrementing fp by STARTING_FRAME_OFFSET. */ | |
1754 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx); | |
1755 | |
1756 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | |
1757 if (fixed_regs[ARG_POINTER_REGNUM]) | |
1758 { | |
1759 #ifdef ELIMINABLE_REGS | |
1760 /* If the argument pointer can be eliminated in favor of the | |
1761 frame pointer, we don't need to restore it. We assume here | |
1762 that if such an elimination is present, it can always be used. | |
1763 This is the case on all known machines; if we don't make this | |
1764 assumption, we do unnecessary saving on many machines. */ | |
1765 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS; | |
1766 size_t i; | |
1767 | |
1768 for (i = 0; i < ARRAY_SIZE (elim_regs); i++) | |
1769 if (elim_regs[i].from == ARG_POINTER_REGNUM | |
1770 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM) | |
1771 break; | |
1772 | |
1773 if (i == ARRAY_SIZE (elim_regs)) | |
1774 #endif | |
1775 { | |
1776 /* Now restore our arg pointer from the address at which it | |
1777 was saved in our stack frame. */ | |
1778 emit_move_insn (crtl->args.internal_arg_pointer, | |
1779 copy_to_reg (get_arg_pointer_save_area ())); | |
1780 } | |
1781 } | |
1782 #endif | |
1783 | |
1784 #ifdef HAVE_nonlocal_goto_receiver | |
1785 if (HAVE_nonlocal_goto_receiver) | |
1786 emit_insn (gen_nonlocal_goto_receiver ()); | |
1787 #endif | |
1788 | |
1789 /* We must not allow the code we just generated to be reordered by | |
1790 scheduling. Specifically, the update of the frame pointer must | |
1791 happen immediately, not later. */ | |
1792 emit_insn (gen_blockage ()); | |
1793 } | |
1794 | |
1795 /* Generate RTL for the automatic variable declaration DECL. | |
1796 (Other kinds of declarations are simply ignored if seen here.) */ | |
1797 | |
1798 void | |
1799 expand_decl (tree decl) | |
1800 { | |
1801 tree type; | |
1802 | |
1803 type = TREE_TYPE (decl); | |
1804 | |
1805 /* For a CONST_DECL, set mode, alignment, and sizes from those of the | |
1806 type in case this node is used in a reference. */ | |
1807 if (TREE_CODE (decl) == CONST_DECL) | |
1808 { | |
1809 DECL_MODE (decl) = TYPE_MODE (type); | |
1810 DECL_ALIGN (decl) = TYPE_ALIGN (type); | |
1811 DECL_SIZE (decl) = TYPE_SIZE (type); | |
1812 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type); | |
1813 return; | |
1814 } | |
1815 | |
1816 /* Otherwise, only automatic variables need any expansion done. Static and | |
1817 external variables, and external functions, will be handled by | |
1818 `assemble_variable' (called from finish_decl). TYPE_DECL requires | |
1819 nothing. PARM_DECLs are handled in `assign_parms'. */ | |
1820 if (TREE_CODE (decl) != VAR_DECL) | |
1821 return; | |
1822 | |
1823 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)) | |
1824 return; | |
1825 | |
1826 /* Create the RTL representation for the variable. */ | |
1827 | |
1828 if (type == error_mark_node) | |
1829 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx)); | |
1830 | |
1831 else if (DECL_SIZE (decl) == 0) | |
1832 { | |
1833 /* Variable with incomplete type. */ | |
1834 rtx x; | |
1835 if (DECL_INITIAL (decl) == 0) | |
1836 /* An error message was already issued; now just avoid a crash. */ | |
1837 x = gen_rtx_MEM (BLKmode, const0_rtx); | |
1838 else | |
1839 /* An initializer is going to decide the size of this array. | |
1840 Until we know the size, represent its address with a reg. */ | |
1841 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode)); | |
1842 | |
1843 set_mem_attributes (x, decl, 1); | |
1844 SET_DECL_RTL (decl, x); | |
1845 } | |
1846 else if (use_register_for_decl (decl)) | |
1847 { | |
1848 /* Automatic variable that can go in a register. */ | |
1849 int unsignedp = TYPE_UNSIGNED (type); | |
1850 enum machine_mode reg_mode | |
1851 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0); | |
1852 | |
1853 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode)); | |
1854 | |
1855 /* Note if the object is a user variable. */ | |
1856 if (!DECL_ARTIFICIAL (decl)) | |
1857 mark_user_reg (DECL_RTL (decl)); | |
1858 | |
1859 if (POINTER_TYPE_P (type)) | |
1860 mark_reg_pointer (DECL_RTL (decl), | |
1861 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))); | |
1862 } | |
1863 | |
1864 else | |
1865 { | |
1866 rtx oldaddr = 0; | |
1867 rtx addr; | |
1868 rtx x; | |
1869 | |
1870 /* Variable-sized decls are dealt with in the gimplifier. */ | |
1871 gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST); | |
1872 | |
1873 /* If we previously made RTL for this decl, it must be an array | |
1874 whose size was determined by the initializer. | |
1875 The old address was a register; set that register now | |
1876 to the proper address. */ | |
1877 if (DECL_RTL_SET_P (decl)) | |
1878 { | |
1879 gcc_assert (MEM_P (DECL_RTL (decl))); | |
1880 gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0))); | |
1881 oldaddr = XEXP (DECL_RTL (decl), 0); | |
1882 } | |
1883 | |
1884 /* Set the alignment we actually gave this decl. */ | |
1885 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT | |
1886 : GET_MODE_BITSIZE (DECL_MODE (decl))); | |
1887 DECL_USER_ALIGN (decl) = 0; | |
1888 | |
1889 x = assign_temp (decl, 1, 1, 1); | |
1890 set_mem_attributes (x, decl, 1); | |
1891 SET_DECL_RTL (decl, x); | |
1892 | |
1893 if (oldaddr) | |
1894 { | |
1895 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr); | |
1896 if (addr != oldaddr) | |
1897 emit_move_insn (oldaddr, addr); | |
1898 } | |
1899 } | |
1900 } | |
1901 | |
1902 /* Emit code to save the current value of the stack pointer. */ | |
1903 rtx | |
1904 expand_stack_save (void) | |
1905 { | |
1906 rtx ret = NULL_RTX; | |
1907 | |
1908 do_pending_stack_adjust (); | |
1909 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX); | |
1910 return ret; | |
1911 } | |
1912 | |
1913 /* Emit code to restore a previously saved value of the stack pointer. */ | |
1914 void | |
1915 expand_stack_restore (tree var) | |
1916 { | |
1917 rtx sa = expand_normal (var); | |
1918 | |
1919 sa = convert_memory_address (Pmode, sa); | |
1920 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX); | |
1921 } | |
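/* These two helpers expand the __builtin_stack_save and | |
__builtin_stack_restore calls that the gimplifier emits in pairs, | |
e.g. around a scope containing a variable-length array (sketch | |
with made-up names): void f (int n) { { char buf[n]; use (buf); } } | |
The stack pointer saved on entry to the inner scope is restored | |
on exit, releasing BUF's storage. */ | |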
1922 | |
1923 /* Do the insertion of a case label into case_list. The labels are | |
1924 fed to us in descending order from the sorted vector of case labels used | |
1925 in the tree part of the middle end. So the list we construct is | |
1926 sorted in ascending order. The bounds on the case range, LOW and HIGH, | |
1927 are converted to the case index type TYPE. */ | |
1928 | |
1929 static struct case_node * | |
1930 add_case_node (struct case_node *head, tree type, tree low, tree high, | |
1931 tree label, alloc_pool case_node_pool) | |
1932 { | |
1933 tree min_value, max_value; | |
1934 struct case_node *r; | |
1935 | |
1936 gcc_assert (TREE_CODE (low) == INTEGER_CST); | |
1937 gcc_assert (!high || TREE_CODE (high) == INTEGER_CST); | |
1938 | |
1939 min_value = TYPE_MIN_VALUE (type); | |
1940 max_value = TYPE_MAX_VALUE (type); | |
1941 | |
1942 /* If there's no HIGH value, then this is not a case range; it's | |
1943 just a simple case label. But that's just a degenerate case | |
1944 range. | |
1945 If the bounds are equal, turn this into the one-value case. */ | |
1946 if (!high || tree_int_cst_equal (low, high)) | |
1947 { | |
1948 /* If the simple case value is unreachable, ignore it. */ | |
1949 if ((TREE_CODE (min_value) == INTEGER_CST | |
1950 && tree_int_cst_compare (low, min_value) < 0) | |
1951 || (TREE_CODE (max_value) == INTEGER_CST | |
1952 && tree_int_cst_compare (low, max_value) > 0)) | |
1953 return head; | |
1954 low = fold_convert (type, low); | |
1955 high = low; | |
1956 } | |
1957 else | |
1958 { | |
1959 /* If the entire case range is unreachable, ignore it. */ | |
1960 if ((TREE_CODE (min_value) == INTEGER_CST | |
1961 && tree_int_cst_compare (high, min_value) < 0) | |
1962 || (TREE_CODE (max_value) == INTEGER_CST | |
1963 && tree_int_cst_compare (low, max_value) > 0)) | |
1964 return head; | |
1965 | |
1966 /* If the lower bound is less than the index type's minimum | |
1967 value, truncate the range bounds. */ | |
1968 if (TREE_CODE (min_value) == INTEGER_CST | |
1969 && tree_int_cst_compare (low, min_value) < 0) | |
1970 low = min_value; | |
1971 low = fold_convert (type, low); | |
1972 | |
1973 /* If the upper bound is greater than the index type's maximum | |
1974 value, truncate the range bounds. */ | |
1975 if (TREE_CODE (max_value) == INTEGER_CST | |
1976 && tree_int_cst_compare (high, max_value) > 0) | |
1977 high = max_value; | |
1978 high = fold_convert (type, high); | |
1979 } | |
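/* Example (illustrative values, not from the original source): if | |
TYPE is unsigned char with range [0, 255], a case range | |
250 ... 300 is truncated here to 250 ... 255, while a range | |
300 ... 310 lies entirely above MAX_VALUE and was already | |
discarded above. */ | |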
1980 | |
1981 | |
1982 /* Add this label to the chain. Make sure to drop overflow flags. */ | |
1983 r = (struct case_node *) pool_alloc (case_node_pool); | |
1984 r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low), | |
1985 TREE_INT_CST_HIGH (low)); | |
1986 r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high), | |
1987 TREE_INT_CST_HIGH (high)); | |
1988 r->code_label = label; | |
1989 r->parent = r->left = NULL; | |
1990 r->right = head; | |
1991 return r; | |
1992 } | |
1993 | |
1994 /* Maximum number of case bit tests. */ | |
1995 #define MAX_CASE_BIT_TESTS 3 | |
1996 | |
1997 /* By default, enable case bit tests on targets that have a left-shift insn for word_mode. */ | |
1998 #ifndef CASE_USE_BIT_TESTS | |
1999 #define CASE_USE_BIT_TESTS (optab_handler (ashl_optab, word_mode)->insn_code \ | |
2000 != CODE_FOR_nothing) | |
2001 #endif | |
2002 | |
2003 | |
2004 /* A case_bit_test represents a set of case nodes that may be | |
2005 selected from using a bit-wise comparison. HI and LO hold | |
2006 the integer to be tested against, LABEL contains the label | |
2007 to jump to upon success and BITS counts the number of case | |
2008 nodes handled by this test, typically the number of bits | |
2009 set in HI:LO. */ | |
2010 | |
2011 struct case_bit_test | |
2012 { | |
2013 HOST_WIDE_INT hi; | |
2014 HOST_WIDE_INT lo; | |
2015 rtx label; | |
2016 int bits; | |
2017 }; | |
2018 | |
2019 /* Determine whether "1 << x" is relatively cheap in word_mode. */ | |
2020 | |
2021 static bool | |
2022 lshift_cheap_p (void) | |
2023 { | |
2024 static bool init = false; | |
2025 static bool cheap = true; | |
2026 | |
2027 if (!init) | |
2028 { | |
2029 rtx reg = gen_rtx_REG (word_mode, 10000); | |
2030 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET, | |
2031 optimize_insn_for_speed_p ()); | |
2032 cheap = cost < COSTS_N_INSNS (3); | |
2033 init = true; | |
2034 } | |
2035 | |
2036 return cheap; | |
2037 } | |
2038 | |
2039 /* Comparison function for qsort to order bit tests by decreasing | |
2040 number of case nodes, i.e. the node with the most cases gets | |
2041 tested first. */ | |
2042 | |
2043 static int | |
2044 case_bit_test_cmp (const void *p1, const void *p2) | |
2045 { | |
2046 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1; | |
2047 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2; | |
2048 | |
2049 if (d2->bits != d1->bits) | |
2050 return d2->bits - d1->bits; | |
2051 | |
2052 /* Stabilize the sort. */ | |
2053 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label); | |
2054 } | |
2055 | |
2056 /* Expand a switch statement by a short sequence of bit-wise | |
2057 comparisons. "switch(x)" is effectively converted into | |
2058 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are | |
2059 integer constants. | |
2060 | |
2061 INDEX_EXPR is the value being switched on, which is of | |
2062 type INDEX_TYPE. MINVAL is the lowest case value in | |
2063 the case nodes, of INDEX_TYPE type, and RANGE is the highest | |
2064 value minus MINVAL, also of type INDEX_TYPE. NODES is | |
2065 the set of case nodes, and DEFAULT_LABEL is the label to | |
2066 branch to should none of the cases match. | |
2067 | |
2068 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case | |
2069 node targets. */ | |
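/* Sketch of the transformation on made-up input (not from the | |
original source): for | |
switch (x) { case 1: case 3: case 5: foo (); break; } | |
we get MINVAL == 1, RANGE == 4 and a single unique target, so | |
the dispatch becomes roughly | |
if ((unsigned) (x - 1) > 4) goto dflt; | |
if ((1 << (x - 1)) & 0x15) goto foo_lab; | |
goto dflt; | |
where 0x15 has bits 0, 2 and 4 set, one per case value, and | |
dflt and foo_lab are made-up label names. */ | |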
2070 | |
2071 static void | |
2072 emit_case_bit_tests (tree index_type, tree index_expr, tree minval, | |
2073 tree range, case_node_ptr nodes, rtx default_label) | |
2074 { | |
2075 struct case_bit_test test[MAX_CASE_BIT_TESTS]; | |
2076 enum machine_mode mode; | |
2077 rtx expr, index, label; | |
2078 unsigned int i, j, lo, hi; | |
2079 struct case_node *n; | |
2080 unsigned int count; | |
2081 | |
2082 count = 0; | |
2083 for (n = nodes; n; n = n->right) | |
2084 { | |
2085 label = label_rtx (n->code_label); | |
2086 for (i = 0; i < count; i++) | |
2087 if (label == test[i].label) | |
2088 break; | |
2089 | |
2090 if (i == count) | |
2091 { | |
2092 gcc_assert (count < MAX_CASE_BIT_TESTS); | |
2093 test[i].hi = 0; | |
2094 test[i].lo = 0; | |
2095 test[i].label = label; | |
2096 test[i].bits = 1; | |
2097 count++; | |
2098 } | |
2099 else | |
2100 test[i].bits++; | |
2101 | |
2102 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, | |
2103 n->low, minval), 1); | |
2104 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, | |
2105 n->high, minval), 1); | |
2106 for (j = lo; j <= hi; j++) | |
2107 if (j >= HOST_BITS_PER_WIDE_INT) | |
2108 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT); | |
2109 else | |
2110 test[i].lo |= (HOST_WIDE_INT) 1 << j; | |
2111 } | |
2112 | |
2113 qsort (test, count, sizeof (*test), case_bit_test_cmp); | |
2114 | |
2115 index_expr = fold_build2 (MINUS_EXPR, index_type, | |
2116 fold_convert (index_type, index_expr), | |
2117 fold_convert (index_type, minval)); | |
2118 index = expand_normal (index_expr); | |
2119 do_pending_stack_adjust (); | |
2120 | |
2121 mode = TYPE_MODE (index_type); | |
2122 expr = expand_normal (range); | |
2123 if (default_label) | |
2124 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1, | |
2125 default_label); | |
2126 | |
2127 index = convert_to_mode (word_mode, index, 0); | |
2128 index = expand_binop (word_mode, ashl_optab, const1_rtx, | |
2129 index, NULL_RTX, 1, OPTAB_WIDEN); | |
2130 | |
2131 for (i = 0; i < count; i++) | |
2132 { | |
2133 expr = immed_double_const (test[i].lo, test[i].hi, word_mode); | |
2134 expr = expand_binop (word_mode, and_optab, index, expr, | |
2135 NULL_RTX, 1, OPTAB_WIDEN); | |
2136 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX, | |
2137 word_mode, 1, test[i].label); | |
2138 } | |
2139 | |
2140 if (default_label) | |
2141 emit_jump (default_label); | |
2142 } | |
2143 | |
2144 #ifndef HAVE_casesi | |
2145 #define HAVE_casesi 0 | |
2146 #endif | |
2147 | |
2148 #ifndef HAVE_tablejump | |
2149 #define HAVE_tablejump 0 | |
2150 #endif | |
2151 | |
2152 /* Terminate a case (Pascal/Ada) or switch (C) statement, given as | |
2153 the SWITCH_EXPR EXP. SWITCH_COND (exp) is the index expression | |
2154 to be tested, and TREE_TYPE (exp) is the original type of the | |
2155 index as given in the source before any compiler conversions. | |
2156 Generate the code to test it and jump to the right place. */ | |
2157 | |
2158 void | |
2159 expand_case (tree exp) | |
2160 { | |
2161 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE; | |
2162 rtx default_label = 0; | |
2163 struct case_node *n; | |
2164 unsigned int count, uniq; | |
2165 rtx index; | |
2166 rtx table_label; | |
2167 int ncases; | |
2168 rtx *labelvec; | |
2169 int i; | |
2170 rtx before_case, end, lab; | |
2171 | |
2172 tree vec = SWITCH_LABELS (exp); | |
2173 tree orig_type = TREE_TYPE (exp); | |
2174 tree index_expr = SWITCH_COND (exp); | |
2175 tree index_type = TREE_TYPE (index_expr); | |
2176 int unsignedp = TYPE_UNSIGNED (index_type); | |
2177 | |
2178 /* The insn after which the case dispatch should finally | |
2179 be emitted. Zero for a dummy. */ | |
2180 rtx start; | |
2181 | |
2182 /* A list of case labels; it is first built as a list and it may then | |
2183 be rearranged into a nearly balanced binary tree. */ | |
2184 struct case_node *case_list = 0; | |
2185 | |
2186 /* Label to jump to if no case matches. */ | |
2187 tree default_label_decl = NULL_TREE; | |
2188 | |
2189 alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool", | |
2190 sizeof (struct case_node), | |
2191 100); | |
2192 | |
2193 /* The switch body is lowered in gimplify.c, we should never have | |
2194 switches with a non-NULL SWITCH_BODY here. */ | |
2195 gcc_assert (!SWITCH_BODY (exp)); | |
2196 gcc_assert (SWITCH_LABELS (exp)); | |
2197 | |
2198 do_pending_stack_adjust (); | |
2199 | |
2200 /* An ERROR_MARK occurs for various reasons including invalid data type. */ | |
2201 if (index_type != error_mark_node) | |
2202 { | |
2203 tree elt; | |
2204 bitmap label_bitmap; | |
2205 int vl = TREE_VEC_LENGTH (vec); | |
2206 | |
2207 /* cleanup_tree_cfg removes all SWITCH_EXPRs whose index | |
2208 expression is an INTEGER_CST. */ | |
2209 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST); | |
2210 | |
2211 /* The default case, if ever taken, is at the end of TREE_VEC. */ | |
2212 elt = TREE_VEC_ELT (vec, vl - 1); | |
2213 if (!CASE_LOW (elt) && !CASE_HIGH (elt)) | |
2214 { | |
2215 default_label_decl = CASE_LABEL (elt); | |
2216 --vl; | |
2217 } | |
2218 | |
2219 for (i = vl - 1; i >= 0; --i) | |
2220 { | |
2221 tree low, high; | |
2222 elt = TREE_VEC_ELT (vec, i); | |
2223 | |
2224 low = CASE_LOW (elt); | |
2225 gcc_assert (low); | |
2226 high = CASE_HIGH (elt); | |
2227 | |
2228 /* Discard empty ranges. */ | |
2229 if (high && tree_int_cst_lt (high, low)) | |
2230 continue; | |
2231 | |
2232 case_list = add_case_node (case_list, index_type, low, high, | |
2233 CASE_LABEL (elt), case_node_pool); | |
2234 } | |
2235 | |
2236 | |
2237 before_case = start = get_last_insn (); | |
2238 if (default_label_decl) | |
2239 default_label = label_rtx (default_label_decl); | |
2240 | |
2241 /* Get upper and lower bounds of case values. */ | |
2242 | |
2243 uniq = 0; | |
2244 count = 0; | |
2245 label_bitmap = BITMAP_ALLOC (NULL); | |
2246 for (n = case_list; n; n = n->right) | |
2247 { | |
2248 /* Count the elements and track the largest and smallest | |
2249 of them (treating them as signed even if they are not). */ | |
2250 if (count++ == 0) | |
2251 { | |
2252 minval = n->low; | |
2253 maxval = n->high; | |
2254 } | |
2255 else | |
2256 { | |
2257 if (tree_int_cst_lt (n->low, minval)) | |
2258 minval = n->low; | |
2259 if (tree_int_cst_lt (maxval, n->high)) | |
2260 maxval = n->high; | |
2261 } | |
2262 /* A range counts double, since it requires two compares. */ | |
2263 if (! tree_int_cst_equal (n->low, n->high)) | |
2264 count++; | |
2265 | |
2266 /* If we have not seen this label yet, then increase the | |
2267 number of unique case node targets seen. */ | |
2268 lab = label_rtx (n->code_label); | |
2269 if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab))) | |
2270 { | |
2271 bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab)); | |
2272 uniq++; | |
2273 } | |
2274 } | |
2275 | |
2276 BITMAP_FREE (label_bitmap); | |
2277 | |
2278 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single | |
2279 destination, such as one with a default case only. However, | |
2280 it doesn't remove cases that are out of range for the switch | |
2281 type, so we may still get a zero here. */ | |
2282 if (count == 0) | |
2283 { | |
2284 if (default_label) | |
2285 emit_jump (default_label); | |
2286 free_alloc_pool (case_node_pool); | |
2287 return; | |
2288 } | |
2289 | |
2290 /* Compute span of values. */ | |
2291 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval); | |
2292 | |
2293 /* Try implementing this switch statement by a short sequence of | |
2294 bit-wise comparisons. However, we let the binary-tree case | |
2295 below handle constant index expressions. */ | |
2296 if (CASE_USE_BIT_TESTS | |
2297 && ! TREE_CONSTANT (index_expr) | |
2298 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0 | |
2299 && compare_tree_int (range, 0) > 0 | |
2300 && lshift_cheap_p () | |
2301 && ((uniq == 1 && count >= 3) | |
2302 || (uniq == 2 && count >= 5) | |
2303 || (uniq == 3 && count >= 6))) | |
2304 { | |
2305 /* If all the case values already fit in a word without | |
2306 subtracting MINVAL, rebase MINVAL to zero so that the | |
2307 subtraction can be omitted entirely. */ | |
2308 if (compare_tree_int (minval, 0) > 0 | |
2309 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0) | |
2310 { | |
2311 minval = build_int_cst (index_type, 0); | |
2312 range = maxval; | |
2313 } | |
2314 emit_case_bit_tests (index_type, index_expr, minval, range, | |
2315 case_list, default_label); | |
2316 } | |
2317 | |
2318 /* If range of values is much bigger than number of values, | |
2319 make a sequence of conditional branches instead of a dispatch. | |
2320 If the switch-index is a constant, do it this way | |
2321 because we can optimize it. */ | |
2322 | |
2323 else if (count < case_values_threshold () | |
2324 || compare_tree_int (range, | |
2325 (optimize_insn_for_size_p () ? 3 : 10) * count) > 0 | |
2326 /* RANGE may be signed, and really large ranges will show up | |
2327 as negative numbers. */ | |
2328 || compare_tree_int (range, 0) < 0 | |
2329 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT | |
2330 || flag_pic | |
2331 #endif | |
2332 || !flag_jump_tables | |
2333 || TREE_CONSTANT (index_expr) | |
2334 /* If neither casesi nor tablejump is available, we can | |
2335 only go this way. */ | |
2336 || (!HAVE_casesi && !HAVE_tablejump)) | |
2337 { | |
2338 index = expand_normal (index_expr); | |
2339 | |
2340 /* If the index is a short or char that we do not have | |
2341 an insn to handle comparisons directly, convert it to | |
2342 a full integer now, rather than letting each comparison | |
2343 generate the conversion. */ | |
2344 | |
2345 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT | |
2346 && ! have_insn_for (COMPARE, GET_MODE (index))) | |
2347 { | |
2348 enum machine_mode wider_mode; | |
2349 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode; | |
2350 wider_mode = GET_MODE_WIDER_MODE (wider_mode)) | |
2351 if (have_insn_for (COMPARE, wider_mode)) | |
2352 { | |
2353 index = convert_to_mode (wider_mode, index, unsignedp); | |
2354 break; | |
2355 } | |
2356 } | |
2357 | |
2358 do_pending_stack_adjust (); | |
2359 | |
2360 if (MEM_P (index)) | |
2361 index = copy_to_reg (index); | |
2362 | |
2363 /* We generate a binary decision tree to select the | |
2364 appropriate target code. This is done as follows: | |
2365 | |
2366 The list of cases is rearranged into a binary tree, | |
2367 nearly optimal assuming equal probability for each case. | |
2368 | |
2369 The tree is transformed into RTL, eliminating | |
2370 redundant test conditions at the same time. | |
2371 | |
2372 If program flow could reach the end of the | |
2373 decision tree an unconditional jump to the | |
2374 default code is emitted. */ | |
2375 | |
2376 use_cost_table | |
2377 = (TREE_CODE (orig_type) != ENUMERAL_TYPE | |
2378 && estimate_case_costs (case_list)); | |
2379 balance_case_nodes (&case_list, NULL); | |
2380 emit_case_nodes (index, case_list, default_label, index_type); | |
2381 if (default_label) | |
2382 emit_jump (default_label); | |
2383 } | |
2384 else | |
2385 { | |
2386 rtx fallback_label = label_rtx (case_list->code_label); | |
2387 table_label = gen_label_rtx (); | |
2388 if (! try_casesi (index_type, index_expr, minval, range, | |
2389 table_label, default_label, fallback_label)) | |
2390 { | |
2391 bool ok; | |
2392 | |
2393 /* Index jumptables from zero for suitable values of | |
2394 minval to avoid a subtraction. */ | |
2395 if (optimize_insn_for_speed_p () | |
2396 && compare_tree_int (minval, 0) > 0 | |
2397 && compare_tree_int (minval, 3) < 0) | |
2398 { | |
2399 minval = build_int_cst (index_type, 0); | |
2400 range = maxval; | |
2401 } | |
2402 | |
2403 ok = try_tablejump (index_type, index_expr, minval, range, | |
2404 table_label, default_label); | |
2405 gcc_assert (ok); | |
2406 } | |
2407 | |
2408 /* Get table of labels to jump to, in order of case index. */ | |
2409 | |
2410 ncases = tree_low_cst (range, 0) + 1; | |
2411 labelvec = XALLOCAVEC (rtx, ncases); | |
2412 memset (labelvec, 0, ncases * sizeof (rtx)); | |
2413 | |
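/* Illustration (example values, not from the original source): for | |
case values 2, 3 and 5 we get MINVAL == 2, RANGE == 3 and | |
NCASES == 4; the loop below fills LABELVEC slots 0, 1 and 3 with | |
the labels for 2, 3 and 5, and the gap at slot 2 is filled with | |
the default label afterwards. */ | |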
2414 for (n = case_list; n; n = n->right) | |
2415 { | |
2416 /* Compute the low and high bounds relative to the minimum | |
2417 value since that should fit in a HOST_WIDE_INT while the | |
2418 actual values may not. */ | |
2419 HOST_WIDE_INT i_low | |
2420 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, | |
2421 n->low, minval), 1); | |
2422 HOST_WIDE_INT i_high | |
2423 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type, | |
2424 n->high, minval), 1); | |
2425 HOST_WIDE_INT i; | |
2426 | |
2427 for (i = i_low; i <= i_high; i++) | |
2428 labelvec[i] | |
2429 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label)); | |
2430 } | |
2431 | |
2432 /* Fill in the gaps with the default. We may have gaps at | |
2433 the beginning if we tried to avoid the minval subtraction, | |
2434 so substitute some label even if the default label was | |
2435 deemed unreachable. */ | |
2436 if (!default_label) | |
2437 default_label = fallback_label; | |
2438 for (i = 0; i < ncases; i++) | |
2439 if (labelvec[i] == 0) | |
2440 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label); | |
2441 | |
2442 /* Output the table. */ | |
2443 emit_label (table_label); | |
2444 | |
2445 if (CASE_VECTOR_PC_RELATIVE || flag_pic) | |
2446 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE, | |
2447 gen_rtx_LABEL_REF (Pmode, table_label), | |
2448 gen_rtvec_v (ncases, labelvec), | |
2449 const0_rtx, const0_rtx)); | |
2450 else | |
2451 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE, | |
2452 gen_rtvec_v (ncases, labelvec))); | |
2453 | |
2454 /* Record no drop-through after the table. */ | |
2455 emit_barrier (); | |
2456 } | |
2457 | |
2458 before_case = NEXT_INSN (before_case); | |
2459 end = get_last_insn (); | |
2460 reorder_insns (before_case, end, start); | |
2461 } | |
2462 | |
2463 free_temp_slots (); | |
2464 free_alloc_pool (case_node_pool); | |
2465 } | |
2466 | |
2467 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */ | |
2468 | |
2469 static void | |
2470 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label, | |
2471 int unsignedp) | |
2472 { | |
2473 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode, | |
2474 NULL_RTX, NULL_RTX, label); | |
2475 } | |
2476 | |
2477 /* Not all case values are encountered equally. This function | |
2478 uses a heuristic to weight case labels, in cases where that | |
2479 looks like a reasonable thing to do. | |
2480 | |
2481 Right now, all we try to guess is text, and we establish the | |
2482 following weights: | |
2483 | |
2484 chars above space: 16 | |
2485 digits: 16 | |
2486 default: 12 | |
2487 space, punct: 8 | |
2488 tab: 4 | |
2489 newline: 2 | |
2490 other "\" chars: 1 | |
2491 remaining chars: 0 | |
2492 | |
2493 If we find any cases in the switch that are not either -1 or in the range | |
2494 of valid ASCII characters, or are control characters other than those | |
2495 commonly used with "\", don't treat this switch as scanning text. | |
2496 | |
2497 Return 1 if these nodes are suitable for cost estimation, otherwise | |
2498 return 0. */ | |
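/* For instance (made-up cases): a switch over 'a', 'b' and '\n' | |
qualifies and uses the weights above, whereas one containing the | |
value 200 or the control character '\001' makes this function | |
return 0 and the caller falls back to uniform costs. */ | |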
2499 | |
2500 static int | |
2501 estimate_case_costs (case_node_ptr node) | |
2502 { | |
2503 tree min_ascii = integer_minus_one_node; | |
2504 tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127); | |
2505 case_node_ptr n; | |
2506 int i; | |
2507 | |
2508 /* If we haven't already made the cost table, make it now. Note that the | |
2509 lower bound of the table is -1, not zero. */ | |
2510 | |
2511 if (! cost_table_initialized) | |
2512 { | |
2513 cost_table_initialized = 1; | |
2514 | |
2515 for (i = 0; i < 128; i++) | |
2516 { | |
2517 if (ISALNUM (i)) | |
2518 COST_TABLE (i) = 16; | |
2519 else if (ISPUNCT (i)) | |
2520 COST_TABLE (i) = 8; | |
2521 else if (ISCNTRL (i)) | |
2522 COST_TABLE (i) = -1; | |
2523 } | |
2524 | |
2525 COST_TABLE (' ') = 8; | |
2526 COST_TABLE ('\t') = 4; | |
2527 COST_TABLE ('\0') = 4; | |
2528 COST_TABLE ('\n') = 2; | |
2529 COST_TABLE ('\f') = 1; | |
2530 COST_TABLE ('\v') = 1; | |
2531 COST_TABLE ('\b') = 1; | |
2532 } | |
2533 | |
2534 /* See if all the case expressions look like text. It is text if the | |
2535 constant is >= -1 and the highest constant is <= 127. Do all comparisons | |
2536 as signed arithmetic since we don't want to ever access cost_table with a | |
2537 value less than -1. Also check that none of the constants in a range | |
2538 are strange control characters. */ | |
2539 | |
2540 for (n = node; n; n = n->right) | |
2541 { | |
2542 if (tree_int_cst_lt (n->low, min_ascii) | |
2543 || tree_int_cst_lt (max_ascii, n->high)) | |
2544 return 0; | |
2545 | |
2546 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low); | |
2547 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++) | |
2548 if (COST_TABLE (i) < 0) | |
2549 return 0; | |
2550 } | |
2551 | |
2552 /* All interesting values are within the range of interesting | |
2553 ASCII characters. */ | |
2554 return 1; | |
2555 } | |
2556 | |
2557 /* Take an ordered list of case nodes | |
2558 and transform them into a near optimal binary tree, | |
2559 on the assumption that any target code selection value is as | |
2560 likely as any other. | |
2561 | |
2562 The transformation is performed by splitting the ordered | |
2563 list into two equal sections plus a pivot. The parts are | |
2564 then attached to the pivot as left and right branches. Each | |
2565 branch is then transformed recursively. */ | |
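/* Example (uniform costs, not from the original source): an ordered | |
chain of seven single-value nodes for 1 ... 7 is split at the | |
fourth node, giving pivot 4 with left subtree {1, 2, 3} and right | |
subtree {5, 6, 7}, each of which is then balanced recursively in | |
the same way. */ | |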
2566 | |
2567 static void | |
2568 balance_case_nodes (case_node_ptr *head, case_node_ptr parent) | |
2569 { | |
2570 case_node_ptr np; | |
2571 | |
2572 np = *head; | |
2573 if (np) | |
2574 { | |
2575 int cost = 0; | |
2576 int i = 0; | |
2577 int ranges = 0; | |
2578 case_node_ptr *npp; | |
2579 case_node_ptr left; | |
2580 | |
2581 /* Count the number of entries on branch. Also count the ranges. */ | |
2582 | |
2583 while (np) | |
2584 { | |
2585 if (!tree_int_cst_equal (np->low, np->high)) | |
2586 { | |
2587 ranges++; | |
2588 if (use_cost_table) | |
2589 cost += COST_TABLE (TREE_INT_CST_LOW (np->high)); | |
2590 } | |
2591 | |
2592 if (use_cost_table) | |
2593 cost += COST_TABLE (TREE_INT_CST_LOW (np->low)); | |
2594 | |
2595 i++; | |
2596 np = np->right; | |
2597 } | |
2598 | |
2599 if (i > 2) | |
2600 { | |
2601 /* Split this list if it is long enough for that to help. */ | |
2602 npp = head; | |
2603 left = *npp; | |
2604 if (use_cost_table) | |
2605 { | |
2606 /* Find the place in the list that bisects the list's total cost. | |
2607 Here I gets half the total cost. */ | |
2608 int n_moved = 0; | |
2609 i = (cost + 1) / 2; | |
2610 while (1) | |
2611 { | |
2612 /* Skip nodes while their cost does not reach that amount. */ | |
2613 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high)) | |
2614 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high)); | |
2615 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low)); | |
2616 if (i <= 0) | |
2617 break; | |
2618 npp = &(*npp)->right; | |
2619 n_moved += 1; | |
2620 } | |
2621 if (n_moved == 0) | |
2622 { | |
2623 /* Leave this branch lopsided, but optimize left-hand | |
2624 side and fill in `parent' fields for right-hand side. */ | |
2625 np = *head; | |
2626 np->parent = parent; | |
2627 balance_case_nodes (&np->left, np); | |
2628 for (; np->right; np = np->right) | |
2629 np->right->parent = np; | |
2630 return; | |
2631 } | |
2632 } | |
2633 /* If there are just three nodes, split at the middle one. */ | |
2634 else if (i == 3) | |
2635 npp = &(*npp)->right; | |
2636 else | |
2637 { | |
2638 /* Find the place in the list that bisects the list's total cost, | |
2639 where ranges count as 2. | |
2640 Here I gets half the total cost. */ | |
2641 i = (i + ranges + 1) / 2; | |
2642 while (1) | |
2643 { | |
2644 /* Skip nodes while their cost does not reach that amount. */ | |
2645 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high)) | |
2646 i--; | |
2647 i--; | |
2648 if (i <= 0) | |
2649 break; | |
2650 npp = &(*npp)->right; | |
2651 } | |
2652 } | |
2653 *head = np = *npp; | |
2654 *npp = 0; | |
2655 np->parent = parent; | |
2656 np->left = left; | |
2657 | |
2658 /* Optimize each of the two split parts. */ | |
2659 balance_case_nodes (&np->left, np); | |
2660 balance_case_nodes (&np->right, np); | |
2661 } | |
2662 else | |
2663 { | |
2664 /* Else leave this branch as one level, | |
2665 but fill in `parent' fields. */ | |
2666 np = *head; | |
2667 np->parent = parent; | |
2668 for (; np->right; np = np->right) | |
2669 np->right->parent = np; | |
2670 } | |
2671 } | |
2672 } | |
2673 | |
2674 /* Search the parent sections of the case node tree | |
2675 to see if a test for the lower bound of NODE would be redundant. | |
2676 INDEX_TYPE is the type of the index expression. | |
2677 | |
2678 The instructions to generate the case decision tree are | |
2679 output in the same order as nodes are processed, so it is | |
2680 known that if a parent node has already tested against the | |
2681 current node's lower bound minus one, the current node is | |
2682 bounded at its lower end. Thus the test would be redundant. */ | |
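/* Concretely (example values): if this node tests for 51 and some | |
ancestor's HIGH was 50, control can only reach this node with | |
INDEX > 50, so LOW_MINUS_ONE == 50 matches that ancestor and the | |
lower-bound test can be skipped. */ | |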
2683 | |
2684 static int | |
2685 node_has_low_bound (case_node_ptr node, tree index_type) | |
2686 { | |
2687 tree low_minus_one; | |
2688 case_node_ptr pnode; | |
2689 | |
2690 /* If the lower bound of this node is the lowest value in the index type, | |
2691 we need not test it. */ | |
2692 | |
2693 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type))) | |
2694 return 1; | |
2695 | |
2696 /* If this node has a left branch, the value at the left must be less | |
2697 than that at this node, so it cannot be bounded at the bottom and | |
2698 we need not bother testing any further. */ | |
2699 | |
2700 if (node->left) | |
2701 return 0; | |
2702 | |
2703 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low), | |
2704 node->low, | |
2705 build_int_cst (TREE_TYPE (node->low), 1)); | |
2706 | |
2707 /* If the subtraction above overflowed, we can't verify anything. | |
2708 Otherwise, look for a parent that tests our value - 1. */ | |
2709 | |
2710 if (! tree_int_cst_lt (low_minus_one, node->low)) | |
2711 return 0; | |
2712 | |
2713 for (pnode = node->parent; pnode; pnode = pnode->parent) | |
2714 if (tree_int_cst_equal (low_minus_one, pnode->high)) | |
2715 return 1; | |
2716 | |
2717 return 0; | |
2718 } | |
2719 | |
2720 /* Search the parent sections of the case node tree | |
2721 to see if a test for the upper bound of NODE would be redundant. | |
2722 INDEX_TYPE is the type of the index expression. | |
2723 | |
2724 The instructions to generate the case decision tree are | |
2725 output in the same order as nodes are processed, so it is | |
2726 known that if a parent node has already tested against the | |
2727 current node's upper bound plus one, the current node is | |
2728 bounded at its upper end. Thus the test would be redundant. */ | |
2729 | |
2730 static int | |
2731 node_has_high_bound (case_node_ptr node, tree index_type) | |
2732 { | |
2733 tree high_plus_one; | |
2734 case_node_ptr pnode; | |
2735 | |
2736 /* If there is no upper bound, obviously no test is needed. */ | |
2737 | |
2738 if (TYPE_MAX_VALUE (index_type) == NULL) | |
2739 return 1; | |
2740 | |
2741 /* If the upper bound of this node is the highest value in the type | |
2742 of the index expression, we need not test against it. */ | |
2743 | |
2744 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type))) | |
2745 return 1; | |
2746 | |
2747 /* If this node has a right branch, the value at the right must be greater | |
2748 than that at this node, so it cannot be bounded at the top and | |
2749 we need not bother testing any further. */ | |
2750 | |
2751 if (node->right) | |
2752 return 0; | |
2753 | |
2754 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high), | |
2755 node->high, | |
2756 build_int_cst (TREE_TYPE (node->high), 1)); | |
2757 | |
2758 /* If the addition above overflowed, we can't verify anything. | |
2759 Otherwise, look for a parent that tests our value + 1. */ | |
2760 | |
2761 if (! tree_int_cst_lt (node->high, high_plus_one)) | |
2762 return 0; | |
2763 | |
2764 for (pnode = node->parent; pnode; pnode = pnode->parent) | |
2765 if (tree_int_cst_equal (high_plus_one, pnode->low)) | |
2766 return 1; | |
2767 | |
2768 return 0; | |
2769 } | |
2770 | |
2771 /* Search the parent sections of the | |
2772 case node tree to see if both tests for the upper and lower | |
2773 bounds of NODE would be redundant. */ | |
2774 | |
2775 static int | |
2776 node_is_bounded (case_node_ptr node, tree index_type) | |
2777 { | |
2778 return (node_has_low_bound (node, index_type) | |
2779 && node_has_high_bound (node, index_type)); | |
2780 } | |
2781 | |
2782 /* Emit step-by-step code to select a case for the value of INDEX. | |
2783 The thus generated decision tree follows the form of the | |
2784 case-node binary tree NODE, whose nodes represent test conditions. | |
2785 INDEX_TYPE is the type of the index of the switch. | |
2786 | |
2787 Care is taken to prune redundant tests from the decision tree | |
2788 by detecting any boundary conditions already checked by | |
2789 emitted rtx. (See node_has_high_bound, node_has_low_bound | |
2790 and node_is_bounded, above.) | |
2791 | |
2792 Where the test conditions can be shown to be redundant we emit | |
2793 an unconditional jump to the target code. As a further | |
2794 optimization, the subtrees of a node are examined for bounded | |
2795 nodes. In that case the conditional and/or unconditional jumps | |
2796 emitted for the current node's boundary checks are arranged to | |
2797 target the subtree's associated code directly for values that | |
2798 are out of bounds for the current node. | |
2799 | |
2800 We can assume that when control reaches the code generated here, | |
2801 the index value has already been compared with the parents | |
2802 of this node, and determined to be on the same side of each parent | |
2803 as this node is. Thus, if this node tests for the value 51, | |
2804 and a parent tested for 52, we don't need to consider | |
2805 the possibility of a value greater than 51. If another parent | |
2806 tests for the value 50, then this node need not test anything. */ | |
2807 | |
2808 static void | |
2809 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label, | |
2810 tree index_type) | |
2811 { | |
2812 /* If INDEX has an unsigned type, we must make unsigned branches. */ | |
2813 int unsignedp = TYPE_UNSIGNED (index_type); | |
2814 enum machine_mode mode = GET_MODE (index); | |
2815 enum machine_mode imode = TYPE_MODE (index_type); | |
2816 | |
2817 /* Handle indices detected as constant during RTL expansion. */ | |
2818 if (mode == VOIDmode) | |
2819 mode = imode; | |
2820 | |
2821 /* See if our parents have already tested everything for us. | |
2822 If they have, emit an unconditional jump for this node. */ | |
2823 if (node_is_bounded (node, index_type)) | |
2824 emit_jump (label_rtx (node->code_label)); | |
2825 | |
2826 else if (tree_int_cst_equal (node->low, node->high)) | |
2827 { | |
2828 /* Node is single valued. First see if the index expression matches | |
2829 this node and then check our children, if any. */ | |
2830 | |
2831 do_jump_if_equal (mode, index, | |
2832 convert_modes (mode, imode, | |
2833 expand_normal (node->low), | |
2834 unsignedp), | |
2835 label_rtx (node->code_label), unsignedp); | |
2836 | |
2837 if (node->right != 0 && node->left != 0) | |
2838 { | |
2839 /* This node has children on both sides. | |
2840 Dispatch to one side or the other | |
2841 by comparing the index value with this node's value. | |
2842 If one subtree is bounded, check that one first, | |
2843 so we can avoid real branches in the tree. */ | |
2844 | |
2845 if (node_is_bounded (node->right, index_type)) | |
2846 { | |
2847 emit_cmp_and_jump_insns (index, | |
2848 convert_modes | |
2849 (mode, imode, | |
2850 expand_normal (node->high), | |
2851 unsignedp), | |
2852 GT, NULL_RTX, mode, unsignedp, | |
2853 label_rtx (node->right->code_label)); | |
2854 emit_case_nodes (index, node->left, default_label, index_type); | |
2855 } | |
2856 | |
2857 else if (node_is_bounded (node->left, index_type)) | |
2858 { | |
2859 emit_cmp_and_jump_insns (index, | |
2860 convert_modes | |
2861 (mode, imode, | |
2862 expand_normal (node->high), | |
2863 unsignedp), | |
2864 LT, NULL_RTX, mode, unsignedp, | |
2865 label_rtx (node->left->code_label)); | |
2866 emit_case_nodes (index, node->right, default_label, index_type); | |
2867 } | |
2868 | |
2869 /* If both children are single-valued cases with no | |
2870 children, finish up all the work. This way, we can save | |
2871 one ordered comparison. */ | |
2872 else if (tree_int_cst_equal (node->right->low, node->right->high) | |
2873 && node->right->left == 0 | |
2874 && node->right->right == 0 | |
2875 && tree_int_cst_equal (node->left->low, node->left->high) | |
2876 && node->left->left == 0 | |
2877 && node->left->right == 0) | |
2878 { | |
2879 /* Both children are single values with no subtrees of their | |
2880 own, so just test the index against each value directly. */ | |
2881 | |
2882 /* See if the value matches what the right hand side | |
2883 wants. */ | |
2884 do_jump_if_equal (mode, index, | |
2885 convert_modes (mode, imode, | |
2886 expand_normal (node->right->low), | |
2887 unsignedp), | |
2888 label_rtx (node->right->code_label), | |
2889 unsignedp); | |
2890 | |
2891 /* See if the value matches what the left hand side | |
2892 wants. */ | |
2893 do_jump_if_equal (mode, index, | |
2894 convert_modes (mode, imode, | |
2895 expand_normal (node->left->low), | |
2896 unsignedp), | |
2897 label_rtx (node->left->code_label), | |
2898 unsignedp); | |
2899 } | |
2900 | |
2901 else | |
2902 { | |
2903 /* Neither node is bounded. First distinguish the two sides; | |
2904 then emit the code for one side at a time. */ | |
2905 | |
2906 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); | |
2907 | |
2908 /* See if the value is on the right. */ | |
2909 emit_cmp_and_jump_insns (index, | |
2910 convert_modes | |
2911 (mode, imode, | |
2912 expand_normal (node->high), | |
2913 unsignedp), | |
2914 GT, NULL_RTX, mode, unsignedp, | |
2915 label_rtx (test_label)); | |
2916 | |
2917 /* Value must be on the left. | |
2918 Handle the left-hand subtree. */ | |
2919 emit_case_nodes (index, node->left, default_label, index_type); | |
2920 /* If left-hand subtree does nothing, | |
2921 go to default. */ | |
2922 if (default_label) | |
2923 emit_jump (default_label); | |
2924 | |
2925 /* Code branches here for the right-hand subtree. */ | |
2926 expand_label (test_label); | |
2927 emit_case_nodes (index, node->right, default_label, index_type); | |
2928 } | |
2929 } | |
2930 | |
2931 else if (node->right != 0 && node->left == 0) | |
2932 { | |
2933 /* Here we have a right child but no left, so we issue a conditional | |
2934 branch to default and process the right child. | |
2935 | |
2936 Omit the conditional branch to default if the right child | |
2937 does not have any children and is single valued; it would | |
2938 cost too much space to save so little time. */ | |
2939 | |
2940 if (node->right->right || node->right->left | |
2941 || !tree_int_cst_equal (node->right->low, node->right->high)) | |
2942 { | |
2943 if (!node_has_low_bound (node, index_type)) | |
2944 { | |
2945 emit_cmp_and_jump_insns (index, | |
2946 convert_modes | |
2947 (mode, imode, | |
2948 expand_normal (node->high), | |
2949 unsignedp), | |
2950 LT, NULL_RTX, mode, unsignedp, | |
2951 default_label); | |
2952 } | |
2953 | |
2954 emit_case_nodes (index, node->right, default_label, index_type); | |
2955 } | |
2956 else | |
2957 /* We cannot process node->right normally | |
2958 since we haven't ruled out the numbers less than | |
2959 this node's value. So handle node->right explicitly. */ | |
2960 do_jump_if_equal (mode, index, | |
2961 convert_modes | |
2962 (mode, imode, | |
2963 expand_normal (node->right->low), | |
2964 unsignedp), | |
2965 label_rtx (node->right->code_label), unsignedp); | |
2966 } | |
2967 | |
2968 else if (node->right == 0 && node->left != 0) | |
2969 { | |
2970 /* Just one subtree, on the left. */ | |
2971 if (node->left->left || node->left->right | |
2972 || !tree_int_cst_equal (node->left->low, node->left->high)) | |
2973 { | |
2974 if (!node_has_high_bound (node, index_type)) | |
2975 { | |
2976 emit_cmp_and_jump_insns (index, | |
2977 convert_modes | |
2978 (mode, imode, | |
2979 expand_normal (node->high), | |
2980 unsignedp), | |
2981 GT, NULL_RTX, mode, unsignedp, | |
2982 default_label); | |
2983 } | |
2984 | |
2985 emit_case_nodes (index, node->left, default_label, index_type); | |
2986 } | |
2987 else | |
2988 /* We cannot process node->left normally | |
2989 since we haven't ruled out the numbers greater than | |
2990 this node's value. So handle node->left explicitly. */ | |
2991 do_jump_if_equal (mode, index, | |
2992 convert_modes | |
2993 (mode, imode, | |
2994 expand_normal (node->left->low), | |
2995 unsignedp), | |
2996 label_rtx (node->left->code_label), unsignedp); | |
2997 } | |
2998 } | |
2999 else | |
3000 { | |
3001 /* Node is a range. These cases are very similar to those for a single | |
3002 value, except that we do not start by testing whether this node | |
3003 is the one to branch to. */ | |
3004 | |
3005 if (node->right != 0 && node->left != 0) | |
3006 { | |
3007 /* Node has subtrees on both sides. | |
3008 If the right-hand subtree is bounded, | |
3009 test for it first, since we can go straight there. | |
3010 Otherwise, we need to make a branch in the control structure, | |
3011 then handle the two subtrees. */ | |
3012 tree test_label = 0; | |
3013 | |
3014 if (node_is_bounded (node->right, index_type)) | |
3015 /* Right hand node is fully bounded so we can eliminate any | |
3016 testing and branch directly to the target code. */ | |
3017 emit_cmp_and_jump_insns (index, | |
3018 convert_modes | |
3019 (mode, imode, | |
3020 expand_normal (node->high), | |
3021 unsignedp), | |
3022 GT, NULL_RTX, mode, unsignedp, | |
3023 label_rtx (node->right->code_label)); | |
3024 else | |
3025 { | |
3026 /* Right hand node requires testing. | |
3027 Branch to a label where we will handle it later. */ | |
3028 | |
3029 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); | |
3030 emit_cmp_and_jump_insns (index, | |
3031 convert_modes | |
3032 (mode, imode, | |
3033 expand_normal (node->high), | |
3034 unsignedp), | |
3035 GT, NULL_RTX, mode, unsignedp, | |
3036 label_rtx (test_label)); | |
3037 } | |
3038 | |
3039 /* Value belongs to this node or to the left-hand subtree. */ | |
3040 | |
3041 emit_cmp_and_jump_insns (index, | |
3042 convert_modes | |
3043 (mode, imode, | |
3044 expand_normal (node->low), | |
3045 unsignedp), | |
3046 GE, NULL_RTX, mode, unsignedp, | |
3047 label_rtx (node->code_label)); | |
3048 | |
3049 /* Handle the left-hand subtree. */ | |
3050 emit_case_nodes (index, node->left, default_label, index_type); | |
3051 | |
3052 /* If right node had to be handled later, do that now. */ | |
3053 | |
3054 if (test_label) | |
3055 { | |
3056 /* If the left-hand subtree fell through, | |
3057 don't let it fall into the right-hand subtree. */ | |
3058 if (default_label) | |
3059 emit_jump (default_label); | |
3060 | |
3061 expand_label (test_label); | |
3062 emit_case_nodes (index, node->right, default_label, index_type); | |
3063 } | |
3064 } | |
3065 | |
3066 else if (node->right != 0 && node->left == 0) | |
3067 { | |
3068 /* Deal with values to the left of this node, | |
3069 if they are possible. */ | |
3070 if (!node_has_low_bound (node, index_type)) | |
3071 { | |
3072 emit_cmp_and_jump_insns (index, | |
3073 convert_modes | |
3074 (mode, imode, | |
3075 expand_normal (node->low), | |
3076 unsignedp), | |
3077 LT, NULL_RTX, mode, unsignedp, | |
3078 default_label); | |
3079 } | |
3080 | |
3081 /* Value belongs to this node or to the right-hand subtree. */ | |
3082 | |
3083 emit_cmp_and_jump_insns (index, | |
3084 convert_modes | |
3085 (mode, imode, | |
3086 expand_normal (node->high), | |
3087 unsignedp), | |
3088 LE, NULL_RTX, mode, unsignedp, | |
3089 label_rtx (node->code_label)); | |
3090 | |
3091 emit_case_nodes (index, node->right, default_label, index_type); | |
3092 } | |
3093 | |
3094 else if (node->right == 0 && node->left != 0) | |
3095 { | |
3096 /* Deal with values to the right of this node, | |
3097 if they are possible. */ | |
3098 if (!node_has_high_bound (node, index_type)) | |
3099 { | |
3100 emit_cmp_and_jump_insns (index, | |
3101 convert_modes | |
3102 (mode, imode, | |
3103 expand_normal (node->high), | |
3104 unsignedp), | |
3105 GT, NULL_RTX, mode, unsignedp, | |
3106 default_label); | |
3107 } | |
3108 | |
3109 /* Value belongs to this node or to the left-hand subtree. */ | |
3110 | |
3111 emit_cmp_and_jump_insns (index, | |
3112 convert_modes | |
3113 (mode, imode, | |
3114 expand_normal (node->low), | |
3115 unsignedp), | |
3116 GE, NULL_RTX, mode, unsignedp, | |
3117 label_rtx (node->code_label)); | |
3118 | |
3119 emit_case_nodes (index, node->left, default_label, index_type); | |
3120 } | |
3121 | |
3122 else | |
3123 { | |
3124 /* Node has no children so we check low and high bounds to remove | |
3125 redundant tests. Only one of the bounds can exist, | |
3126 since otherwise this node is bounded--a case tested already. */ | |
3127 int high_bound = node_has_high_bound (node, index_type); | |
3128 int low_bound = node_has_low_bound (node, index_type); | |
3129 | |
3130 if (!high_bound && low_bound) | |
3131 { | |
3132 emit_cmp_and_jump_insns (index, | |
3133 convert_modes | |
3134 (mode, imode, | |
3135 expand_normal (node->high), | |
3136 unsignedp), | |
3137 GT, NULL_RTX, mode, unsignedp, | |
3138 default_label); | |
3139 } | |
3140 | |
3141 else if (!low_bound && high_bound) | |
3142 { | |
3143 emit_cmp_and_jump_insns (index, | |
3144 convert_modes | |
3145 (mode, imode, | |
3146 expand_normal (node->low), | |
3147 unsignedp), | |
3148 LT, NULL_RTX, mode, unsignedp, | |
3149 default_label); | |
3150 } | |
3151 else if (!low_bound && !high_bound) | |
3152 { | |
3153 /* Widen LOW and HIGH to the same width as INDEX. */ | |
3154 tree type = lang_hooks.types.type_for_mode (mode, unsignedp); | |
3155 tree low = build1 (CONVERT_EXPR, type, node->low); | |
3156 tree high = build1 (CONVERT_EXPR, type, node->high); | |
3157 rtx low_rtx, new_index, new_bound; | |
3158 | |
3159 /* Instead of doing two branches, emit one unsigned branch for | |
3160 (index-low) > (high-low). */ | |
3161 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL); | |
3162 new_index = expand_simple_binop (mode, MINUS, index, low_rtx, | |
3163 NULL_RTX, unsignedp, | |
3164 OPTAB_WIDEN); | |
3165 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type, | |
3166 high, low), | |
3167 NULL_RTX, mode, EXPAND_NORMAL); | |
3168 | |
3169 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX, | |
3170 mode, 1, default_label); | |
3171 } | |
3172 | |
3173 emit_jump (label_rtx (node->code_label)); | |
3174 } | |
3175 } | |
3176 } |