CbC_gcc: comparison gcc/cfgexpand.c @ 70:b81903832de2
merge c-decl.c
author: Nobuyasu Oshiro <dimolto@cr.ie.u-ryukyu.ac.jp>
date: Sun, 21 Aug 2011 09:24:16 +0900
parents: 1b10fe6932e1
children: ce75bd9117e4
comparison
69:1b10fe6932e1 | 70:b81903832de2 |
---|---|
42 #include "debug.h" | 42 #include "debug.h" |
43 #include "params.h" | 43 #include "params.h" |
44 #include "tree-inline.h" | 44 #include "tree-inline.h" |
45 #include "value-prof.h" | 45 #include "value-prof.h" |
46 #include "target.h" | 46 #include "target.h" |
47 #ifndef noCbC | |
48 #include "cbc-tree.h" | |
49 #endif | |
50 #include "ssaexpand.h" | 47 #include "ssaexpand.h" |
51 #include "bitmap.h" | 48 #include "bitmap.h" |
52 #include "sbitmap.h" | 49 #include "sbitmap.h" |
53 #include "insn-attr.h" /* For INSN_SCHEDULING. */ | 50 #include "insn-attr.h" /* For INSN_SCHEDULING. */ |
54 | 51 |
77 gimple_assign_rhs1 (stmt), | 74 gimple_assign_rhs1 (stmt), |
78 gimple_assign_rhs2 (stmt), | 75 gimple_assign_rhs2 (stmt), |
79 gimple_assign_rhs3 (stmt)); | 76 gimple_assign_rhs3 (stmt)); |
80 else if (grhs_class == GIMPLE_BINARY_RHS) | 77 else if (grhs_class == GIMPLE_BINARY_RHS) |
81 t = build2 (gimple_assign_rhs_code (stmt), | 78 t = build2 (gimple_assign_rhs_code (stmt), |
82 TREE_TYPE (gimple_assign_lhs (stmt)), | 79 TREE_TYPE (gimple_assign_lhs (stmt)), |
83 gimple_assign_rhs1 (stmt), | 80 gimple_assign_rhs1 (stmt), |
84 gimple_assign_rhs2 (stmt)); | 81 gimple_assign_rhs2 (stmt)); |
85 else if (grhs_class == GIMPLE_UNARY_RHS) | 82 else if (grhs_class == GIMPLE_UNARY_RHS) |
86 t = build1 (gimple_assign_rhs_code (stmt), | 83 t = build1 (gimple_assign_rhs_code (stmt), |
87 TREE_TYPE (gimple_assign_lhs (stmt)), | 84 TREE_TYPE (gimple_assign_lhs (stmt)), |
88 gimple_assign_rhs1 (stmt)); | 85 gimple_assign_rhs1 (stmt)); |
89 else if (grhs_class == GIMPLE_SINGLE_RHS) | 86 else if (grhs_class == GIMPLE_SINGLE_RHS) |
90 { | 87 { |
91 t = gimple_assign_rhs1 (stmt); | 88 t = gimple_assign_rhs1 (stmt); |
92 /* Avoid modifying this tree in place below. */ | 89 /* Avoid modifying this tree in place below. */ |
93 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t) | 90 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t) |
124 { | 121 { |
125 if (TREE_CODE (t) == SSA_NAME) | 122 if (TREE_CODE (t) == SSA_NAME) |
126 { | 123 { |
127 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x; | 124 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x; |
128 if (x && !MEM_P (x)) | 125 if (x && !MEM_P (x)) |
129 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x); | 126 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x); |
130 /* For the benefit of debug information at -O0 (where vartracking | 127 /* For the benefit of debug information at -O0 (where vartracking |
131 doesn't run) record the place also in the base DECL if it's | 128 doesn't run) record the place also in the base DECL if it's |
132 a normal variable (not a parameter). */ | 129 a normal variable (not a parameter). */ |
133 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL) | 130 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL) |
134 { | 131 { |
135 tree var = SSA_NAME_VAR (t); | 132 tree var = SSA_NAME_VAR (t); |
136 /* If we don't yet have something recorded, just record it now. */ | 133 /* If we don't yet have something recorded, just record it now. */ |
137 if (!DECL_RTL_SET_P (var)) | 134 if (!DECL_RTL_SET_P (var)) |
138 SET_DECL_RTL (var, x); | 135 SET_DECL_RTL (var, x); |
139 /* If we have it set already to "multiple places" don't | 136 /* If we have it set already to "multiple places" don't |
140 change this. */ | 137 change this. */ |
141 else if (DECL_RTL (var) == pc_rtx) | 138 else if (DECL_RTL (var) == pc_rtx) |
142 ; | 139 ; |
143 /* If we have something recorded and it's not the same place | 140 /* If we have something recorded and it's not the same place |
144 as we want to record now, we have multiple partitions for the | 141 as we want to record now, we have multiple partitions for the |
145 same base variable, with different places. We can't just | 142 same base variable, with different places. We can't just |
146 randomly choose one, hence we have to say that we don't know. | 143 randomly choose one, hence we have to say that we don't know. |
147 This only happens with optimization, and there var-tracking | 144 This only happens with optimization, and there var-tracking |
148 will figure out the right thing. */ | 145 will figure out the right thing. */ |
149 else if (DECL_RTL (var) != x) | 146 else if (DECL_RTL (var) != x) |
150 SET_DECL_RTL (var, pc_rtx); | 147 SET_DECL_RTL (var, pc_rtx); |
151 } | 148 } |
152 } | 149 } |
153 else | 150 else |
154 SET_DECL_RTL (t, x); | 151 SET_DECL_RTL (t, x); |
155 } | 152 } |
156 | 153 |
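A note on the DECL_RTL bookkeeping above: it is a three-state join over recorded locations — unset, then the first location seen, then pc_rtx as a sticky "multiple places" marker. A stand-alone restatement of the merge rule (toy types, hypothetical names):

```c
struct loc;                        /* stand-in for rtx */

/* Merge a newly seen location X into the current record CUR.
   MULTI plays the role of pc_rtx ("multiple places, don't know").  */
static struct loc *
merge_loc (struct loc *cur, struct loc *x, struct loc *multi)
{
  if (cur == NULL)
    return x;        /* nothing recorded yet: just record it */
  if (cur == multi)
    return cur;      /* already known to live in multiple places */
  if (cur != x)
    return multi;    /* two partitions disagree: give up */
  return cur;        /* same place again: no change */
}
```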
259 struct stack_var *v; | 256 struct stack_var *v; |
260 | 257 |
261 if (stack_vars_num >= stack_vars_alloc) | 258 if (stack_vars_num >= stack_vars_alloc) |
262 { | 259 { |
263 if (stack_vars_alloc) | 260 if (stack_vars_alloc) |
264 stack_vars_alloc = stack_vars_alloc * 3 / 2; | 261 stack_vars_alloc = stack_vars_alloc * 3 / 2; |
265 else | 262 else |
266 stack_vars_alloc = 32; | 263 stack_vars_alloc = 32; |
267 stack_vars | 264 stack_vars |
268 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc); | 265 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc); |
269 } | 266 } |
270 v = &stack_vars[stack_vars_num]; | 267 v = &stack_vars[stack_vars_num]; |
271 | 268 |
272 v->decl = decl; | 269 v->decl = decl; |
273 v->offset = 0; | 270 v->offset = 0; |
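A note on the reallocation above: the stack_vars array grows geometrically, by a factor of 3/2 with a floor of 32, which keeps repeated appends amortized O(1). A minimal stand-alone sketch of the same pattern, with plain realloc standing in for XRESIZEVEC (demo names, error handling elided):

```c
#include <stdlib.h>

struct var_demo { void *decl; long offset; };

static struct var_demo *vars;
static size_t vars_num, vars_alloc;

/* Append a slot with 3/2 geometric growth, as in the hunk above.  */
static struct var_demo *
push_var_demo (void)
{
  if (vars_num >= vars_alloc)
    {
      vars_alloc = vars_alloc ? vars_alloc * 3 / 2 : 32;
      vars = realloc (vars, vars_alloc * sizeof *vars); /* unchecked for brevity */
    }
  return &vars[vars_num++];
}
```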
334 return false; | 331 return false; |
335 | 332 |
336 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | 333 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
337 if (TREE_CODE (field) == FIELD_DECL) | 334 if (TREE_CODE (field) == FIELD_DECL) |
338 if (aggregate_contains_union_type (TREE_TYPE (field))) | 335 if (aggregate_contains_union_type (TREE_TYPE (field))) |
339 return true; | 336 return true; |
340 | 337 |
341 return false; | 338 return false; |
342 } | 339 } |
343 | 340 |
344 /* A subroutine of expand_used_vars. If two variables X and Y have alias | 341 /* A subroutine of expand_used_vars. If two variables X and Y have alias |
362 bool aggr_i = AGGREGATE_TYPE_P (type_i); | 359 bool aggr_i = AGGREGATE_TYPE_P (type_i); |
363 bool contains_union; | 360 bool contains_union; |
364 | 361 |
365 contains_union = aggregate_contains_union_type (type_i); | 362 contains_union = aggregate_contains_union_type (type_i); |
366 for (j = 0; j < i; ++j) | 363 for (j = 0; j < i; ++j) |
367 { | 364 { |
368 tree type_j = TREE_TYPE (stack_vars[j].decl); | 365 tree type_j = TREE_TYPE (stack_vars[j].decl); |
369 bool aggr_j = AGGREGATE_TYPE_P (type_j); | 366 bool aggr_j = AGGREGATE_TYPE_P (type_j); |
370 if (aggr_i != aggr_j | 367 if (aggr_i != aggr_j |
371 /* Either the objects conflict by means of type based | 368 /* Either the objects conflict by means of type based |
372 aliasing rules, or we need to add a conflict. */ | 369 aliasing rules, or we need to add a conflict. */ |
373 || !objects_must_conflict_p (type_i, type_j) | 370 || !objects_must_conflict_p (type_i, type_j) |
374 /* In case the types do not conflict ensure that access | 371 /* In case the types do not conflict ensure that access |
375 to elements will conflict. In case of unions we have | 372 to elements will conflict. In case of unions we have |
376 to be careful as type based aliasing rules may say | 373 to be careful as type based aliasing rules may say |
377 access to the same memory does not conflict. So play | 374 access to the same memory does not conflict. So play |
378 safe and add a conflict in this case. */ | 375 safe and add a conflict in this case. */ |
379 || contains_union) | 376 || contains_union) |
380 add_stack_var_conflict (i, j); | 377 add_stack_var_conflict (i, j); |
381 } | 378 } |
382 } | 379 } |
383 } | 380 } |
384 | 381 |
385 /* A subroutine of partition_stack_vars. A comparison function for qsort, | 382 /* A subroutine of partition_stack_vars. A comparison function for qsort, |
386 sorting an array of indices by the properties of the object. */ | 383 sorting an array of indices by the properties of the object. */ |
421 Two SSA names are compared by their version, SSA names come before | 418 Two SSA names are compared by their version, SSA names come before |
422 non-SSA names, and two normal decls are compared by their DECL_UID. */ | 419 non-SSA names, and two normal decls are compared by their DECL_UID. */ |
423 if (TREE_CODE (decla) == SSA_NAME) | 420 if (TREE_CODE (decla) == SSA_NAME) |
424 { | 421 { |
425 if (TREE_CODE (declb) == SSA_NAME) | 422 if (TREE_CODE (declb) == SSA_NAME) |
426 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb); | 423 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb); |
427 else | 424 else |
428 return -1; | 425 return -1; |
429 } | 426 } |
430 else if (TREE_CODE (declb) == SSA_NAME) | 427 else if (TREE_CODE (declb) == SSA_NAME) |
431 return 1; | 428 return 1; |
432 else | 429 else |
433 uida = DECL_UID (decla), uidb = DECL_UID (declb); | 430 uida = DECL_UID (decla), uidb = DECL_UID (declb); |
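The ordering described in the comment above — SSA names first, by SSA_NAME_VERSION; then decls, by DECL_UID — restated as a plain qsort comparator over a toy key (illustrative types, not GCC's):

```c
struct sort_key
{
  int is_ssa;     /* 1 for SSA names, 0 for normal decls */
  unsigned id;    /* SSA_NAME_VERSION or DECL_UID */
};

static int
sort_key_cmp (const void *pa, const void *pb)
{
  const struct sort_key *a = pa, *b = pb;
  if (a->is_ssa != b->is_ssa)
    return a->is_ssa ? -1 : 1;               /* SSA names come first */
  return a->id < b->id ? -1 : a->id > b->id; /* then by version/UID */
}
```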
443 together with other variables add all partition members to the pointed-to | 440 together with other variables add all partition members to the pointed-to |
444 variables bitmap. */ | 441 variables bitmap. */ |
445 | 442 |
446 static void | 443 static void |
447 add_partitioned_vars_to_ptset (struct pt_solution *pt, | 444 add_partitioned_vars_to_ptset (struct pt_solution *pt, |
448 struct pointer_map_t *decls_to_partitions, | 445 struct pointer_map_t *decls_to_partitions, |
449 struct pointer_set_t *visited, bitmap temp) | 446 struct pointer_set_t *visited, bitmap temp) |
450 { | 447 { |
451 bitmap_iterator bi; | 448 bitmap_iterator bi; |
452 unsigned i; | 449 unsigned i; |
453 bitmap *part; | 450 bitmap *part; |
454 | 451 |
455 if (pt->anything | 452 if (pt->anything |
456 || pt->vars == NULL | 453 || pt->vars == NULL |
457 /* The pointed-to vars bitmap is shared, it is enough to | 454 /* The pointed-to vars bitmap is shared, it is enough to |
458 visit it once. */ | 455 visit it once. */ |
459 || pointer_set_insert (visited, pt->vars)) | 456 || pointer_set_insert (visited, pt->vars)) |
460 return; | 457 return; |
461 | 458 |
462 bitmap_clear (temp); | 459 bitmap_clear (temp); |
463 | 460 |
464 /* By using a temporary bitmap to store all members of the partitions | 461 /* By using a temporary bitmap to store all members of the partitions |
465 we have to add, we make sure to visit each of the partitions only | 462 we have to add, we make sure to visit each of the partitions only |
466 once. */ | 463 once. */ |
467 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi) | 464 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi) |
468 if ((!temp | 465 if ((!temp |
469 || !bitmap_bit_p (temp, i)) | 466 || !bitmap_bit_p (temp, i)) |
470 && (part = (bitmap *) pointer_map_contains (decls_to_partitions, | 467 && (part = (bitmap *) pointer_map_contains (decls_to_partitions, |
471 (void *)(size_t) i))) | 468 (void *)(size_t) i))) |
472 bitmap_ior_into (temp, *part); | 469 bitmap_ior_into (temp, *part); |
473 if (!bitmap_empty_p (temp)) | 470 if (!bitmap_empty_p (temp)) |
474 bitmap_ior_into (pt->vars, temp); | 471 bitmap_ior_into (pt->vars, temp); |
475 } | 472 } |
476 | 473 |
496 if (stack_vars[i].representative != i | 493 if (stack_vars[i].representative != i |
497 || stack_vars[i].next == EOC) | 494 || stack_vars[i].next == EOC) |
498 continue; | 495 continue; |
499 | 496 |
500 if (!decls_to_partitions) | 497 if (!decls_to_partitions) |
501 { | 498 { |
502 decls_to_partitions = pointer_map_create (); | 499 decls_to_partitions = pointer_map_create (); |
503 cfun->gimple_df->decls_to_pointers = pointer_map_create (); | 500 cfun->gimple_df->decls_to_pointers = pointer_map_create (); |
504 } | 501 } |
505 | 502 |
506 /* Create an SSA_NAME that points to the partition for use | 503 /* Create an SSA_NAME that points to the partition for use |
507 as base during alias-oracle queries on RTL for bases that | 504 as base during alias-oracle queries on RTL for bases that |
508 have been partitioned. */ | 505 have been partitioned. */ |
509 if (var == NULL_TREE) | 506 if (var == NULL_TREE) |
510 var = create_tmp_var (ptr_type_node, NULL); | 507 var = create_tmp_var (ptr_type_node, NULL); |
511 name = make_ssa_name (var, NULL); | 508 name = make_ssa_name (var, NULL); |
512 | 509 |
513 /* Create bitmaps representing partitions. They will be used for | 510 /* Create bitmaps representing partitions. They will be used for |
514 points-to sets later, so use GGC alloc. */ | 511 points-to sets later, so use GGC alloc. */ |
515 part = BITMAP_GGC_ALLOC (); | 512 part = BITMAP_GGC_ALLOC (); |
543 unsigned i; | 540 unsigned i; |
544 struct pointer_set_t *visited = pointer_set_create (); | 541 struct pointer_set_t *visited = pointer_set_create (); |
545 bitmap temp = BITMAP_ALLOC (NULL); | 542 bitmap temp = BITMAP_ALLOC (NULL); |
546 | 543 |
547 for (i = 1; i < num_ssa_names; i++) | 544 for (i = 1; i < num_ssa_names; i++) |
548 { | 545 { |
549 tree name = ssa_name (i); | 546 tree name = ssa_name (i); |
550 struct ptr_info_def *pi; | 547 struct ptr_info_def *pi; |
551 | 548 |
552 if (name | 549 if (name |
553 && POINTER_TYPE_P (TREE_TYPE (name)) | 550 && POINTER_TYPE_P (TREE_TYPE (name)) |
554 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL)) | 551 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL)) |
555 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions, | 552 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions, |
556 visited, temp); | 553 visited, temp); |
557 } | 554 } |
558 | 555 |
559 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped, | 556 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped, |
560 decls_to_partitions, visited, temp); | 557 decls_to_partitions, visited, temp); |
561 | 558 |
562 pointer_set_destroy (visited); | 559 pointer_set_destroy (visited); |
597 | 594 |
598 /* Update the interference graph and merge the conflicts. */ | 595 /* Update the interference graph and merge the conflicts. */ |
599 if (vb->conflicts) | 596 if (vb->conflicts) |
600 { | 597 { |
601 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi) | 598 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi) |
602 add_stack_var_conflict (a, stack_vars[u].representative); | 599 add_stack_var_conflict (a, stack_vars[u].representative); |
603 BITMAP_FREE (vb->conflicts); | 600 BITMAP_FREE (vb->conflicts); |
604 } | 601 } |
605 } | 602 } |
606 | 603 |
607 /* A subroutine of expand_used_vars. Binpack the variables into | 604 /* A subroutine of expand_used_vars. Binpack the variables into |
608 partitions constrained by the interference graph. The overall | 605 partitions constrained by the interference graph. The overall |
609 algorithm used is as follows: | 606 algorithm used is as follows: |
610 | 607 |
611 Sort the objects by size. | 608 Sort the objects by size. |
612 For each object A { | 609 For each object A { |
613 S = size(A) | 610 S = size(A) |
614 O = 0 | 611 O = 0 |
615 loop { | 612 loop { |
616 Look for the largest non-conflicting object B with size <= S. | 613 Look for the largest non-conflicting object B with size <= S. |
617 UNION (A, B) | 614 UNION (A, B) |
618 offset(B) = O | 615 offset(B) = O |
619 O += size(B) | 616 O += size(B) |
620 S -= size(B) | 617 S -= size(B) |
621 } | 618 } |
622 } | 619 } |
623 */ | 620 */ |
624 | 621 |
625 static void | 622 static void |
626 partition_stack_vars (void) | 623 partition_stack_vars (void) |
627 { | 624 { |
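A self-contained toy version of the greedy packing described in the comment above (sizes pre-sorted descending, conflicts as a plain matrix; all names are demo names). One simplification is flagged in the comments: conflicts are tested against the representative only, whereas the real pass merges conflict sets on each union:

```c
#include <stdbool.h>
#include <stddef.h>

enum { N = 4 };

static const long size[N] = { 16, 8, 8, 4 };  /* sorted descending */
static const bool conflict[N][N] = {          /* symmetric interference */
  { 0, 1, 0, 0 },
  { 1, 0, 0, 1 },
  { 0, 0, 0, 0 },
  { 0, 1, 0, 0 },
};

static size_t repr[N];    /* partition representative, as in stack_vars */
static long offset[N];    /* offset within the representative's slot */

static void
partition_demo (void)
{
  size_t i, j;

  for (i = 0; i < N; ++i)
    repr[i] = i;

  for (i = 0; i < N; ++i)
    {
      long s, o;
      if (repr[i] != i)
        continue;                /* already absorbed into a larger slot */
      s = size[i], o = 0;        /* S and O from the pseudocode */
      for (j = i + 1; j < N; ++j)
        /* The largest remaining non-conflicting B with size <= S: since
           the array is sorted descending, a forward scan finds it.
           (Toy shortcut: conflicts are tested against the representative
           only; the real code merges conflict sets on each union.)  */
        if (repr[j] == j && size[j] <= s && !conflict[i][j])
          {
            repr[j] = i;         /* UNION (A, B) */
            offset[j] = o;       /* offset (B) = O */
            o += size[j];        /* O += size (B) */
            s -= size[j];        /* S -= size (B) */
          }
    }
}
```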
703 { | 700 { |
704 i = stack_vars_sorted[si]; | 701 i = stack_vars_sorted[si]; |
705 | 702 |
706 /* Skip variables that aren't partition representatives, for now. */ | 703 /* Skip variables that aren't partition representatives, for now. */ |
707 if (stack_vars[i].representative != i) | 704 if (stack_vars[i].representative != i) |
708 continue; | 705 continue; |
709 | 706 |
710 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC | 707 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC |
711 " align %u\n", (unsigned long) i, stack_vars[i].size, | 708 " align %u\n", (unsigned long) i, stack_vars[i].size, |
712 stack_vars[i].alignb); | 709 stack_vars[i].alignb); |
713 | 710 |
714 for (j = i; j != EOC; j = stack_vars[j].next) | 711 for (j = i; j != EOC; j = stack_vars[j].next) |
715 { | 712 { |
716 fputc ('\t', dump_file); | 713 fputc ('\t', dump_file); |
717 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags); | 714 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags); |
718 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n", | 715 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n", |
719 stack_vars[j].offset); | 716 stack_vars[j].offset); |
720 } | 717 } |
721 } | 718 } |
722 } | 719 } |
723 | 720 |
724 /* Assign rtl to DECL at BASE + OFFSET. */ | 721 /* Assign rtl to DECL at BASE + OFFSET. */ |
725 | 722 |
822 | 819 |
823 i = stack_vars_sorted[si]; | 820 i = stack_vars_sorted[si]; |
824 | 821 |
825 /* Skip variables that aren't partition representatives, for now. */ | 822 /* Skip variables that aren't partition representatives, for now. */ |
826 if (stack_vars[i].representative != i) | 823 if (stack_vars[i].representative != i) |
827 continue; | 824 continue; |
828 | 825 |
829 /* Skip variables that have already had rtl assigned. See also | 826 /* Skip variables that have already had rtl assigned. See also |
830 add_stack_var where we perpetrate this pc_rtx hack. */ | 827 add_stack_var where we perpetrate this pc_rtx hack. */ |
831 decl = stack_vars[i].decl; | 828 decl = stack_vars[i].decl; |
832 if ((TREE_CODE (decl) == SSA_NAME | 829 if ((TREE_CODE (decl) == SSA_NAME |
861 base = large_base; | 858 base = large_base; |
862 base_align = large_align; | 859 base_align = large_align; |
863 } | 860 } |
864 | 861 |
865 /* Create rtl for each variable based on their location within the | 862 /* Create rtl for each variable based on their location within the |
866 partition. */ | 863 partition. */ |
867 for (j = i; j != EOC; j = stack_vars[j].next) | 864 for (j = i; j != EOC; j = stack_vars[j].next) |
868 { | 865 { |
869 gcc_assert (stack_vars[j].offset <= stack_vars[i].size); | 866 gcc_assert (stack_vars[j].offset <= stack_vars[i].size); |
870 expand_one_stack_var_at (stack_vars[j].decl, | 867 expand_one_stack_var_at (stack_vars[j].decl, |
871 base, base_align, | 868 base, base_align, |
887 { | 884 { |
888 i = stack_vars_sorted[si]; | 885 i = stack_vars_sorted[si]; |
889 | 886 |
890 /* Skip variables that aren't partition representatives, for now. */ | 887 /* Skip variables that aren't partition representatives, for now. */ |
891 if (stack_vars[i].representative != i) | 888 if (stack_vars[i].representative != i) |
892 continue; | 889 continue; |
893 | 890 |
894 size += stack_vars[i].size; | 891 size += stack_vars[i].size; |
895 for (j = i; j != EOC; j = stack_vars[j].next) | 892 for (j = i; j != EOC; j = stack_vars[j].next) |
896 set_rtl (stack_vars[j].decl, NULL); | 893 set_rtl (stack_vars[j].decl, NULL); |
897 } | 894 } |
898 return size; | 895 return size; |
899 } | 896 } |
900 | 897 |
901 /* A subroutine of expand_one_var. Called to immediately assign rtl | 898 /* A subroutine of expand_one_var. Called to immediately assign rtl |
1025 var = SSAVAR (var); | 1022 var = SSAVAR (var); |
1026 | 1023 |
1027 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL) | 1024 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL) |
1028 { | 1025 { |
1029 /* Because we don't know if VAR will be in register or on stack, | 1026 /* Because we don't know if VAR will be in register or on stack, |
1030 we conservatively assume it will be on stack even if VAR is | 1027 we conservatively assume it will be on stack even if VAR is |
1031 eventually put into register after RA pass. For non-automatic | 1028 eventually put into register after RA pass. For non-automatic |
1032 variables, which won't be on stack, we collect alignment of | 1029 variables, which won't be on stack, we collect alignment of |
1033 type and ignore user specified alignment. */ | 1030 type and ignore user specified alignment. */ |
1034 if (TREE_STATIC (var) || DECL_EXTERNAL (var)) | 1031 if (TREE_STATIC (var) || DECL_EXTERNAL (var)) |
1035 align = MINIMUM_ALIGNMENT (TREE_TYPE (var), | 1032 align = MINIMUM_ALIGNMENT (TREE_TYPE (var), |
1036 TYPE_MODE (TREE_TYPE (var)), | 1033 TYPE_MODE (TREE_TYPE (var)), |
1037 TYPE_ALIGN (TREE_TYPE (var))); | 1034 TYPE_ALIGN (TREE_TYPE (var))); |
1038 else if (DECL_HAS_VALUE_EXPR_P (var) | 1035 else if (DECL_HAS_VALUE_EXPR_P (var) |
1041 or variables which were assigned a stack slot already by | 1038 or variables which were assigned a stack slot already by |
1042 expand_one_stack_var_at - in the latter case DECL_ALIGN has been | 1039 expand_one_stack_var_at - in the latter case DECL_ALIGN has been |
1043 changed from the offset chosen to it. */ | 1040 changed from the offset chosen to it. */ |
1044 align = crtl->stack_alignment_estimated; | 1041 align = crtl->stack_alignment_estimated; |
1045 else | 1042 else |
1046 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var)); | 1043 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var)); |
1047 | 1044 |
1048 /* If the variable alignment is very large we'll dynamically allocate | 1045 /* If the variable alignment is very large we'll dynamically allocate |
1049 it, which means that the in-frame portion is just a pointer. | 1046 it, which means that the in-frame portion is just a pointer. |
1050 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) | 1047 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) |
1051 align = POINTER_SIZE; | 1048 align = POINTER_SIZE; |
1068 crtl->max_used_stack_slot_alignment = align; | 1065 crtl->max_used_stack_slot_alignment = align; |
1069 | 1066 |
1070 if (TREE_CODE (origvar) == SSA_NAME) | 1067 if (TREE_CODE (origvar) == SSA_NAME) |
1071 { | 1068 { |
1072 gcc_assert (TREE_CODE (var) != VAR_DECL | 1069 gcc_assert (TREE_CODE (var) != VAR_DECL |
1073 || (!DECL_EXTERNAL (var) | 1070 || (!DECL_EXTERNAL (var) |
1074 && !DECL_HAS_VALUE_EXPR_P (var) | 1071 && !DECL_HAS_VALUE_EXPR_P (var) |
1075 && !TREE_STATIC (var) | 1072 && !TREE_STATIC (var) |
1076 && TREE_TYPE (var) != error_mark_node | 1073 && TREE_TYPE (var) != error_mark_node |
1077 && !DECL_HARD_REGISTER (var) | 1074 && !DECL_HARD_REGISTER (var) |
1078 && really_expand)); | 1075 && really_expand)); |
1079 } | 1076 } |
1080 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME) | 1077 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME) |
1081 ; | 1078 ; |
1082 else if (DECL_EXTERNAL (var)) | 1079 else if (DECL_EXTERNAL (var)) |
1083 ; | 1080 ; |
1153 if (old_sv_num < this_sv_num) | 1150 if (old_sv_num < this_sv_num) |
1154 { | 1151 { |
1155 new_sv_num = stack_vars_num; | 1152 new_sv_num = stack_vars_num; |
1156 | 1153 |
1157 for (i = old_sv_num; i < new_sv_num; ++i) | 1154 for (i = old_sv_num; i < new_sv_num; ++i) |
1158 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;) | 1155 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;) |
1159 add_stack_var_conflict (i, j); | 1156 add_stack_var_conflict (i, j); |
1160 } | 1157 } |
1161 } | 1158 } |
1162 | 1159 |
1163 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree | 1160 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree |
1164 and clear TREE_USED on all local variables. */ | 1161 and clear TREE_USED on all local variables. */ |
1176 clear_tree_used (t); | 1173 clear_tree_used (t); |
1177 } | 1174 } |
1178 | 1175 |
1179 /* Examine TYPE and determine a bit mask of the following features. */ | 1176 /* Examine TYPE and determine a bit mask of the following features. */ |
1180 | 1177 |
1181 #define SPCT_HAS_LARGE_CHAR_ARRAY 1 | 1178 #define SPCT_HAS_LARGE_CHAR_ARRAY 1 |
1182 #define SPCT_HAS_SMALL_CHAR_ARRAY 2 | 1179 #define SPCT_HAS_SMALL_CHAR_ARRAY 2 |
1183 #define SPCT_HAS_ARRAY 4 | 1180 #define SPCT_HAS_ARRAY 4 |
1184 #define SPCT_HAS_AGGREGATE 8 | 1181 #define SPCT_HAS_AGGREGATE 8 |
1185 | 1182 |
1186 static unsigned int | 1183 static unsigned int |
1187 stack_protect_classify_type (tree type) | 1184 stack_protect_classify_type (tree type) |
1188 { | 1185 { |
1189 unsigned int ret = 0; | 1186 unsigned int ret = 0; |
1192 switch (TREE_CODE (type)) | 1189 switch (TREE_CODE (type)) |
1193 { | 1190 { |
1194 case ARRAY_TYPE: | 1191 case ARRAY_TYPE: |
1195 t = TYPE_MAIN_VARIANT (TREE_TYPE (type)); | 1192 t = TYPE_MAIN_VARIANT (TREE_TYPE (type)); |
1196 if (t == char_type_node | 1193 if (t == char_type_node |
1197 || t == signed_char_type_node | 1194 || t == signed_char_type_node |
1198 || t == unsigned_char_type_node) | 1195 || t == unsigned_char_type_node) |
1199 { | 1196 { |
1200 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE); | 1197 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE); |
1201 unsigned HOST_WIDE_INT len; | 1198 unsigned HOST_WIDE_INT len; |
1202 | 1199 |
1203 if (!TYPE_SIZE_UNIT (type) | 1200 if (!TYPE_SIZE_UNIT (type) |
1204 || !host_integerp (TYPE_SIZE_UNIT (type), 1)) | 1201 || !host_integerp (TYPE_SIZE_UNIT (type), 1)) |
1205 len = max; | 1202 len = max; |
1203 else | |
1204 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1); | |
1205 | |
1206 if (len < max) | |
1207 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY; | |
1208 else | |
1209 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY; | |
1210 } | |
1206 else | 1211 else |
1207 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1); | 1212 ret = SPCT_HAS_ARRAY; |
1208 | |
1209 if (len < max) | |
1210 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY; | |
1211 else | |
1212 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY; | |
1213 } | |
1214 else | |
1215 ret = SPCT_HAS_ARRAY; | |
1216 break; | 1213 break; |
1217 | 1214 |
1218 case UNION_TYPE: | 1215 case UNION_TYPE: |
1219 case QUAL_UNION_TYPE: | 1216 case QUAL_UNION_TYPE: |
1220 case RECORD_TYPE: | 1217 case RECORD_TYPE: |
1221 ret = SPCT_HAS_AGGREGATE; | 1218 ret = SPCT_HAS_AGGREGATE; |
1222 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t)) | 1219 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t)) |
1223 if (TREE_CODE (t) == FIELD_DECL) | 1220 if (TREE_CODE (t) == FIELD_DECL) |
1224 ret |= stack_protect_classify_type (TREE_TYPE (t)); | 1221 ret |= stack_protect_classify_type (TREE_TYPE (t)); |
1225 break; | 1222 break; |
1226 | 1223 |
1227 default: | 1224 default: |
1228 break; | 1225 break; |
1229 } | 1226 } |
1246 has_short_buffer = true; | 1243 has_short_buffer = true; |
1247 | 1244 |
1248 if (flag_stack_protect == 2) | 1245 if (flag_stack_protect == 2) |
1249 { | 1246 { |
1250 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY)) | 1247 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY)) |
1251 && !(bits & SPCT_HAS_AGGREGATE)) | 1248 && !(bits & SPCT_HAS_AGGREGATE)) |
1252 ret = 1; | 1249 ret = 1; |
1253 else if (bits & SPCT_HAS_ARRAY) | 1250 else if (bits & SPCT_HAS_ARRAY) |
1254 ret = 2; | 1251 ret = 2; |
1255 } | 1252 } |
1256 else | 1253 else |
1257 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0; | 1254 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0; |
1258 | 1255 |
1259 if (ret) | 1256 if (ret) |
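The phase selection above, restated as a stand-alone function over the SPCT_* bits defined earlier (a sketch, not GCC's code; flag_stack_protect is 2 for -fstack-protector-all and 1 for plain -fstack-protector):

```c
enum
{
  SPCT_HAS_LARGE_CHAR_ARRAY = 1,
  SPCT_HAS_SMALL_CHAR_ARRAY = 2,
  SPCT_HAS_ARRAY = 4,
  SPCT_HAS_AGGREGATE = 8
};

static int
phase_demo (unsigned bits, int flag_stack_protect)
{
  if (flag_stack_protect == 2)
    {
      /* Phase 1: plain character arrays, placed closest to the guard.  */
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        return 1;
      /* Phase 2: any other array (including arrays inside aggregates).  */
      if (bits & SPCT_HAS_ARRAY)
        return 2;
      return 0;
    }
  /* Plain -fstack-protector: only large char arrays get a phase.  */
  return (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
}
```

So a bare `char buf[64]` lands in phase 1 under either mode, while `int a[4]` gets a phase (2) only under -fstack-protector-all; what counts as "large" is set by --param ssp-buffer-size during classification.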
1292 | 1289 |
1293 for (i = 0; i < n; ++i) | 1290 for (i = 0; i < n; ++i) |
1294 { | 1291 { |
1295 unsigned char ph_i = phase[i]; | 1292 unsigned char ph_i = phase[i]; |
1296 for (j = 0; j < i; ++j) | 1293 for (j = 0; j < i; ++j) |
1297 if (ph_i != phase[j]) | 1294 if (ph_i != phase[j]) |
1298 add_stack_var_conflict (i, j); | 1295 add_stack_var_conflict (i, j); |
1299 } | 1296 } |
1300 | 1297 |
1301 XDELETEVEC (phase); | 1298 XDELETEVEC (phase); |
1302 } | 1299 } |
1303 | 1300 |
1305 | 1302 |
1306 static void | 1303 static void |
1307 create_stack_guard (void) | 1304 create_stack_guard (void) |
1308 { | 1305 { |
1309 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl), | 1306 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl), |
1310 VAR_DECL, NULL, ptr_type_node); | 1307 VAR_DECL, NULL, ptr_type_node); |
1311 TREE_THIS_VOLATILE (guard) = 1; | 1308 TREE_THIS_VOLATILE (guard) = 1; |
1312 TREE_USED (guard) = 1; | 1309 TREE_USED (guard) = 1; |
1313 expand_one_stack_var (guard); | 1310 expand_one_stack_var (guard); |
1314 crtl->stack_protect_guard = guard; | 1311 crtl->stack_protect_guard = guard; |
1315 } | 1312 } |
1373 if (stack_vars_num > 0) | 1370 if (stack_vars_num > 0) |
1374 { | 1371 { |
1375 /* Fake sorting the stack vars for account_stack_vars (). */ | 1372 /* Fake sorting the stack vars for account_stack_vars (). */ |
1376 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num); | 1373 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num); |
1377 for (i = 0; i < stack_vars_num; ++i) | 1374 for (i = 0; i < stack_vars_num; ++i) |
1378 stack_vars_sorted[i] = i; | 1375 stack_vars_sorted[i] = i; |
1379 size += account_stack_vars (); | 1376 size += account_stack_vars (); |
1380 fini_vars_expansion (); | 1377 fini_vars_expansion (); |
1381 } | 1378 } |
1382 pop_cfun (); | 1379 pop_cfun (); |
1383 current_function_decl = old_cur_fun_decl; | 1380 current_function_decl = old_cur_fun_decl; |
1407 { | 1404 { |
1408 tree var = partition_to_var (SA.map, i); | 1405 tree var = partition_to_var (SA.map, i); |
1409 | 1406 |
1410 gcc_assert (is_gimple_reg (var)); | 1407 gcc_assert (is_gimple_reg (var)); |
1411 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL) | 1408 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL) |
1412 expand_one_var (var, true, true); | 1409 expand_one_var (var, true, true); |
1413 else | 1410 else |
1414 { | 1411 { |
1415 /* This is a PARM_DECL or RESULT_DECL. For those partitions that | 1412 /* This is a PARM_DECL or RESULT_DECL. For those partitions that |
1416 contain the default def (representing the parm or result itself) | 1413 contain the default def (representing the parm or result itself) |
1417 we don't do anything here. But for those which don't contain the | 1414 we don't do anything here. But for those which don't contain the |
1418 default def (representing a temporary based on the parm/result) | 1415 default def (representing a temporary based on the parm/result) |
1419 we need to allocate space just like for normal VAR_DECLs. */ | 1416 we need to allocate space just like for normal VAR_DECLs. */ |
1420 if (!bitmap_bit_p (SA.partition_has_default_def, i)) | 1417 if (!bitmap_bit_p (SA.partition_has_default_def, i)) |
1421 { | 1418 { |
1422 expand_one_var (var, true, true); | 1419 expand_one_var (var, true, true); |
1423 gcc_assert (SA.partition_to_pseudo[i]); | 1420 gcc_assert (SA.partition_to_pseudo[i]); |
1424 } | 1421 } |
1425 } | 1422 } |
1426 } | 1423 } |
1427 | 1424 |
1428 /* At this point all variables on the local_decls with TREE_USED | 1425 /* At this point all variables on the local_decls with TREE_USED |
1429 set are not associated with any block scope. Lay them out. */ | 1426 set are not associated with any block scope. Lay them out. */ |
1430 | 1427 |
1438 { | 1435 { |
1439 TREE_USED (var) = 0; | 1436 TREE_USED (var) = 0; |
1440 goto next; | 1437 goto next; |
1441 } | 1438 } |
1442 /* We didn't set a block for static or extern because it's hard | 1439 /* We didn't set a block for static or extern because it's hard |
1443 to tell the difference between a global variable (re)declared | 1440 to tell the difference between a global variable (re)declared |
1444 in a local scope, and one that's really declared there to | 1441 in a local scope, and one that's really declared there to |
1445 begin with. And it doesn't really matter much, since we're | 1442 begin with. And it doesn't really matter much, since we're |
1446 not giving them stack space. Expand them now. */ | 1443 not giving them stack space. Expand them now. */ |
1447 else if (TREE_STATIC (var) || DECL_EXTERNAL (var)) | 1444 else if (TREE_STATIC (var) || DECL_EXTERNAL (var)) |
1448 expand_now = true; | 1445 expand_now = true; |
1449 | 1446 |
1450 /* If the variable is not associated with any block, then it | 1447 /* If the variable is not associated with any block, then it |
1451 was created by the optimizers, and could be live anywhere | 1448 was created by the optimizers, and could be live anywhere |
1452 in the function. */ | 1449 in the function. */ |
1453 else if (TREE_USED (var)) | 1450 else if (TREE_USED (var)) |
1454 expand_now = true; | 1451 expand_now = true; |
1455 | 1452 |
1456 /* Finally, mark all variables on the list as used. We'll use | 1453 /* Finally, mark all variables on the list as used. We'll use |
1457 this in a moment when we expand those associated with scopes. */ | 1454 this in a moment when we expand those associated with scopes. */ |
1458 TREE_USED (var) = 1; | 1455 TREE_USED (var) = 1; |
1459 | 1456 |
1460 if (expand_now) | 1457 if (expand_now) |
1461 expand_one_var (var, true, true); | 1458 expand_one_var (var, true, true); |
1462 | 1459 |
1496 expand_used_vars_for_block (outer_block, true); | 1493 expand_used_vars_for_block (outer_block, true); |
1497 | 1494 |
1498 if (stack_vars_num > 0) | 1495 if (stack_vars_num > 0) |
1499 { | 1496 { |
1500 /* Due to the way alias sets work, no variables with non-conflicting | 1497 /* Due to the way alias sets work, no variables with non-conflicting |
1501 alias sets may be assigned the same address. Add conflicts to | 1498 alias sets may be assigned the same address. Add conflicts to |
1502 reflect this. */ | 1499 reflect this. */ |
1503 add_alias_set_conflicts (); | 1500 add_alias_set_conflicts (); |
1504 | 1501 |
1505 /* If stack protection is enabled, we don't share space between | 1502 /* If stack protection is enabled, we don't share space between |
1506 vulnerable data and non-vulnerable data. */ | 1503 vulnerable data and non-vulnerable data. */ |
1507 if (flag_stack_protect) | 1504 if (flag_stack_protect) |
1508 add_stack_protection_conflicts (); | 1505 add_stack_protection_conflicts (); |
1509 | 1506 |
1510 /* Now that we have collected all stack variables, and have computed a | 1507 /* Now that we have collected all stack variables, and have computed a |
1511 minimal interference graph, attempt to save some stack space. */ | 1508 minimal interference graph, attempt to save some stack space. */ |
1512 partition_stack_vars (); | 1509 partition_stack_vars (); |
1513 if (dump_file) | 1510 if (dump_file) |
1514 dump_stack_var_partition (); | 1511 dump_stack_var_partition (); |
1515 } | 1512 } |
1516 | 1513 |
1517 /* There are several conditions under which we should create a | 1514 /* There are several conditions under which we should create a |
1518 stack guard: protect-all, alloca used, protected decls present. */ | 1515 stack guard: protect-all, alloca used, protected decls present. */ |
1519 if (flag_stack_protect == 2 | 1516 if (flag_stack_protect == 2 |
1520 || (flag_stack_protect | 1517 || (flag_stack_protect |
1521 && (cfun->calls_alloca || has_protected_decls))) | 1518 && (cfun->calls_alloca || has_protected_decls))) |
1522 create_stack_guard (); | 1519 create_stack_guard (); |
1523 | 1520 |
1524 /* Assign rtl to each variable based on these partitions. */ | 1521 /* Assign rtl to each variable based on these partitions. */ |
1525 if (stack_vars_num > 0) | 1522 if (stack_vars_num > 0) |
1526 { | 1523 { |
1527 /* Reorder decls to be protected by iterating over the variables | 1524 /* Reorder decls to be protected by iterating over the variables |
1528 array multiple times, and allocating out of each phase in turn. */ | 1525 array multiple times, and allocating out of each phase in turn. */ |
1529 /* ??? We could probably integrate this into the qsort we did | 1526 /* ??? We could probably integrate this into the qsort we did |
1530 earlier, such that we naturally see these variables first, | 1527 earlier, such that we naturally see these variables first, |
1531 and thus naturally allocate things in the right order. */ | 1528 and thus naturally allocate things in the right order. */ |
1532 if (has_protected_decls) | 1529 if (has_protected_decls) |
1533 { | 1530 { |
1534 /* Phase 1 contains only character arrays. */ | 1531 /* Phase 1 contains only character arrays. */ |
1535 expand_stack_vars (stack_protect_decl_phase_1); | 1532 expand_stack_vars (stack_protect_decl_phase_1); |
1536 | 1533 |
1537 /* Phase 2 contains other kinds of arrays. */ | 1534 /* Phase 2 contains other kinds of arrays. */ |
1538 if (flag_stack_protect == 2) | 1535 if (flag_stack_protect == 2) |
1539 expand_stack_vars (stack_protect_decl_phase_2); | 1536 expand_stack_vars (stack_protect_decl_phase_2); |
1540 } | 1537 } |
1541 | 1538 |
1542 expand_stack_vars (NULL); | 1539 expand_stack_vars (NULL); |
1543 | 1540 |
1544 fini_vars_expansion (); | 1541 fini_vars_expansion (); |
1545 } | 1542 } |
1561 /* If the target requires that FRAME_OFFSET be aligned, do it. */ | 1558 /* If the target requires that FRAME_OFFSET be aligned, do it. */ |
1562 if (STACK_ALIGNMENT_NEEDED) | 1559 if (STACK_ALIGNMENT_NEEDED) |
1563 { | 1560 { |
1564 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | 1561 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; |
1565 if (!FRAME_GROWS_DOWNWARD) | 1562 if (!FRAME_GROWS_DOWNWARD) |
1566 frame_offset += align - 1; | 1563 frame_offset += align - 1; |
1567 frame_offset &= -align; | 1564 frame_offset &= -align; |
1568 } | 1565 } |
1569 } | 1566 } |
1570 | 1567 |
1571 | 1568 |
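A note on the masking idiom above: for a power-of-two align, -align is a mask with the low bits clear, so `frame_offset &= -align` rounds toward minus infinity (right for a downward-growing frame, where offsets are negative), and adding align - 1 first converts that into rounding toward plus infinity. A quick stand-alone check, assuming two's complement:

```c
#include <stdio.h>

static long round_down (long x, long align) { return x & -align; }
static long round_up   (long x, long align) { return (x + align - 1) & -align; }

int
main (void)
{
  /* Frame grows downward: offsets are negative, round toward -inf.  */
  printf ("%ld\n", round_down (-37, 16));   /* prints -48 */
  /* Frame grows upward: bump first, then mask, to round toward +inf.  */
  printf ("%ld\n", round_up (37, 16));      /* prints 48 */
  return 0;
}
```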
1578 { | 1575 { |
1579 if (dump_file && (dump_flags & TDF_DETAILS)) | 1576 if (dump_file && (dump_flags & TDF_DETAILS)) |
1580 { | 1577 { |
1581 fprintf (dump_file, "\n;; "); | 1578 fprintf (dump_file, "\n;; "); |
1582 print_gimple_stmt (dump_file, stmt, 0, | 1579 print_gimple_stmt (dump_file, stmt, 0, |
1583 TDF_SLIM | (dump_flags & TDF_LINENO)); | 1580 TDF_SLIM | (dump_flags & TDF_LINENO)); |
1584 fprintf (dump_file, "\n"); | 1581 fprintf (dump_file, "\n"); |
1585 | 1582 |
1586 print_rtl (dump_file, since ? NEXT_INSN (since) : since); | 1583 print_rtl (dump_file, since ? NEXT_INSN (since) : since); |
1587 } | 1584 } |
1588 } | 1585 } |
1612 | 1609 |
1613 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | 1610 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
1614 { | 1611 { |
1615 lab_stmt = gsi_stmt (gsi); | 1612 lab_stmt = gsi_stmt (gsi); |
1616 if (gimple_code (lab_stmt) != GIMPLE_LABEL) | 1613 if (gimple_code (lab_stmt) != GIMPLE_LABEL) |
1617 break; | 1614 break; |
1618 | 1615 |
1619 lab = gimple_label_label (lab_stmt); | 1616 lab = gimple_label_label (lab_stmt); |
1620 if (DECL_NONLOCAL (lab)) | 1617 if (DECL_NONLOCAL (lab)) |
1621 break; | 1618 break; |
1622 | 1619 |
1623 return label_rtx (lab); | 1620 return label_rtx (lab); |
1624 } | 1621 } |
1625 | 1622 |
1626 elt = pointer_map_insert (lab_rtx_for_bb, bb); | 1623 elt = pointer_map_insert (lab_rtx_for_bb, bb); |
1651 if (BARRIER_P (get_last_insn ())) | 1648 if (BARRIER_P (get_last_insn ())) |
1652 { | 1649 { |
1653 rtx insn; | 1650 rtx insn; |
1654 remove_edge (e); | 1651 remove_edge (e); |
1655 /* Now we have a single successor block; if we have insns to | 1652 /* Now we have a single successor block; if we have insns to |
1656 insert on the remaining edge we potentially will insert | 1653 insert on the remaining edge we potentially will insert |
1657 them at the end of this block (if the dest block isn't feasible) | 1654 them at the end of this block (if the dest block isn't feasible) |
1658 in order to avoid splitting the edge. This insertion will take | 1655 in order to avoid splitting the edge. This insertion will take |
1659 place in front of the last jump. But we might have emitted | 1656 place in front of the last jump. But we might have emitted |
1660 multiple jumps (conditional and one unconditional) to the | 1657 multiple jumps (conditional and one unconditional) to the |
1661 same destination. Inserting in front of the last one then | 1658 same destination. Inserting in front of the last one then |
1662 is a problem. See PR 40021. We fix this by deleting all | 1659 is a problem. See PR 40021. We fix this by deleting all |
1663 jumps except the last unconditional one. */ | 1660 jumps except the last unconditional one. */ |
1664 insn = PREV_INSN (get_last_insn ()); | 1661 insn = PREV_INSN (get_last_insn ()); |
1665 /* Make sure we have an unconditional jump. Otherwise we're | 1662 /* Make sure we have an unconditional jump. Otherwise we're |
1666 confused. */ | 1663 confused. */ |
1667 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn)); | 1664 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn)); |
1668 for (insn = PREV_INSN (insn); insn != last;) | 1665 for (insn = PREV_INSN (insn); insn != last;) |
1669 { | 1666 { |
1670 insn = PREV_INSN (insn); | 1667 insn = PREV_INSN (insn); |
1671 if (JUMP_P (NEXT_INSN (insn))) | 1668 if (JUMP_P (NEXT_INSN (insn))) |
1712 && TREE_CODE (op0) == SSA_NAME | 1709 && TREE_CODE (op0) == SSA_NAME |
1713 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0))) | 1710 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0))) |
1714 { | 1711 { |
1715 gimple second = SSA_NAME_DEF_STMT (op0); | 1712 gimple second = SSA_NAME_DEF_STMT (op0); |
1716 if (gimple_code (second) == GIMPLE_ASSIGN) | 1713 if (gimple_code (second) == GIMPLE_ASSIGN) |
1717 { | 1714 { |
1718 enum tree_code code2 = gimple_assign_rhs_code (second); | 1715 enum tree_code code2 = gimple_assign_rhs_code (second); |
1719 if (TREE_CODE_CLASS (code2) == tcc_comparison) | 1716 if (TREE_CODE_CLASS (code2) == tcc_comparison) |
1720 { | 1717 { |
1721 code = code2; | 1718 code = code2; |
1722 op0 = gimple_assign_rhs1 (second); | 1719 op0 = gimple_assign_rhs1 (second); |
1723 op1 = gimple_assign_rhs2 (second); | 1720 op1 = gimple_assign_rhs2 (second); |
1724 } | 1721 } |
1725 /* If jumps are cheap turn some more codes into | 1722 /* If jumps are cheap turn some more codes into |
1726 jumpy sequences. */ | 1723 jumpy sequences. */ |
1727 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4) | 1724 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4) |
1728 { | 1725 { |
1729 if ((code2 == BIT_AND_EXPR | 1726 if ((code2 == BIT_AND_EXPR |
1730 && TYPE_PRECISION (TREE_TYPE (op0)) == 1 | 1727 && TYPE_PRECISION (TREE_TYPE (op0)) == 1 |
1731 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST) | 1728 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST) |
1732 || code2 == TRUTH_AND_EXPR) | 1729 || code2 == TRUTH_AND_EXPR) |
1733 { | 1730 { |
1734 code = TRUTH_ANDIF_EXPR; | 1731 code = TRUTH_ANDIF_EXPR; |
1735 op0 = gimple_assign_rhs1 (second); | 1732 op0 = gimple_assign_rhs1 (second); |
1736 op1 = gimple_assign_rhs2 (second); | 1733 op1 = gimple_assign_rhs2 (second); |
1737 } | 1734 } |
1738 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR) | 1735 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR) |
1739 { | 1736 { |
1740 code = TRUTH_ORIF_EXPR; | 1737 code = TRUTH_ORIF_EXPR; |
1741 op0 = gimple_assign_rhs1 (second); | 1738 op0 = gimple_assign_rhs1 (second); |
1742 op1 = gimple_assign_rhs2 (second); | 1739 op1 = gimple_assign_rhs2 (second); |
1743 } | 1740 } |
1744 } | 1741 } |
1745 } | 1742 } |
1746 } | 1743 } |
1747 | 1744 |
1748 last2 = last = get_last_insn (); | 1745 last2 = last = get_last_insn (); |
1749 | 1746 |
1750 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | 1747 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); |
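A hypothetical source-level illustration of the BRANCH_COST rewrite above: when branches are cheap (cost below 4), a single-bit BIT_AND_EXPR or BIT_IOR_EXPR feeding the condition is re-expanded as TRUTH_ANDIF_EXPR / TRUTH_ORIF_EXPR, i.e. as short-circuit jumps:

```c
int
g (int a, int b, int c, int d)
{
  _Bool t1 = a < b, t2 = c < d;
  /* GIMPLE roughly: t3 = t1 & t2; if (t3) ...  With a cheap branch
     cost the one-bit AND is re-expanded like `if (t1 && t2)`: two
     conditional jumps instead of materializing an AND insn.  */
  if (t1 & t2)
    return 1;
  return 0;
}
```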
1764 { | 1761 { |
1765 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), | 1762 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), |
1766 true_edge->probability); | 1763 true_edge->probability); |
1767 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 1764 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1768 if (true_edge->goto_locus) | 1765 if (true_edge->goto_locus) |
1769 { | 1766 { |
1770 set_curr_insn_source_location (true_edge->goto_locus); | 1767 set_curr_insn_source_location (true_edge->goto_locus); |
1771 set_curr_insn_block (true_edge->goto_block); | 1768 set_curr_insn_block (true_edge->goto_block); |
1772 true_edge->goto_locus = curr_insn_locator (); | 1769 true_edge->goto_locus = curr_insn_locator (); |
1773 } | 1770 } |
1774 true_edge->goto_block = NULL; | 1771 true_edge->goto_block = NULL; |
1775 false_edge->flags |= EDGE_FALLTHRU; | 1772 false_edge->flags |= EDGE_FALLTHRU; |
1776 maybe_cleanup_end_of_block (false_edge, last); | 1773 maybe_cleanup_end_of_block (false_edge, last); |
1777 return NULL; | 1774 return NULL; |
1778 } | 1775 } |
1780 { | 1777 { |
1781 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest), | 1778 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest), |
1782 false_edge->probability); | 1779 false_edge->probability); |
1783 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 1780 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1784 if (false_edge->goto_locus) | 1781 if (false_edge->goto_locus) |
1785 { | 1782 { |
1786 set_curr_insn_source_location (false_edge->goto_locus); | 1783 set_curr_insn_source_location (false_edge->goto_locus); |
1787 set_curr_insn_block (false_edge->goto_block); | 1784 set_curr_insn_block (false_edge->goto_block); |
1788 false_edge->goto_locus = curr_insn_locator (); | 1785 false_edge->goto_locus = curr_insn_locator (); |
1789 } | 1786 } |
1790 false_edge->goto_block = NULL; | 1787 false_edge->goto_block = NULL; |
1791 true_edge->flags |= EDGE_FALLTHRU; | 1788 true_edge->flags |= EDGE_FALLTHRU; |
1792 maybe_cleanup_end_of_block (true_edge, last); | 1789 maybe_cleanup_end_of_block (true_edge, last); |
1793 return NULL; | 1790 return NULL; |
1794 } | 1791 } |
1902 switch (gimple_code (stmt)) | 1899 switch (gimple_code (stmt)) |
1903 { | 1900 { |
1904 case GIMPLE_GOTO: | 1901 case GIMPLE_GOTO: |
1905 op0 = gimple_goto_dest (stmt); | 1902 op0 = gimple_goto_dest (stmt); |
1906 if (TREE_CODE (op0) == LABEL_DECL) | 1903 if (TREE_CODE (op0) == LABEL_DECL) |
1907 expand_goto (op0); | 1904 expand_goto (op0); |
1908 else | 1905 else |
1909 expand_computed_goto (op0); | 1906 expand_computed_goto (op0); |
1910 break; | 1907 break; |
1911 case GIMPLE_LABEL: | 1908 case GIMPLE_LABEL: |
1912 expand_label (gimple_label_label (stmt)); | 1909 expand_label (gimple_label_label (stmt)); |
1913 break; | 1910 break; |
1914 case GIMPLE_NOP: | 1911 case GIMPLE_NOP: |
1926 | 1923 |
1927 case GIMPLE_RETURN: | 1924 case GIMPLE_RETURN: |
1928 op0 = gimple_return_retval (stmt); | 1925 op0 = gimple_return_retval (stmt); |
1929 | 1926 |
1930 if (op0 && op0 != error_mark_node) | 1927 if (op0 && op0 != error_mark_node) |
1931 { | 1928 { |
1932 tree result = DECL_RESULT (current_function_decl); | 1929 tree result = DECL_RESULT (current_function_decl); |
1933 | 1930 |
1934 /* If we are not returning the current function's RESULT_DECL, | 1931 /* If we are not returning the current function's RESULT_DECL, |
1935 build an assignment to it. */ | 1932 build an assignment to it. */ |
1936 if (op0 != result) | 1933 if (op0 != result) |
1937 { | 1934 { |
1938 /* I believe that a function's RESULT_DECL is unique. */ | 1935 /* I believe that a function's RESULT_DECL is unique. */ |
1939 gcc_assert (TREE_CODE (op0) != RESULT_DECL); | 1936 gcc_assert (TREE_CODE (op0) != RESULT_DECL); |
1940 | 1937 |
1941 /* ??? We'd like to use simply expand_assignment here, | 1938 /* ??? We'd like to use simply expand_assignment here, |
1942 but this fails if the value is of BLKmode but the return | 1939 but this fails if the value is of BLKmode but the return |
1943 decl is a register. expand_return has special handling | 1940 decl is a register. expand_return has special handling |
1944 for this combination, which eventually should move | 1941 for this combination, which eventually should move |
1945 to common code. See comments there. Until then, let's | 1942 to common code. See comments there. Until then, let's |
1946 build a modify expression :-/ */ | 1943 build a modify expression :-/ */ |
1947 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result), | 1944 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result), |
1948 result, op0); | 1945 result, op0); |
1949 } | 1946 } |
1950 } | 1947 } |
1951 if (!op0) | 1948 if (!op0) |
1952 expand_null_return (); | 1949 expand_null_return (); |
1953 else | 1950 else |
1954 expand_return (op0); | 1951 expand_return (op0); |
1955 break; | 1952 break; |
1956 | 1953 |
1957 case GIMPLE_ASSIGN: | 1954 case GIMPLE_ASSIGN: |
1958 { | 1955 { |
1959 tree lhs = gimple_assign_lhs (stmt); | 1956 tree lhs = gimple_assign_lhs (stmt); |
2090 lp_nr = lookup_stmt_eh_lp (stmt); | 2087 lp_nr = lookup_stmt_eh_lp (stmt); |
2091 if (lp_nr) | 2088 if (lp_nr) |
2092 { | 2089 { |
2093 rtx insn; | 2090 rtx insn; |
2094 for (insn = next_real_insn (last); insn; | 2091 for (insn = next_real_insn (last); insn; |
2095 insn = next_real_insn (insn)) | 2092 insn = next_real_insn (insn)) |
2096 { | 2093 { |
2097 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) | 2094 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) |
2098 /* If we want exceptions for non-call insns, any | 2095 /* If we want exceptions for non-call insns, any |
2099 may_trap_p instruction may throw. */ | 2096 may_trap_p instruction may throw. */ |
2100 && GET_CODE (PATTERN (insn)) != CLOBBER | 2097 && GET_CODE (PATTERN (insn)) != CLOBBER |
2101 && GET_CODE (PATTERN (insn)) != USE | 2098 && GET_CODE (PATTERN (insn)) != USE |
2102 && insn_could_throw_p (insn)) | 2099 && insn_could_throw_p (insn)) |
2103 make_reg_eh_region_note (insn, 0, lp_nr); | 2100 make_reg_eh_region_note (insn, 0, lp_nr); |
2104 } | 2101 } |
2105 } | 2102 } |
2106 | 2103 |
2107 return last; | 2104 return last; |
2108 } | 2105 } |
2109 | 2106 |
2153 count = 0; | 2150 count = 0; |
2154 | 2151 |
2155 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | 2152 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
2156 { | 2153 { |
2157 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) | 2154 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) |
2158 { | 2155 { |
2159 if (e->dest != EXIT_BLOCK_PTR) | 2156 if (e->dest != EXIT_BLOCK_PTR) |
2160 { | 2157 { |
2161 e->dest->count -= e->count; | 2158 e->dest->count -= e->count; |
2162 e->dest->frequency -= EDGE_FREQUENCY (e); | 2159 e->dest->frequency -= EDGE_FREQUENCY (e); |
2163 if (e->dest->count < 0) | 2160 if (e->dest->count < 0) |
2164 e->dest->count = 0; | 2161 e->dest->count = 0; |
2165 if (e->dest->frequency < 0) | 2162 if (e->dest->frequency < 0) |
2166 e->dest->frequency = 0; | 2163 e->dest->frequency = 0; |
2167 } | 2164 } |
2168 count += e->count; | 2165 count += e->count; |
2169 probability += e->probability; | 2166 probability += e->probability; |
2170 remove_edge (e); | 2167 remove_edge (e); |
2171 } | 2168 } |
2172 else | 2169 else |
2173 ei_next (&ei); | 2170 ei_next (&ei); |
2174 } | 2171 } |
2175 | 2172 |
2176 /* This is somewhat ugly: the call_expr expander often emits instructions | 2173 /* This is somewhat ugly: the call_expr expander often emits instructions |
2177 after the sibcall (to perform the function return). These confuse the | 2174 after the sibcall (to perform the function return). These confuse the |
2178 find_many_sub_basic_blocks code, so we need to get rid of these. */ | 2175 find_many_sub_basic_blocks code, so we need to get rid of these. */ |
2181 | 2178 |
2182 *can_fallthru = false; | 2179 *can_fallthru = false; |
2183 while (NEXT_INSN (last)) | 2180 while (NEXT_INSN (last)) |
2184 { | 2181 { |
2185 /* For instance an sqrt builtin expander expands an if with a | 2182 /* For instance an sqrt builtin expander expands an if with a |
2186 sibcall in the then arm and a label for the `else`. */ | 2183 sibcall in the then arm and a label for the `else`. */ |
2187 if (LABEL_P (NEXT_INSN (last))) | 2184 if (LABEL_P (NEXT_INSN (last))) |
2188 { | 2185 { |
2189 *can_fallthru = true; | 2186 *can_fallthru = true; |
2190 break; | 2187 break; |
2191 } | 2188 } |
2192 delete_insn (NEXT_INSN (last)); | 2189 delete_insn (NEXT_INSN (last)); |
2193 } | 2190 } |
2194 | 2191 |
2195 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); | 2192 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); |
2196 e->probability += probability; | 2193 e->probability += probability; |
2202 { | 2199 { |
2203 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | 2200 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); |
2204 | 2201 |
2205 last = BB_END (bb); | 2202 last = BB_END (bb); |
2206 if (BARRIER_P (last)) | 2203 if (BARRIER_P (last)) |
2207 BB_END (bb) = PREV_INSN (last); | 2204 BB_END (bb) = PREV_INSN (last); |
2208 } | 2205 } |
2209 | 2206 |
2210 maybe_dump_rtl_for_gimple_stmt (stmt, last2); | 2207 maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
2211 | 2208 |
2212 return bb; | 2209 return bb; |
2220 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */ | 2217 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */ |
2221 return gen_rtx_IF_THEN_ELSE | 2218 return gen_rtx_IF_THEN_ELSE |
2222 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | 2219 (mode, gen_rtx_NE (BImode, mod, const0_rtx), |
2223 gen_rtx_IF_THEN_ELSE | 2220 gen_rtx_IF_THEN_ELSE |
2224 (mode, gen_rtx_LT (BImode, | 2221 (mode, gen_rtx_LT (BImode, |
2225 gen_rtx_DIV (mode, op1, mod), | 2222 gen_rtx_DIV (mode, op1, mod), |
2226 const0_rtx), | 2223 const0_rtx), |
2227 constm1_rtx, const0_rtx), | 2224 constm1_rtx, const0_rtx), |
2228 const0_rtx); | 2225 const0_rtx); |
2229 } | 2226 } |
2230 | 2227 |
2231 /* Return the difference between the ceil and the truncated result of | 2228 /* Return the difference between the ceil and the truncated result of |
2236 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */ | 2233 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */ |
2237 return gen_rtx_IF_THEN_ELSE | 2234 return gen_rtx_IF_THEN_ELSE |
2238 (mode, gen_rtx_NE (BImode, mod, const0_rtx), | 2235 (mode, gen_rtx_NE (BImode, mod, const0_rtx), |
2239 gen_rtx_IF_THEN_ELSE | 2236 gen_rtx_IF_THEN_ELSE |
2240 (mode, gen_rtx_GT (BImode, | 2237 (mode, gen_rtx_GT (BImode, |
2241 gen_rtx_DIV (mode, op1, mod), | 2238 gen_rtx_DIV (mode, op1, mod), |
2242 const0_rtx), | 2239 const0_rtx), |
2243 const1_rtx, const0_rtx), | 2240 const1_rtx, const0_rtx), |
2244 const0_rtx); | 2241 const0_rtx); |
2245 } | 2242 } |
2246 | 2243 |
2247 /* Return the difference between the ceil and the truncated result of | 2244 /* Return the difference between the ceil and the truncated result of |
2264 /* (abs (mod) >= abs (op1) - abs (mod) | 2261 /* (abs (mod) >= abs (op1) - abs (mod) |
2265 ? (op1 / mod > 0 ? 1 : -1) | 2262 ? (op1 / mod > 0 ? 1 : -1) |
2266 : 0) */ | 2263 : 0) */ |
2267 return gen_rtx_IF_THEN_ELSE | 2264 return gen_rtx_IF_THEN_ELSE |
2268 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod), | 2265 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod), |
2269 gen_rtx_MINUS (mode, | 2266 gen_rtx_MINUS (mode, |
2270 gen_rtx_ABS (mode, op1), | 2267 gen_rtx_ABS (mode, op1), |
2271 gen_rtx_ABS (mode, mod))), | 2268 gen_rtx_ABS (mode, mod))), |
2272 gen_rtx_IF_THEN_ELSE | 2269 gen_rtx_IF_THEN_ELSE |
2273 (mode, gen_rtx_GT (BImode, | 2270 (mode, gen_rtx_GT (BImode, |
2274 gen_rtx_DIV (mode, op1, mod), | 2271 gen_rtx_DIV (mode, op1, mod), |
2275 const0_rtx), | 2272 const0_rtx), |
2276 const1_rtx, constm1_rtx), | 2273 const1_rtx, constm1_rtx), |
2277 const0_rtx); | 2274 const0_rtx); |
2278 } | 2275 } |
2279 | 2276 |
2280 /* Return the difference between the rounded and the truncated result | 2277 /* Return the difference between the rounded and the truncated result |
2285 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | 2282 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1) |
2286 { | 2283 { |
2287 /* (mod >= op1 - mod ? 1 : 0) */ | 2284 /* (mod >= op1 - mod ? 1 : 0) */ |
2288 return gen_rtx_IF_THEN_ELSE | 2285 return gen_rtx_IF_THEN_ELSE |
2289 (mode, gen_rtx_GE (BImode, mod, | 2286 (mode, gen_rtx_GE (BImode, mod, |
2290 gen_rtx_MINUS (mode, op1, mod)), | 2287 gen_rtx_MINUS (mode, op1, mod)), |
2291 const1_rtx, const0_rtx); | 2288 const1_rtx, const0_rtx); |
2292 } | 2289 } |
2293 | 2290 |
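A note on the *_adjust helpers above: they recover floor, ceil and round division from the truncating quotient and remainder, and `op1 / mod` serves purely as a sign test — since 0 < |mod| < |op1|, that quotient is nonzero and negative exactly when op1 and mod have opposite signs. A stand-alone check of the two signed identities in plain C (the RTL helpers build the same expressions symbolically with gen_rtx_IF_THEN_ELSE):

```c
#include <stdio.h>

/* floor_sdiv_adjust: (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0)  */
static int
floor_div (int op0, int op1)
{
  int q = op0 / op1, mod = op0 % op1;
  return q + (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0);
}

/* ceil_sdiv_adjust: (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0)  */
static int
ceil_div (int op0, int op1)
{
  int q = op0 / op1, mod = op0 % op1;
  return q + (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0);
}

int
main (void)
{
  printf ("%d %d\n", floor_div (-7, 2), ceil_div (-7, 2));  /* -4 -3 */
  printf ("%d %d\n", floor_div (7, 2),  ceil_div (7, 2));   /*  3  4 */
  return 0;
}
```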
2294 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting | 2291 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting |
2295 any rtl. */ | 2292 any rtl. */ |
2314 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) | 2311 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) |
2315 return x; | 2312 return x; |
2316 | 2313 |
2317 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode)) | 2314 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode)) |
2318 x = simplify_gen_subreg (mode, x, xmode, | 2315 x = simplify_gen_subreg (mode, x, xmode, |
2319 subreg_lowpart_offset | 2316 subreg_lowpart_offset |
2320 (mode, xmode)); | 2317 (mode, xmode)); |
2321 else if (POINTERS_EXTEND_UNSIGNED > 0) | 2318 else if (POINTERS_EXTEND_UNSIGNED > 0) |
2322 x = gen_rtx_ZERO_EXTEND (mode, x); | 2319 x = gen_rtx_ZERO_EXTEND (mode, x); |
2323 else if (!POINTERS_EXTEND_UNSIGNED) | 2320 else if (!POINTERS_EXTEND_UNSIGNED) |
2324 x = gen_rtx_SIGN_EXTEND (mode, x); | 2321 x = gen_rtx_SIGN_EXTEND (mode, x); |
2325 else | 2322 else |
2408 break; | 2405 break; |
2409 | 2406 |
2410 ternary: | 2407 ternary: |
2411 op2 = expand_debug_expr (TREE_OPERAND (exp, 2)); | 2408 op2 = expand_debug_expr (TREE_OPERAND (exp, 2)); |
2412 if (!op2) | 2409 if (!op2) |
2413 return NULL_RTX; | 2410 return NULL_RTX; |
2414 /* Fall through. */ | 2411 /* Fall through. */ |
2415 | 2412 |
2416 binary: | 2413 binary: |
2417 case tcc_binary: | 2414 case tcc_binary: |
2418 case tcc_comparison: | 2415 case tcc_comparison: |
2419 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); | 2416 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); |
2420 if (!op1) | 2417 if (!op1) |
2421 return NULL_RTX; | 2418 return NULL_RTX; |
2422 /* Fall through. */ | 2419 /* Fall through. */ |
2423 | 2420 |
2424 unary: | 2421 unary: |
2425 case tcc_unary: | 2422 case tcc_unary: |
2426 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | 2423 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); |
2427 if (!op0) | 2424 if (!op0) |
2428 return NULL_RTX; | 2425 return NULL_RTX; |
2429 break; | 2426 break; |
2430 | 2427 |
2431 case tcc_type: | 2428 case tcc_type: |
2432 case tcc_statement: | 2429 case tcc_statement: |
2433 gcc_unreachable (); | 2430 gcc_unreachable (); |
2442 | 2439 |
2443 switch (TREE_CODE (exp)) | 2440 switch (TREE_CODE (exp)) |
2444 { | 2441 { |
2445 case STRING_CST: | 2442 case STRING_CST: |
2446 if (!lookup_constant_def (exp)) | 2443 if (!lookup_constant_def (exp)) |
2447 { | 2444 { |
2448 if (strlen (TREE_STRING_POINTER (exp)) + 1 | 2445 if (strlen (TREE_STRING_POINTER (exp)) + 1 |
2449 != (size_t) TREE_STRING_LENGTH (exp)) | 2446 != (size_t) TREE_STRING_LENGTH (exp)) |
2450 return NULL_RTX; | 2447 return NULL_RTX; |
2451 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp)); | 2448 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp)); |
2452 op0 = gen_rtx_MEM (BLKmode, op0); | 2449 op0 = gen_rtx_MEM (BLKmode, op0); |
2453 set_mem_attributes (op0, exp, 0); | 2450 set_mem_attributes (op0, exp, 0); |
2454 return op0; | 2451 return op0; |
2455 } | 2452 } |
2456 /* Fall through... */ | 2453 /* Fall through... */ |
2457 | 2454 |
2458 case INTEGER_CST: | 2455 case INTEGER_CST: |
2459 case REAL_CST: | 2456 case REAL_CST: |
2460 case FIXED_CST: | 2457 case FIXED_CST: |
2469 | 2466 |
2470 case DEBUG_EXPR_DECL: | 2467 case DEBUG_EXPR_DECL: |
2471 op0 = DECL_RTL_IF_SET (exp); | 2468 op0 = DECL_RTL_IF_SET (exp); |
2472 | 2469 |
2473 if (op0) | 2470 if (op0) |
2474 return op0; | 2471 return op0; |
2475 | 2472 |
2476 op0 = gen_rtx_DEBUG_EXPR (mode); | 2473 op0 = gen_rtx_DEBUG_EXPR (mode); |
2477 DEBUG_EXPR_TREE_DECL (op0) = exp; | 2474 DEBUG_EXPR_TREE_DECL (op0) = exp; |
2478 SET_DECL_RTL (exp, op0); | 2475 SET_DECL_RTL (exp, op0); |
2479 | 2476 |
2503 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF | 2500 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF |
2504 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp) | 2501 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp) |
2505 return NULL; | 2502 return NULL; |
2506 } | 2503 } |
2507 else | 2504 else |
2508 op0 = copy_rtx (op0); | 2505 op0 = copy_rtx (op0); |
2509 | 2506 |
2510 if (GET_MODE (op0) == BLKmode | 2507 if (GET_MODE (op0) == BLKmode |
2511 /* If op0 is not BLKmode but mode is BLKmode, adjust_mode | 2508 /* If op0 is not BLKmode but mode is BLKmode, adjust_mode |
2512 below would ICE. While it is likely a front-end bug, | 2509 below would ICE. While it is likely a front-end bug, |
2513 try to be robust here. See PR43166. */ | 2510 try to be robust here. See PR43166. */ |
2591 } | 2588 } |
2592 /* FALLTHROUGH */ | 2589 /* FALLTHROUGH */ |
2593 case INDIRECT_REF: | 2590 case INDIRECT_REF: |
2594 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | 2591 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); |
2595 if (!op0) | 2592 if (!op0) |
2596 return NULL; | 2593 return NULL; |
2597 | 2594 |
2598 if (TREE_CODE (exp) == MEM_REF) | 2595 if (TREE_CODE (exp) == MEM_REF) |
2599 { | 2596 { |
2600 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR | 2597 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR |
2601 || (GET_CODE (op0) == PLUS | 2598 || (GET_CODE (op0) == PLUS |
2634 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR | 2631 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR |
2635 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0))) | 2632 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0))) |
2636 return NULL; | 2633 return NULL; |
2637 | 2634 |
2638 op0 = expand_debug_expr | 2635 op0 = expand_debug_expr |
2639 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); | 2636 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); |
2640 if (!op0) | 2637 if (!op0) |
2641 return NULL; | 2638 return NULL; |
2642 | 2639 |
2643 if (POINTER_TYPE_P (TREE_TYPE (exp))) | 2640 if (POINTER_TYPE_P (TREE_TYPE (exp))) |
2644 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | 2641 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); |
2645 else | 2642 else |
2646 as = ADDR_SPACE_GENERIC; | 2643 as = ADDR_SPACE_GENERIC; |
2791 case BIT_NOT_EXPR: | 2788 case BIT_NOT_EXPR: |
2792 return gen_rtx_NOT (mode, op0); | 2789 return gen_rtx_NOT (mode, op0); |
2793 | 2790 |
2794 case FLOAT_EXPR: | 2791 case FLOAT_EXPR: |
2795 if (unsignedp) | 2792 if (unsignedp) |
2796 return gen_rtx_UNSIGNED_FLOAT (mode, op0); | 2793 return gen_rtx_UNSIGNED_FLOAT (mode, op0); |
2797 else | 2794 else |
2798 return gen_rtx_FLOAT (mode, op0); | 2795 return gen_rtx_FLOAT (mode, op0); |
2799 | 2796 |
2800 case FIX_TRUNC_EXPR: | 2797 case FIX_TRUNC_EXPR: |
2801 if (unsignedp) | 2798 if (unsignedp) |
2802 return gen_rtx_UNSIGNED_FIX (mode, op0); | 2799 return gen_rtx_UNSIGNED_FIX (mode, op0); |
2803 else | 2800 else |
2804 return gen_rtx_FIX (mode, op0); | 2801 return gen_rtx_FIX (mode, op0); |
2805 | 2802 |
2806 case POINTER_PLUS_EXPR: | 2803 case POINTER_PLUS_EXPR: |
2807 /* For the rare target where pointers are not the same size as | 2804 /* For the rare target where pointers are not the same size as |
2808 size_t, we need to check for mis-matched modes and correct | 2805 size_t, we need to check for mis-matched modes and correct |
2809 the addend. */ | 2806 the addend. */ |
2831 | 2828 |
2832 case RDIV_EXPR: | 2829 case RDIV_EXPR: |
2833 case TRUNC_DIV_EXPR: | 2830 case TRUNC_DIV_EXPR: |
2834 case EXACT_DIV_EXPR: | 2831 case EXACT_DIV_EXPR: |
2835 if (unsignedp) | 2832 if (unsignedp) |
2836 return gen_rtx_UDIV (mode, op0, op1); | 2833 return gen_rtx_UDIV (mode, op0, op1); |
2837 else | 2834 else |
2838 return gen_rtx_DIV (mode, op0, op1); | 2835 return gen_rtx_DIV (mode, op0, op1); |
2839 | 2836 |
2840 case TRUNC_MOD_EXPR: | 2837 case TRUNC_MOD_EXPR: |
2841 if (unsignedp) | 2838 if (unsignedp) |
2842 return gen_rtx_UMOD (mode, op0, op1); | 2839 return gen_rtx_UMOD (mode, op0, op1); |
2843 else | 2840 else |
2844 return gen_rtx_MOD (mode, op0, op1); | 2841 return gen_rtx_MOD (mode, op0, op1); |
2845 | 2842 |
2846 case FLOOR_DIV_EXPR: | 2843 case FLOOR_DIV_EXPR: |
2847 if (unsignedp) | 2844 if (unsignedp) |
2848 return gen_rtx_UDIV (mode, op0, op1); | 2845 return gen_rtx_UDIV (mode, op0, op1); |
2849 else | 2846 else |
2850 { | 2847 { |
2851 rtx div = gen_rtx_DIV (mode, op0, op1); | 2848 rtx div = gen_rtx_DIV (mode, op0, op1); |
2852 rtx mod = gen_rtx_MOD (mode, op0, op1); | 2849 rtx mod = gen_rtx_MOD (mode, op0, op1); |
2853 rtx adj = floor_sdiv_adjust (mode, mod, op1); | 2850 rtx adj = floor_sdiv_adjust (mode, mod, op1); |
2854 return gen_rtx_PLUS (mode, div, adj); | 2851 return gen_rtx_PLUS (mode, div, adj); |
2855 } | 2852 } |
2856 | 2853 |
2857 case FLOOR_MOD_EXPR: | 2854 case FLOOR_MOD_EXPR: |
2858 if (unsignedp) | 2855 if (unsignedp) |
2859 return gen_rtx_UMOD (mode, op0, op1); | 2856 return gen_rtx_UMOD (mode, op0, op1); |
2860 else | 2857 else |
2861 { | 2858 { |
2862 rtx mod = gen_rtx_MOD (mode, op0, op1); | 2859 rtx mod = gen_rtx_MOD (mode, op0, op1); |
2863 rtx adj = floor_sdiv_adjust (mode, mod, op1); | 2860 rtx adj = floor_sdiv_adjust (mode, mod, op1); |
2864 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 2861 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); |
2865 return gen_rtx_PLUS (mode, mod, adj); | 2862 return gen_rtx_PLUS (mode, mod, adj); |
2866 } | 2863 } |
2867 | 2864 |
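The FLOOR_MOD_EXPR expansion above follows directly from the FLOOR_DIV_EXPR one: if the floor quotient is div + adj, the matching remainder must be op0 - (div + adj) * op1 = mod - adj * op1, which is exactly the mod + (-(adj * op1)) tree being built. A small C sketch of the pair, assuming floor_sdiv_adjust's adjustment is -1 exactly when the exact quotient is negative and non-integral (the mirror of the ceil case; the sketch names are made up):

    /* Floor division/modulo from C's truncating operators.  */
    static int
    fdiv_adj (int op0, int op1)
    {
      int mod = op0 % op1;
      return (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0);
    }

    static int sdiv_floor (int op0, int op1)
    { return op0 / op1 + fdiv_adj (op0, op1); }

    static int smod_floor (int op0, int op1)
    { return op0 % op1 - fdiv_adj (op0, op1) * op1; }

    /* Identity: op0 == sdiv_floor (op0, op1) * op1 + smod_floor (op0, op1);
       the remainder takes the divisor's sign: smod_floor (-7, 2) == 1.  */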
2868 case CEIL_DIV_EXPR: | 2865 case CEIL_DIV_EXPR: |
2869 if (unsignedp) | 2866 if (unsignedp) |
2870 { | 2867 { |
2871 rtx div = gen_rtx_UDIV (mode, op0, op1); | 2868 rtx div = gen_rtx_UDIV (mode, op0, op1); |
2872 rtx mod = gen_rtx_UMOD (mode, op0, op1); | 2869 rtx mod = gen_rtx_UMOD (mode, op0, op1); |
2873 rtx adj = ceil_udiv_adjust (mode, mod, op1); | 2870 rtx adj = ceil_udiv_adjust (mode, mod, op1); |
2874 return gen_rtx_PLUS (mode, div, adj); | 2871 return gen_rtx_PLUS (mode, div, adj); |
2875 } | 2872 } |
2876 else | 2873 else |
2877 { | 2874 { |
2878 rtx div = gen_rtx_DIV (mode, op0, op1); | 2875 rtx div = gen_rtx_DIV (mode, op0, op1); |
2879 rtx mod = gen_rtx_MOD (mode, op0, op1); | 2876 rtx mod = gen_rtx_MOD (mode, op0, op1); |
2880 rtx adj = ceil_sdiv_adjust (mode, mod, op1); | 2877 rtx adj = ceil_sdiv_adjust (mode, mod, op1); |
2881 return gen_rtx_PLUS (mode, div, adj); | 2878 return gen_rtx_PLUS (mode, div, adj); |
2882 } | 2879 } |
2883 | 2880 |
2884 case CEIL_MOD_EXPR: | 2881 case CEIL_MOD_EXPR: |
2885 if (unsignedp) | 2882 if (unsignedp) |
2886 { | 2883 { |
2887 rtx mod = gen_rtx_UMOD (mode, op0, op1); | 2884 rtx mod = gen_rtx_UMOD (mode, op0, op1); |
2888 rtx adj = ceil_udiv_adjust (mode, mod, op1); | 2885 rtx adj = ceil_udiv_adjust (mode, mod, op1); |
2889 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 2886 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); |
2890 return gen_rtx_PLUS (mode, mod, adj); | 2887 return gen_rtx_PLUS (mode, mod, adj); |
2891 } | 2888 } |
2892 else | 2889 else |
2893 { | 2890 { |
2894 rtx mod = gen_rtx_MOD (mode, op0, op1); | 2891 rtx mod = gen_rtx_MOD (mode, op0, op1); |
2895 rtx adj = ceil_sdiv_adjust (mode, mod, op1); | 2892 rtx adj = ceil_sdiv_adjust (mode, mod, op1); |
2896 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 2893 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); |
2897 return gen_rtx_PLUS (mode, mod, adj); | 2894 return gen_rtx_PLUS (mode, mod, adj); |
2898 } | 2895 } |
2899 | 2896 |
2900 case ROUND_DIV_EXPR: | 2897 case ROUND_DIV_EXPR: |
2901 if (unsignedp) | 2898 if (unsignedp) |
2902 { | 2899 { |
2903 rtx div = gen_rtx_UDIV (mode, op0, op1); | 2900 rtx div = gen_rtx_UDIV (mode, op0, op1); |
2904 rtx mod = gen_rtx_UMOD (mode, op0, op1); | 2901 rtx mod = gen_rtx_UMOD (mode, op0, op1); |
2905 rtx adj = round_udiv_adjust (mode, mod, op1); | 2902 rtx adj = round_udiv_adjust (mode, mod, op1); |
2906 return gen_rtx_PLUS (mode, div, adj); | 2903 return gen_rtx_PLUS (mode, div, adj); |
2907 } | 2904 } |
2908 else | 2905 else |
2909 { | 2906 { |
2910 rtx div = gen_rtx_DIV (mode, op0, op1); | 2907 rtx div = gen_rtx_DIV (mode, op0, op1); |
2911 rtx mod = gen_rtx_MOD (mode, op0, op1); | 2908 rtx mod = gen_rtx_MOD (mode, op0, op1); |
2912 rtx adj = round_sdiv_adjust (mode, mod, op1); | 2909 rtx adj = round_sdiv_adjust (mode, mod, op1); |
2913 return gen_rtx_PLUS (mode, div, adj); | 2910 return gen_rtx_PLUS (mode, div, adj); |
2914 } | 2911 } |
2915 | 2912 |
2916 case ROUND_MOD_EXPR: | 2913 case ROUND_MOD_EXPR: |
2917 if (unsignedp) | 2914 if (unsignedp) |
2918 { | 2915 { |
2919 rtx mod = gen_rtx_UMOD (mode, op0, op1); | 2916 rtx mod = gen_rtx_UMOD (mode, op0, op1); |
2920 rtx adj = round_udiv_adjust (mode, mod, op1); | 2917 rtx adj = round_udiv_adjust (mode, mod, op1); |
2921 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 2918 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); |
2922 return gen_rtx_PLUS (mode, mod, adj); | 2919 return gen_rtx_PLUS (mode, mod, adj); |
2923 } | 2920 } |
2924 else | 2921 else |
2925 { | 2922 { |
2926 rtx mod = gen_rtx_MOD (mode, op0, op1); | 2923 rtx mod = gen_rtx_MOD (mode, op0, op1); |
2927 rtx adj = round_sdiv_adjust (mode, mod, op1); | 2924 rtx adj = round_sdiv_adjust (mode, mod, op1); |
2928 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | 2925 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); |
2929 return gen_rtx_PLUS (mode, mod, adj); | 2926 return gen_rtx_PLUS (mode, mod, adj); |
2930 } | 2927 } |
2931 | 2928 |
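The same mod - adj * op1 pattern closes the ROUND_MOD_EXPR case: with the rounded quotient q = div + adj, the remainder op0 - q * op1 again lands in mod + (-(adj * op1)). Reusing the sdiv_round sketch from the round case above (illustrative names):

    /* Round remainder derived from the rounded quotient.  */
    static int
    smod_round (int op0, int op1)
    { return op0 - sdiv_round (op0, op1) * op1; }

    /* smod_round (7, 2) == -1, since 7 == 4 * 2 + (-1).  */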
2932 case LSHIFT_EXPR: | 2929 case LSHIFT_EXPR: |
2933 return gen_rtx_ASHIFT (mode, op0, op1); | 2930 return gen_rtx_ASHIFT (mode, op0, op1); |
2934 | 2931 |
2935 case RSHIFT_EXPR: | 2932 case RSHIFT_EXPR: |
2936 if (unsignedp) | 2933 if (unsignedp) |
2937 return gen_rtx_LSHIFTRT (mode, op0, op1); | 2934 return gen_rtx_LSHIFTRT (mode, op0, op1); |
2938 else | 2935 else |
2939 return gen_rtx_ASHIFTRT (mode, op0, op1); | 2936 return gen_rtx_ASHIFTRT (mode, op0, op1); |
2940 | 2937 |
2941 case LROTATE_EXPR: | 2938 case LROTATE_EXPR: |
2942 return gen_rtx_ROTATE (mode, op0, op1); | 2939 return gen_rtx_ROTATE (mode, op0, op1); |
2943 | 2940 |
2944 case RROTATE_EXPR: | 2941 case RROTATE_EXPR: |
2945 return gen_rtx_ROTATERT (mode, op0, op1); | 2942 return gen_rtx_ROTATERT (mode, op0, op1); |
2946 | 2943 |
2947 case MIN_EXPR: | 2944 case MIN_EXPR: |
2948 if (unsignedp) | 2945 if (unsignedp) |
2949 return gen_rtx_UMIN (mode, op0, op1); | 2946 return gen_rtx_UMIN (mode, op0, op1); |
2950 else | 2947 else |
2951 return gen_rtx_SMIN (mode, op0, op1); | 2948 return gen_rtx_SMIN (mode, op0, op1); |
2952 | 2949 |
2953 case MAX_EXPR: | 2950 case MAX_EXPR: |
2954 if (unsignedp) | 2951 if (unsignedp) |
2955 return gen_rtx_UMAX (mode, op0, op1); | 2952 return gen_rtx_UMAX (mode, op0, op1); |
2956 else | 2953 else |
2957 return gen_rtx_SMAX (mode, op0, op1); | 2954 return gen_rtx_SMAX (mode, op0, op1); |
2958 | 2955 |
2959 case BIT_AND_EXPR: | 2956 case BIT_AND_EXPR: |
2960 case TRUTH_AND_EXPR: | 2957 case TRUTH_AND_EXPR: |
2961 return gen_rtx_AND (mode, op0, op1); | 2958 return gen_rtx_AND (mode, op0, op1); |
2962 | 2959 |
2977 case TRUTH_NOT_EXPR: | 2974 case TRUTH_NOT_EXPR: |
2978 return gen_rtx_EQ (mode, op0, const0_rtx); | 2975 return gen_rtx_EQ (mode, op0, const0_rtx); |
2979 | 2976 |
2980 case LT_EXPR: | 2977 case LT_EXPR: |
2981 if (unsignedp) | 2978 if (unsignedp) |
2982 return gen_rtx_LTU (mode, op0, op1); | 2979 return gen_rtx_LTU (mode, op0, op1); |
2983 else | 2980 else |
2984 return gen_rtx_LT (mode, op0, op1); | 2981 return gen_rtx_LT (mode, op0, op1); |
2985 | 2982 |
2986 case LE_EXPR: | 2983 case LE_EXPR: |
2987 if (unsignedp) | 2984 if (unsignedp) |
2988 return gen_rtx_LEU (mode, op0, op1); | 2985 return gen_rtx_LEU (mode, op0, op1); |
2989 else | 2986 else |
2990 return gen_rtx_LE (mode, op0, op1); | 2987 return gen_rtx_LE (mode, op0, op1); |
2991 | 2988 |
2992 case GT_EXPR: | 2989 case GT_EXPR: |
2993 if (unsignedp) | 2990 if (unsignedp) |
2994 return gen_rtx_GTU (mode, op0, op1); | 2991 return gen_rtx_GTU (mode, op0, op1); |
2995 else | 2992 else |
2996 return gen_rtx_GT (mode, op0, op1); | 2993 return gen_rtx_GT (mode, op0, op1); |
2997 | 2994 |
2998 case GE_EXPR: | 2995 case GE_EXPR: |
2999 if (unsignedp) | 2996 if (unsignedp) |
3000 return gen_rtx_GEU (mode, op0, op1); | 2997 return gen_rtx_GEU (mode, op0, op1); |
3001 else | 2998 else |
3002 return gen_rtx_GE (mode, op0, op1); | 2999 return gen_rtx_GE (mode, op0, op1); |
3003 | 3000 |
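The unsignedp split in these comparison cases matters because the same bit pattern orders differently under the two interpretations, and the debug expression must preserve the view the GIMPLE type specifies. A tiny C illustration (not revision code):

    #include <assert.h>

    static void
    signedness_demo (void)
    {
      int x = -1;
      assert (x < 1);                 /* LT:  signed compare is true   */
      assert (!((unsigned) x < 1u));  /* LTU: same bits compare false  */
    }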
3004 case EQ_EXPR: | 3001 case EQ_EXPR: |
3005 return gen_rtx_EQ (mode, op0, op1); | 3002 return gen_rtx_EQ (mode, op0, op1); |
3006 | 3003 |
3007 case NE_EXPR: | 3004 case NE_EXPR: |
3035 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2); | 3032 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2); |
3036 | 3033 |
3037 case COMPLEX_EXPR: | 3034 case COMPLEX_EXPR: |
3038 gcc_assert (COMPLEX_MODE_P (mode)); | 3035 gcc_assert (COMPLEX_MODE_P (mode)); |
3039 if (GET_MODE (op0) == VOIDmode) | 3036 if (GET_MODE (op0) == VOIDmode) |
3040 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0); | 3037 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0); |
3041 if (GET_MODE (op1) == VOIDmode) | 3038 if (GET_MODE (op1) == VOIDmode) |
3042 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1); | 3039 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1); |
3043 return gen_rtx_CONCAT (mode, op0, op1); | 3040 return gen_rtx_CONCAT (mode, op0, op1); |
3044 | 3041 |
3045 case CONJ_EXPR: | 3042 case CONJ_EXPR: |
3046 if (GET_CODE (op0) == CONCAT) | 3043 if (GET_CODE (op0) == CONCAT) |
3047 return gen_rtx_CONCAT (mode, XEXP (op0, 0), | 3044 return gen_rtx_CONCAT (mode, XEXP (op0, 0), |
3048 gen_rtx_NEG (GET_MODE_INNER (mode), | 3045 gen_rtx_NEG (GET_MODE_INNER (mode), |
3049 XEXP (op0, 1))); | 3046 XEXP (op0, 1))); |
3050 else | 3047 else |
3051 { | 3048 { |
3052 enum machine_mode imode = GET_MODE_INNER (mode); | 3049 enum machine_mode imode = GET_MODE_INNER (mode); |
3053 rtx re, im; | 3050 rtx re, im; |
3054 | 3051 |
3055 if (MEM_P (op0)) | 3052 if (MEM_P (op0)) |
3056 { | 3053 { |
3057 re = adjust_address_nv (op0, imode, 0); | 3054 re = adjust_address_nv (op0, imode, 0); |
3058 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode)); | 3055 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode)); |
3059 } | 3056 } |
3060 else | 3057 else |
3061 { | 3058 { |
3062 enum machine_mode ifmode = int_mode_for_mode (mode); | 3059 enum machine_mode ifmode = int_mode_for_mode (mode); |
3063 enum machine_mode ihmode = int_mode_for_mode (imode); | 3060 enum machine_mode ihmode = int_mode_for_mode (imode); |
3064 rtx halfsize; | 3061 rtx halfsize; |
3065 if (ifmode == BLKmode || ihmode == BLKmode) | 3062 if (ifmode == BLKmode || ihmode == BLKmode) |
3066 return NULL; | 3063 return NULL; |
3067 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode)); | 3064 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode)); |
3068 re = op0; | 3065 re = op0; |
3069 if (mode != ifmode) | 3066 if (mode != ifmode) |
3070 re = gen_rtx_SUBREG (ifmode, re, 0); | 3067 re = gen_rtx_SUBREG (ifmode, re, 0); |
3071 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx); | 3068 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx); |
3072 if (imode != ihmode) | 3069 if (imode != ihmode) |
3073 re = gen_rtx_SUBREG (imode, re, 0); | 3070 re = gen_rtx_SUBREG (imode, re, 0); |
3074 im = copy_rtx (op0); | 3071 im = copy_rtx (op0); |
3075 if (mode != ifmode) | 3072 if (mode != ifmode) |
3076 im = gen_rtx_SUBREG (ifmode, im, 0); | 3073 im = gen_rtx_SUBREG (ifmode, im, 0); |
3077 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize); | 3074 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize); |
3078 if (imode != ihmode) | 3075 if (imode != ihmode) |
3079 im = gen_rtx_SUBREG (imode, im, 0); | 3076 im = gen_rtx_SUBREG (imode, im, 0); |
3080 } | 3077 } |
3081 im = gen_rtx_NEG (imode, im); | 3078 im = gen_rtx_NEG (imode, im); |
3082 return gen_rtx_CONCAT (mode, re, im); | 3079 return gen_rtx_CONCAT (mode, re, im); |
3083 } | 3080 } |
3084 | 3081 |
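In ordinary terms, the CONJ_EXPR arm just ended keeps the real half and negates the imaginary half, whether the operand arrives as a CONCAT, a memory pair, or (the else branch) bit-extracted halves of an integer-mode value. A plain C99 sketch of the same operation (illustrative; the real conj() would do):

    #include <complex.h>

    static double complex
    conj_demo (double complex z)
    {
      /* The same re / -im split the RTL above builds with CONCAT
         and NEG.  */
      return creal (z) - cimag (z) * I;
    }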
3085 case ADDR_EXPR: | 3082 case ADDR_EXPR: |
3086 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | 3083 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); |
3087 if (!op0 || !MEM_P (op0)) | 3084 if (!op0 || !MEM_P (op0)) |
3088 { | 3085 { |
3117 | 3114 |
3118 return op0; | 3115 return op0; |
3119 | 3116 |
3120 case VECTOR_CST: | 3117 case VECTOR_CST: |
3121 exp = build_constructor_from_list (TREE_TYPE (exp), | 3118 exp = build_constructor_from_list (TREE_TYPE (exp), |
3122 TREE_VECTOR_CST_ELTS (exp)); | 3119 TREE_VECTOR_CST_ELTS (exp)); |
3123 /* Fall through. */ | 3120 /* Fall through. */ |
3124 | 3121 |
3125 case CONSTRUCTOR: | 3122 case CONSTRUCTOR: |
3126 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) | 3123 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) |
3127 { | 3124 { |
3152 } | 3149 } |
3153 | 3150 |
3154 return op0; | 3151 return op0; |
3155 } | 3152 } |
3156 else | 3153 else |
3157 goto flag_unsupported; | 3154 goto flag_unsupported; |
3158 | 3155 |
3159 case CALL_EXPR: | 3156 case CALL_EXPR: |
3160 /* ??? Maybe handle some builtins? */ | 3157 /* ??? Maybe handle some builtins? */ |
3161 return NULL; | 3158 return NULL; |
3162 | 3159 |
3301 flag_strict_aliasing = 0; | 3298 flag_strict_aliasing = 0; |
3302 | 3299 |
3303 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | 3300 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
3304 if (DEBUG_INSN_P (insn)) | 3301 if (DEBUG_INSN_P (insn)) |
3305 { | 3302 { |
3306 tree value = (tree)INSN_VAR_LOCATION_LOC (insn); | 3303 tree value = (tree)INSN_VAR_LOCATION_LOC (insn); |
3307 rtx val; | 3304 rtx val; |
3308 enum machine_mode mode; | 3305 enum machine_mode mode; |
3309 | 3306 |
3310 if (value == NULL_TREE) | 3307 if (value == NULL_TREE) |
3311 val = NULL_RTX; | 3308 val = NULL_RTX; |
3312 else | 3309 else |
3313 { | 3310 { |
3314 val = expand_debug_expr (value); | 3311 val = expand_debug_expr (value); |
3315 gcc_assert (last == get_last_insn ()); | 3312 gcc_assert (last == get_last_insn ()); |
3316 } | 3313 } |
3317 | 3314 |
3318 if (!val) | 3315 if (!val) |
3319 val = gen_rtx_UNKNOWN_VAR_LOC (); | 3316 val = gen_rtx_UNKNOWN_VAR_LOC (); |
3320 else | 3317 else |
3321 { | 3318 { |
3322 mode = GET_MODE (INSN_VAR_LOCATION (insn)); | 3319 mode = GET_MODE (INSN_VAR_LOCATION (insn)); |
3323 | 3320 |
3324 gcc_assert (mode == GET_MODE (val) | 3321 gcc_assert (mode == GET_MODE (val) |
3325 || (GET_MODE (val) == VOIDmode | 3322 || (GET_MODE (val) == VOIDmode |
3326 && (CONST_INT_P (val) | 3323 && (CONST_INT_P (val) |
3327 || GET_CODE (val) == CONST_FIXED | 3324 || GET_CODE (val) == CONST_FIXED |
3328 || GET_CODE (val) == CONST_DOUBLE | 3325 || GET_CODE (val) == CONST_DOUBLE |
3329 || GET_CODE (val) == LABEL_REF))); | 3326 || GET_CODE (val) == LABEL_REF))); |
3330 } | 3327 } |
3331 | 3328 |
3332 INSN_VAR_LOCATION_LOC (insn) = val; | 3329 INSN_VAR_LOCATION_LOC (insn) = val; |
3333 } | 3330 } |
3334 | 3331 |
3335 flag_strict_aliasing = save_strict_alias; | 3332 flag_strict_aliasing = save_strict_alias; |
3336 } | 3333 } |
3337 | 3334 |
3348 edge_iterator ei; | 3345 edge_iterator ei; |
3349 void **elt; | 3346 void **elt; |
3350 | 3347 |
3351 if (dump_file) | 3348 if (dump_file) |
3352 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", | 3349 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", |
3353 bb->index); | 3350 bb->index); |
3354 | 3351 |
3355 /* Note that since we are now transitioning from GIMPLE to RTL, we | 3352 /* Note that since we are now transitioning from GIMPLE to RTL, we |
3356 cannot use the gsi_*_bb() routines because they expect the basic | 3353 cannot use the gsi_*_bb() routines because they expect the basic |
3357 block to be in GIMPLE, instead of RTL. Therefore, we need to | 3354 block to be in GIMPLE, instead of RTL. Therefore, we need to |
3358 access the BB sequence directly. */ | 3355 access the BB sequence directly. */ |
3372 | 3369 |
3373 gcc_assert (single_succ_p (bb)); | 3370 gcc_assert (single_succ_p (bb)); |
3374 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); | 3371 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); |
3375 | 3372 |
3376 if (bb->next_bb == EXIT_BLOCK_PTR | 3373 if (bb->next_bb == EXIT_BLOCK_PTR |
3377 && !gimple_return_retval (ret_stmt)) | 3374 && !gimple_return_retval (ret_stmt)) |
3378 { | 3375 { |
3379 gsi_remove (&gsi, false); | 3376 gsi_remove (&gsi, false); |
3380 single_succ_edge (bb)->flags |= EDGE_FALLTHRU; | 3377 single_succ_edge (bb)->flags |= EDGE_FALLTHRU; |
3381 } | 3378 } |
3382 } | 3379 } |
3383 | 3380 |
3384 gsi = gsi_start (stmts); | 3381 gsi = gsi_start (stmts); |
3385 if (!gsi_end_p (gsi)) | 3382 if (!gsi_end_p (gsi)) |
3386 { | 3383 { |
3387 stmt = gsi_stmt (gsi); | 3384 stmt = gsi_stmt (gsi); |
3388 if (gimple_code (stmt) != GIMPLE_LABEL) | 3385 if (gimple_code (stmt) != GIMPLE_LABEL) |
3389 stmt = NULL; | 3386 stmt = NULL; |
3390 } | 3387 } |
3391 | 3388 |
3392 elt = pointer_map_contains (lab_rtx_for_bb, bb); | 3389 elt = pointer_map_contains (lab_rtx_for_bb, bb); |
3393 | 3390 |
3394 if (stmt || elt) | 3391 if (stmt || elt) |
3395 { | 3392 { |
3396 last = get_last_insn (); | 3393 last = get_last_insn (); |
3397 | 3394 |
3398 if (stmt) | 3395 if (stmt) |
3399 { | 3396 { |
3400 expand_gimple_stmt (stmt); | 3397 expand_gimple_stmt (stmt); |
3401 gsi_next (&gsi); | 3398 gsi_next (&gsi); |
3402 } | 3399 } |
3403 | 3400 |
3404 if (elt) | 3401 if (elt) |
3405 emit_label ((rtx) *elt); | 3402 emit_label ((rtx) *elt); |
3406 | 3403 |
3407 /* Java emits line number notes at the top of labels. | 3404 /* Java emits line number notes at the top of labels. |
3408 ??? Make this go away once line number notes are obsoleted. */ | 3405 ??? Make this go away once line number notes are obsoleted. */ |
3409 BB_HEAD (bb) = NEXT_INSN (last); | 3406 BB_HEAD (bb) = NEXT_INSN (last); |
3410 if (NOTE_P (BB_HEAD (bb))) | 3407 if (NOTE_P (BB_HEAD (bb))) |
3411 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); | 3408 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); |
3412 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); | 3409 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); |
3413 | 3410 |
3414 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 3411 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
3415 } | 3412 } |
3416 else | 3413 else |
3523 } | 3520 } |
3524 | 3521 |
3525 currently_expanding_gimple_stmt = stmt; | 3522 currently_expanding_gimple_stmt = stmt; |
3526 | 3523 |
3527 /* Expand this statement, then evaluate the resulting RTL and | 3524 /* Expand this statement, then evaluate the resulting RTL and |
3528 fixup the CFG accordingly. */ | 3525 fixup the CFG accordingly. */ |
3529 if (gimple_code (stmt) == GIMPLE_COND) | 3526 if (gimple_code (stmt) == GIMPLE_COND) |
3530 { | 3527 { |
3531 new_bb = expand_gimple_cond (bb, stmt); | 3528 new_bb = expand_gimple_cond (bb, stmt); |
3532 if (new_bb) | 3529 if (new_bb) |
3533 return new_bb; | 3530 return new_bb; |
3534 } | 3531 } |
3535 else if (gimple_debug_bind_p (stmt)) | 3532 else if (gimple_debug_bind_p (stmt)) |
3536 { | 3533 { |
3537 location_t sloc = get_curr_insn_source_location (); | 3534 location_t sloc = get_curr_insn_source_location (); |
3538 tree sblock = get_curr_insn_block (); | 3535 tree sblock = get_curr_insn_block (); |
3539 gimple_stmt_iterator nsi = gsi; | 3536 gimple_stmt_iterator nsi = gsi; |
3592 | 3589 |
3593 set_curr_insn_source_location (sloc); | 3590 set_curr_insn_source_location (sloc); |
3594 set_curr_insn_block (sblock); | 3591 set_curr_insn_block (sblock); |
3595 } | 3592 } |
3596 else | 3593 else |
3597 { | 3594 { |
3598 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) | 3595 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) |
3599 { | 3596 { |
3600 bool can_fallthru; | 3597 bool can_fallthru; |
3601 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); | 3598 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); |
3602 if (new_bb) | 3599 if (new_bb) |
3603 { | 3600 { |
3604 if (can_fallthru) | 3601 if (can_fallthru) |
3605 bb = new_bb; | 3602 bb = new_bb; |
3606 else | 3603 else |
3607 return new_bb; | 3604 return new_bb; |
3608 } | 3605 } |
3609 } | 3606 } |
3610 else | 3607 else |
3611 { | 3608 { |
3612 def_operand_p def_p; | 3609 def_operand_p def_p; |
3613 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF); | 3610 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF); |
3614 | 3611 |
3615 if (def_p != NULL) | 3612 if (def_p != NULL) |
3616 { | 3613 { |
3617 /* Ignore this stmt if it is in the list of | 3614 /* Ignore this stmt if it is in the list of |
3618 replaceable expressions. */ | 3615 replaceable expressions. */ |
3619 if (SA.values | 3616 if (SA.values |
3620 && bitmap_bit_p (SA.values, | 3617 && bitmap_bit_p (SA.values, |
3621 SSA_NAME_VERSION (DEF_FROM_PTR (def_p)))) | 3618 SSA_NAME_VERSION (DEF_FROM_PTR (def_p)))) |
3622 continue; | 3619 continue; |
3623 } | 3620 } |
3624 last = expand_gimple_stmt (stmt); | 3621 last = expand_gimple_stmt (stmt); |
3625 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 3622 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
3626 } | 3623 } |
3627 } | 3624 } |
3628 } | 3625 } |
3629 | 3626 |
3630 currently_expanding_gimple_stmt = NULL; | 3627 currently_expanding_gimple_stmt = NULL; |
3631 | 3628 |
3632 /* Expand implicit goto and convert goto_locus. */ | 3629 /* Expand implicit goto and convert goto_locus. */ |
3633 FOR_EACH_EDGE (e, ei, bb->succs) | 3630 FOR_EACH_EDGE (e, ei, bb->succs) |
3634 { | 3631 { |
3635 if (e->goto_locus && e->goto_block) | 3632 if (e->goto_locus && e->goto_block) |
3636 { | 3633 { |
3637 set_curr_insn_source_location (e->goto_locus); | 3634 set_curr_insn_source_location (e->goto_locus); |
3638 set_curr_insn_block (e->goto_block); | 3635 set_curr_insn_block (e->goto_block); |
3639 e->goto_locus = curr_insn_locator (); | 3636 e->goto_locus = curr_insn_locator (); |
3640 } | 3637 } |
3641 e->goto_block = NULL; | 3638 e->goto_block = NULL; |
3642 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) | 3639 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) |
3643 { | 3640 { |
3644 emit_jump (label_rtx_for_bb (e->dest)); | 3641 emit_jump (label_rtx_for_bb (e->dest)); |
3645 e->flags &= ~EDGE_FALLTHRU; | 3642 e->flags &= ~EDGE_FALLTHRU; |
3646 } | 3643 } |
3647 } | 3644 } |
3648 | 3645 |
3649 /* Expanded RTL can create a jump in the last instruction of a block. | 3646 /* Expanded RTL can create a jump in the last instruction of a block. |
3650 Such a jump might later be assumed to be a jump to the successor and break edge insertion. | 3647 Such a jump might later be assumed to be a jump to the successor and break edge insertion. |
3651 We need to insert a dummy move to prevent this. PR41440. */ | 3648 We need to insert a dummy move to prevent this. PR41440. */ |
3704 } | 3701 } |
3705 else | 3702 else |
3706 flags = EDGE_FALLTHRU; | 3703 flags = EDGE_FALLTHRU; |
3707 | 3704 |
3708 init_block = create_basic_block (NEXT_INSN (get_insns ()), | 3705 init_block = create_basic_block (NEXT_INSN (get_insns ()), |
3709 get_last_insn (), | 3706 get_last_insn (), |
3710 ENTRY_BLOCK_PTR); | 3707 ENTRY_BLOCK_PTR); |
3711 init_block->frequency = ENTRY_BLOCK_PTR->frequency; | 3708 init_block->frequency = ENTRY_BLOCK_PTR->frequency; |
3712 init_block->count = ENTRY_BLOCK_PTR->count; | 3709 init_block->count = ENTRY_BLOCK_PTR->count; |
3713 if (e) | 3710 if (e) |
3714 { | 3711 { |
3715 first_block = e->dest; | 3712 first_block = e->dest; |
3772 */ | 3769 */ |
3773 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; | 3770 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; |
3774 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) | 3771 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) |
3775 head = NEXT_INSN (head); | 3772 head = NEXT_INSN (head); |
3776 exit_block = create_basic_block (NEXT_INSN (head), end, | 3773 exit_block = create_basic_block (NEXT_INSN (head), end, |
3777 EXIT_BLOCK_PTR->prev_bb); | 3774 EXIT_BLOCK_PTR->prev_bb); |
3778 exit_block->frequency = EXIT_BLOCK_PTR->frequency; | 3775 exit_block->frequency = EXIT_BLOCK_PTR->frequency; |
3779 exit_block->count = EXIT_BLOCK_PTR->count; | 3776 exit_block->count = EXIT_BLOCK_PTR->count; |
3780 | 3777 |
3781 ix = 0; | 3778 ix = 0; |
3782 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) | 3779 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) |
3783 { | 3780 { |
3784 e = EDGE_PRED (EXIT_BLOCK_PTR, ix); | 3781 e = EDGE_PRED (EXIT_BLOCK_PTR, ix); |
3785 if (!(e->flags & EDGE_ABNORMAL)) | 3782 if (!(e->flags & EDGE_ABNORMAL)) |
3786 redirect_edge_succ (e, exit_block); | 3783 redirect_edge_succ (e, exit_block); |
3787 else | 3784 else |
3788 ix++; | 3785 ix++; |
3789 } | 3786 } |
3790 | 3787 |
3791 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); | 3788 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); |
3792 e->probability = REG_BR_PROB_BASE; | 3789 e->probability = REG_BR_PROB_BASE; |
3793 e->count = EXIT_BLOCK_PTR->count; | 3790 e->count = EXIT_BLOCK_PTR->count; |
3794 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) | 3791 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) |
3795 if (e2 != e) | 3792 if (e2 != e) |
3796 { | 3793 { |
3797 e->count -= e2->count; | 3794 e->count -= e2->count; |
3798 exit_block->count -= e2->count; | 3795 exit_block->count -= e2->count; |
3799 exit_block->frequency -= EDGE_FREQUENCY (e2); | 3796 exit_block->frequency -= EDGE_FREQUENCY (e2); |
3800 } | 3797 } |
3801 if (e->count < 0) | 3798 if (e->count < 0) |
3802 e->count = 0; | 3799 e->count = 0; |
3803 if (exit_block->count < 0) | 3800 if (exit_block->count < 0) |
3804 exit_block->count = 0; | 3801 exit_block->count = 0; |
3811 Look for ARRAY_REF nodes with non-constant indexes and mark them | 3808 Look for ARRAY_REF nodes with non-constant indexes and mark them |
3812 addressable. */ | 3809 addressable. */ |
3813 | 3810 |
3814 static tree | 3811 static tree |
3815 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees, | 3812 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees, |
3816 void *data ATTRIBUTE_UNUSED) | 3813 void *data ATTRIBUTE_UNUSED) |
3817 { | 3814 { |
3818 tree t = *tp; | 3815 tree t = *tp; |
3819 | 3816 |
3820 if (IS_TYPE_OR_DECL_P (t)) | 3817 if (IS_TYPE_OR_DECL_P (t)) |
3821 *walk_subtrees = 0; | 3818 *walk_subtrees = 0; |
3822 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | 3819 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
3823 { | 3820 { |
3824 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | 3821 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
3825 && is_gimple_min_invariant (TREE_OPERAND (t, 1)) | 3822 && is_gimple_min_invariant (TREE_OPERAND (t, 1)) |
3826 && (!TREE_OPERAND (t, 2) | 3823 && (!TREE_OPERAND (t, 2) |
3827 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | 3824 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) |
3828 || (TREE_CODE (t) == COMPONENT_REF | 3825 || (TREE_CODE (t) == COMPONENT_REF |
3829 && (!TREE_OPERAND (t,2) | 3826 && (!TREE_OPERAND (t,2) |
3830 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | 3827 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) |
3831 || TREE_CODE (t) == BIT_FIELD_REF | 3828 || TREE_CODE (t) == BIT_FIELD_REF |
3832 || TREE_CODE (t) == REALPART_EXPR | 3829 || TREE_CODE (t) == REALPART_EXPR |
3833 || TREE_CODE (t) == IMAGPART_EXPR | 3830 || TREE_CODE (t) == IMAGPART_EXPR |
3834 || TREE_CODE (t) == VIEW_CONVERT_EXPR | 3831 || TREE_CODE (t) == VIEW_CONVERT_EXPR |
3835 || CONVERT_EXPR_P (t)) | 3832 || CONVERT_EXPR_P (t)) |
3836 t = TREE_OPERAND (t, 0); | 3833 t = TREE_OPERAND (t, 0); |
3837 | 3834 |
3838 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | 3835 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
3839 { | 3836 { |
3840 t = get_base_address (t); | 3837 t = get_base_address (t); |
3841 if (t && DECL_P (t) | 3838 if (t && DECL_P (t) |
3842 && DECL_MODE (t) != BLKmode) | 3839 && DECL_MODE (t) != BLKmode) |
3843 TREE_ADDRESSABLE (t) = 1; | 3840 TREE_ADDRESSABLE (t) = 1; |
3844 } | 3841 } |
3845 | 3842 |
3846 *walk_subtrees = 0; | 3843 *walk_subtrees = 0; |
3847 } | 3844 } |
3848 | 3845 |
3849 return NULL_TREE; | 3846 return NULL_TREE; |
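Concretely, the walk above fires on array references whose index is not a gimple invariant; the base decl then gets TREE_ADDRESSABLE set, provided its DECL_MODE is not already BLKmode (large aggregates live in memory anyway). A source-level illustration (whether v actually gets a scalar mode is target-dependent):

    int
    pick (int i)
    {
      int v[2] = { 1, 2 };
      return v[i];   /* non-invariant ARRAY_REF: v must be addressable */
    }
    /* return v[1]; alone would leave v eligible for registers.  */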
3913 crtl->stack_alignment_estimated = preferred_stack_boundary; | 3910 crtl->stack_alignment_estimated = preferred_stack_boundary; |
3914 if (preferred_stack_boundary > crtl->stack_alignment_needed) | 3911 if (preferred_stack_boundary > crtl->stack_alignment_needed) |
3915 crtl->stack_alignment_needed = preferred_stack_boundary; | 3912 crtl->stack_alignment_needed = preferred_stack_boundary; |
3916 | 3913 |
3917 gcc_assert (crtl->stack_alignment_needed | 3914 gcc_assert (crtl->stack_alignment_needed |
3918 <= crtl->stack_alignment_estimated); | 3915 <= crtl->stack_alignment_estimated); |
3919 | 3916 |
3920 crtl->stack_realign_needed | 3917 crtl->stack_realign_needed |
3921 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated; | 3918 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated; |
3922 crtl->stack_realign_tried = crtl->stack_realign_needed; | 3919 crtl->stack_realign_tried = crtl->stack_realign_needed; |
3923 | 3920 |
3963 | 3960 |
3964 timevar_push (TV_OUT_OF_SSA); | 3961 timevar_push (TV_OUT_OF_SSA); |
3965 rewrite_out_of_ssa (&SA); | 3962 rewrite_out_of_ssa (&SA); |
3966 timevar_pop (TV_OUT_OF_SSA); | 3963 timevar_pop (TV_OUT_OF_SSA); |
3967 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions, | 3964 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions, |
3968 sizeof (rtx)); | 3965 sizeof (rtx)); |
3969 | 3966 |
3970 /* Some backends want to know that we are expanding to RTL. */ | 3967 /* Some backends want to know that we are expanding to RTL. */ |
3971 currently_expanding_to_rtl = 1; | 3968 currently_expanding_to_rtl = 1; |
3972 | 3969 |
3973 rtl_profile_for_bb (ENTRY_BLOCK_PTR); | 3970 rtl_profile_for_bb (ENTRY_BLOCK_PTR); |
4054 for (i = 0; i < SA.map->num_partitions; i++) | 4051 for (i = 0; i < SA.map->num_partitions; i++) |
4055 { | 4052 { |
4056 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i)); | 4053 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i)); |
4057 | 4054 |
4058 if (TREE_CODE (var) != VAR_DECL | 4055 if (TREE_CODE (var) != VAR_DECL |
4059 && !SA.partition_to_pseudo[i]) | 4056 && !SA.partition_to_pseudo[i]) |
4060 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var); | 4057 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var); |
4061 gcc_assert (SA.partition_to_pseudo[i]); | 4058 gcc_assert (SA.partition_to_pseudo[i]); |
4062 | 4059 |
4063 /* If this decl was marked as living in multiple places, reset | 4060 /* If this decl was marked as living in multiple places, reset |
4064 this now to NULL. */ | 4061 this now to NULL. */ |
4065 if (DECL_RTL_IF_SET (var) == pc_rtx) | 4062 if (DECL_RTL_IF_SET (var) == pc_rtx) |
4066 SET_DECL_RTL (var, NULL); | 4063 SET_DECL_RTL (var, NULL); |
4067 | 4064 |
4068 /* Some RTL parts really want to look at DECL_RTL(x) when x | 4065 /* Some RTL parts really want to look at DECL_RTL(x) when x |
4069 was a decl marked in REG_ATTR or MEM_ATTR. We could use | 4066 was a decl marked in REG_ATTR or MEM_ATTR. We could use |
4070 SET_DECL_RTL here making this available, but that would mean | 4067 SET_DECL_RTL here making this available, but that would mean |
4071 to select one of the potentially many RTLs for one DECL. Instead | 4068 to select one of the potentially many RTLs for one DECL. Instead |
4072 of doing that we simply reset the MEM_EXPR of the RTL in question, | 4069 of doing that we simply reset the MEM_EXPR of the RTL in question, |
4073 then nobody can get at it and hence nobody can call DECL_RTL on it. */ | 4070 then nobody can get at it and hence nobody can call DECL_RTL on it. */ |
4074 if (!DECL_RTL_SET_P (var)) | 4071 if (!DECL_RTL_SET_P (var)) |
4075 { | 4072 { |
4076 if (MEM_P (SA.partition_to_pseudo[i])) | 4073 if (MEM_P (SA.partition_to_pseudo[i])) |
4077 set_mem_expr (SA.partition_to_pseudo[i], NULL); | 4074 set_mem_expr (SA.partition_to_pseudo[i], NULL); |
4078 } | 4075 } |
4079 } | 4076 } |
4080 | 4077 |
4081 /* If this function is `main', emit a call to `__main' | 4078 /* If this function is `main', emit a call to `__main' |
4082 to run global initializers, etc. */ | 4079 to run global initializers, etc. */ |
4083 if (DECL_NAME (current_function_decl) | 4080 if (DECL_NAME (current_function_decl) |
4163 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb) | 4160 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb) |
4164 { | 4161 { |
4165 edge e; | 4162 edge e; |
4166 edge_iterator ei; | 4163 edge_iterator ei; |
4167 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | 4164 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
4168 { | 4165 { |
4169 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */ | 4166 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */ |
4170 e->flags &= ~EDGE_EXECUTABLE; | 4167 e->flags &= ~EDGE_EXECUTABLE; |
4171 | 4168 |
4172 /* At the moment not all abnormal edges match the RTL | 4169 /* At the moment not all abnormal edges match the RTL |
4173 representation. It is safe to remove them here as | 4170 representation. It is safe to remove them here as |
4174 find_many_sub_basic_blocks will rediscover them. | 4171 find_many_sub_basic_blocks will rediscover them. |
4175 In the future we should get this fixed properly. */ | 4172 In the future we should get this fixed properly. */ |
4176 if ((e->flags & EDGE_ABNORMAL) | 4173 if ((e->flags & EDGE_ABNORMAL) |
4177 && !(e->flags & EDGE_SIBCALL)) | 4174 && !(e->flags & EDGE_SIBCALL)) |
4178 remove_edge (e); | 4175 remove_edge (e); |
4179 else | 4176 else |
4180 ei_next (&ei); | 4177 ei_next (&ei); |
4181 } | 4178 } |
4182 } | 4179 } |
4183 | 4180 |
4184 blocks = sbitmap_alloc (last_basic_block); | 4181 blocks = sbitmap_alloc (last_basic_block); |
4185 sbitmap_ones (blocks); | 4182 sbitmap_ones (blocks); |
4186 find_many_sub_basic_blocks (blocks); | 4183 find_many_sub_basic_blocks (blocks); |
4204 generating_concat_p = 0; | 4201 generating_concat_p = 0; |
4205 | 4202 |
4206 if (dump_file) | 4203 if (dump_file) |
4207 { | 4204 { |
4208 fprintf (dump_file, | 4205 fprintf (dump_file, |
4209 "\n\n;;\n;; Full RTL generated for this function:\n;;\n"); | 4206 "\n\n;;\n;; Full RTL generated for this function:\n;;\n"); |
4210 /* And the pass manager will dump RTL for us. */ | 4207 /* And the pass manager will dump RTL for us. */ |
4211 } | 4208 } |
4212 | 4209 |
4213 /* If we're emitting a nested function, make sure its parent gets | 4210 /* If we're emitting a nested function, make sure its parent gets |
4214 emitted as well. Doing otherwise confuses debug info. */ | 4211 emitted as well. Doing otherwise confuses debug info. */ |
4215 { | 4212 { |
4216 tree parent; | 4213 tree parent; |
4217 for (parent = DECL_CONTEXT (current_function_decl); | 4214 for (parent = DECL_CONTEXT (current_function_decl); |
4218 parent != NULL_TREE; | 4215 parent != NULL_TREE; |
4219 parent = get_containing_scope (parent)) | 4216 parent = get_containing_scope (parent)) |
4220 if (TREE_CODE (parent) == FUNCTION_DECL) | 4217 if (TREE_CODE (parent) == FUNCTION_DECL) |
4221 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; | 4218 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; |
4222 } | 4219 } |
4223 | 4220 |
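The parent-marking loop above matters wherever a local function's scope lives inside another function's; GNU C's nested-function extension is the simplest way to see the shape (illustrative source, GCC-specific):

    int
    outer (int n)
    {
      int inner (int k) { return k + n; }   /* GNU C nested function */
      return inner (1);
    }
    /* When inner is emitted, outer's assembler name is marked as
       referenced too, so the debug info keeps the enclosing scope.  */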
4224 /* We are now committed to emitting code for this function. Do any | 4221 /* We are now committed to emitting code for this function. Do any |
4225 preparation, such as emitting abstract debug info for the inline | 4222 preparation, such as emitting abstract debug info for the inline |
4226 before it gets mangled by optimization. */ | 4223 before it gets mangled by optimization. */ |
4242 | 4239 |
4243 struct rtl_opt_pass pass_expand = | 4240 struct rtl_opt_pass pass_expand = |
4244 { | 4241 { |
4245 { | 4242 { |
4246 RTL_PASS, | 4243 RTL_PASS, |
4247 "expand", /* name */ | 4244 "expand", /* name */ |
4248 NULL, /* gate */ | 4245 NULL, /* gate */ |
4249 gimple_expand_cfg, /* execute */ | 4246 gimple_expand_cfg, /* execute */ |
4250 NULL, /* sub */ | 4247 NULL, /* sub */ |
4251 NULL, /* next */ | 4248 NULL, /* next */ |
4252 0, /* static_pass_number */ | 4249 0, /* static_pass_number */ |
4253 TV_EXPAND, /* tv_id */ | 4250 TV_EXPAND, /* tv_id */ |
4254 PROP_ssa | PROP_gimple_leh | PROP_cfg | 4251 PROP_ssa | PROP_gimple_leh | PROP_cfg |
4255 | PROP_gimple_lcx, /* properties_required */ | 4252 | PROP_gimple_lcx, /* properties_required */ |
4256 PROP_rtl, /* properties_provided */ | 4253 PROP_rtl, /* properties_provided */ |
4257 PROP_ssa | PROP_trees, /* properties_destroyed */ | 4254 PROP_ssa | PROP_trees, /* properties_destroyed */ |
4258 TODO_verify_ssa | TODO_verify_flow | 4255 TODO_verify_ssa | TODO_verify_flow |
4259 | TODO_verify_stmts, /* todo_flags_start */ | 4256 | TODO_verify_stmts, /* todo_flags_start */ |
4260 TODO_dump_func | 4257 TODO_dump_func |
4261 | TODO_ggc_collect /* todo_flags_finish */ | 4258 | TODO_ggc_collect /* todo_flags_finish */ |
4262 } | 4259 } |
4263 }; | 4260 }; |