diff gcc/cfgexpand.c @ 70:b81903832de2
merge c-decl.c
author      Nobuyasu Oshiro <dimolto@cr.ie.u-ryukyu.ac.jp>
date        Sun, 21 Aug 2011 09:24:16 +0900
parents     1b10fe6932e1
children    ce75bd9117e4
--- a/gcc/cfgexpand.c	Sun Aug 21 07:53:12 2011 +0900
+++ b/gcc/cfgexpand.c	Sun Aug 21 09:24:16 2011 +0900
@@ -44,9 +44,6 @@
 #include "tree-inline.h"
 #include "value-prof.h"
 #include "target.h"
-#ifndef noCbC
-#include "cbc-tree.h"
-#endif
 #include "ssaexpand.h"
 #include "bitmap.h"
 #include "sbitmap.h"
@@ -79,13 +76,13 @@
 gimple_assign_rhs3 (stmt));
 else if (grhs_class == GIMPLE_BINARY_RHS)
 t = build2 (gimple_assign_rhs_code (stmt),
- TREE_TYPE (gimple_assign_lhs (stmt)),
- gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt));
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt));
 else if (grhs_class == GIMPLE_UNARY_RHS)
 t = build1 (gimple_assign_rhs_code (stmt),
- TREE_TYPE (gimple_assign_lhs (stmt)),
- gimple_assign_rhs1 (stmt));
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt));
 else if (grhs_class == GIMPLE_SINGLE_RHS)
 {
 t = gimple_assign_rhs1 (stmt);
@@ -126,29 +123,29 @@
 {
 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
 if (x && !MEM_P (x))
- set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
+ set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
 /* For the benefit of debug information at -O0 (where vartracking
 doesn't run) record the place also in the base DECL if it's
- a normal variable (not a parameter). */
+ a normal variable (not a parameter). */
 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
- {
- tree var = SSA_NAME_VAR (t);
- /* If we don't yet have something recorded, just record it now. */
- if (!DECL_RTL_SET_P (var))
- SET_DECL_RTL (var, x);
- /* If we have it set alrady to "multiple places" don't
- change this. */
- else if (DECL_RTL (var) == pc_rtx)
- ;
- /* If we have something recorded and it's not the same place
- as we want to record now, we have multiple partitions for the
- same base variable, with different places. We can't just
- randomly chose one, hence we have to say that we don't know.
- This only happens with optimization, and there var-tracking
- will figure out the right thing. */
- else if (DECL_RTL (var) != x)
- SET_DECL_RTL (var, pc_rtx);
- }
+ {
+ tree var = SSA_NAME_VAR (t);
+ /* If we don't yet have something recorded, just record it now. */
+ if (!DECL_RTL_SET_P (var))
+ SET_DECL_RTL (var, x);
+ /* If we have it set alrady to "multiple places" don't
+ change this. */
+ else if (DECL_RTL (var) == pc_rtx)
+ ;
+ /* If we have something recorded and it's not the same place
+ as we want to record now, we have multiple partitions for the
+ same base variable, with different places. We can't just
+ randomly chose one, hence we have to say that we don't know.
+ This only happens with optimization, and there var-tracking
+ will figure out the right thing. */
+ else if (DECL_RTL (var) != x)
+ SET_DECL_RTL (var, pc_rtx);
+ }
 }
 else
 SET_DECL_RTL (t, x);
@@ -261,11 +258,11 @@
 if (stack_vars_num >= stack_vars_alloc)
 {
 if (stack_vars_alloc)
- stack_vars_alloc = stack_vars_alloc * 3 / 2;
+ stack_vars_alloc = stack_vars_alloc * 3 / 2;
 else
- stack_vars_alloc = 32;
+ stack_vars_alloc = 32;
 stack_vars
- = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
+ = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
 }
 v = &stack_vars[stack_vars_num];
@@ -336,7 +333,7 @@
 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
 if (TREE_CODE (field) == FIELD_DECL)
 if (aggregate_contains_union_type (TREE_TYPE (field)))
- return true;
+ return true;
 return false;
 }
@@ -364,21 +361,21 @@
 contains_union = aggregate_contains_union_type (type_i);
 for (j = 0; j < i; ++j)
- {
- tree type_j = TREE_TYPE (stack_vars[j].decl);
- bool aggr_j = AGGREGATE_TYPE_P (type_j);
- if (aggr_i != aggr_j
- /* Either the objects conflict by means of type based
- aliasing rules, or we need to add a conflict. */
- || !objects_must_conflict_p (type_i, type_j)
- /* In case the types do not conflict ensure that access
- to elements will conflict. In case of unions we have
- to be careful as type based aliasing rules may say
- access to the same memory does not conflict. So play
- safe and add a conflict in this case. */
- || contains_union)
- add_stack_var_conflict (i, j);
- }
+ {
+ tree type_j = TREE_TYPE (stack_vars[j].decl);
+ bool aggr_j = AGGREGATE_TYPE_P (type_j);
+ if (aggr_i != aggr_j
+ /* Either the objects conflict by means of type based
+ aliasing rules, or we need to add a conflict. */
+ || !objects_must_conflict_p (type_i, type_j)
+ /* In case the types do not conflict ensure that access
+ to elements will conflict. In case of unions we have
+ to be careful as type based aliasing rules may say
+ access to the same memory does not conflict. So play
+ safe and add a conflict in this case. */
+ || contains_union)
+ add_stack_var_conflict (i, j);
+ }
 }
 }
@@ -423,9 +420,9 @@
 if (TREE_CODE (decla) == SSA_NAME)
 {
 if (TREE_CODE (declb) == SSA_NAME)
- uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
+ uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
 else
- return -1;
+ return -1;
 }
 else if (TREE_CODE (declb) == SSA_NAME)
 return 1;
@@ -445,8 +442,8 @@
 static void
 add_partitioned_vars_to_ptset (struct pt_solution *pt,
- struct pointer_map_t *decls_to_partitions,
- struct pointer_set_t *visited, bitmap temp)
+ struct pointer_map_t *decls_to_partitions,
+ struct pointer_set_t *visited, bitmap temp)
 {
 bitmap_iterator bi;
 unsigned i;
@@ -455,7 +452,7 @@
 if (pt->anything
 || pt->vars == NULL
 /* The pointed-to vars bitmap is shared, it is enough to
- visit it once. */
+ visit it once. */
 || pointer_set_insert(visited, pt->vars))
 return;
@@ -466,9 +463,9 @@
 once. */
 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
 if ((!temp
- || !bitmap_bit_p (temp, i))
- && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
- (void *)(size_t) i)))
+ || !bitmap_bit_p (temp, i))
+ && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
+ (void *)(size_t) i)))
 bitmap_ior_into (temp, *part);
 if (!bitmap_empty_p (temp))
 bitmap_ior_into (pt->vars, temp);
@@ -498,16 +495,16 @@
 continue;
 if (!decls_to_partitions)
- {
- decls_to_partitions = pointer_map_create ();
- cfun->gimple_df->decls_to_pointers = pointer_map_create ();
- }
+ {
+ decls_to_partitions = pointer_map_create ();
+ cfun->gimple_df->decls_to_pointers = pointer_map_create ();
+ }
 /* Create an SSA_NAME that points to the partition for use
 as base during alias-oracle queries on RTL for bases that
- have been partitioned. */
+ have been partitioned. */
 if (var == NULL_TREE)
- var = create_tmp_var (ptr_type_node, NULL);
+ var = create_tmp_var (ptr_type_node, NULL);
 name = make_ssa_name (var, NULL);
 /* Create bitmaps representing partitions. They will be used for
@@ -545,16 +542,16 @@
 bitmap temp = BITMAP_ALLOC (NULL);
 for (i = 1; i < num_ssa_names; i++)
- {
- tree name = ssa_name (i);
- struct ptr_info_def *pi;
-
- if (name
- && POINTER_TYPE_P (TREE_TYPE (name))
- && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
- add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
- visited, temp);
- }
+ {
+ tree name = ssa_name (i);
+ struct ptr_info_def *pi;
+
+ if (name
+ && POINTER_TYPE_P (TREE_TYPE (name))
+ && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
+ add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
+ visited, temp);
+ }
 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
 decls_to_partitions, visited, temp);
@@ -599,7 +596,7 @@
 if (vb->conflicts)
 {
 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
- add_stack_var_conflict (a, stack_vars[u].representative);
+ add_stack_var_conflict (a, stack_vars[u].representative);
 BITMAP_FREE (vb->conflicts);
 }
 }
@@ -608,18 +605,18 @@
 partitions constrained by the interference graph. The overall
 algorithm used is as follows:
- Sort the objects by size.
- For each object A {
- S = size(A)
- O = 0
- loop {
- Look for the largest non-conflicting object B with size <= S.
- UNION (A, B)
- offset(B) = O
- O += size(B)
- S -= size(B)
- }
- }
+ Sort the objects by size.
+ For each object A {
+ S = size(A)
+ O = 0
+ loop {
+ Look for the largest non-conflicting object B with size <= S.
+ UNION (A, B)
+ offset(B) = O
+ O += size(B)
+ S -= size(B)
+ }
+ }
 */
 static void
@@ -705,19 +702,19 @@
 /* Skip variables that aren't partition representatives, for now. */
 if (stack_vars[i].representative != i)
- continue;
+ continue;
 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
- " align %u\n", (unsigned long) i, stack_vars[i].size,
- stack_vars[i].alignb);
+ " align %u\n", (unsigned long) i, stack_vars[i].size,
+ stack_vars[i].alignb);
 for (j = i; j != EOC; j = stack_vars[j].next)
- {
- fputc ('\t', dump_file);
- print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
- fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
- stack_vars[j].offset);
- }
+ {
+ fputc ('\t', dump_file);
+ print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
+ fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
+ stack_vars[j].offset);
+ }
 }
 }
@@ -824,7 +821,7 @@
 /* Skip variables that aren't partition representatives, for now. */
 if (stack_vars[i].representative != i)
- continue;
+ continue;
 /* Skip variables that have already had rtl assigned. See also
 add_stack_var where we perpetrate this pc_rtx hack. */
@@ -863,7 +860,7 @@
 }
 /* Create rtl for each variable based on their location within the
- partition. */
+ partition. */
 for (j = i; j != EOC; j = stack_vars[j].next)
 {
 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
@@ -889,11 +886,11 @@
 /* Skip variables that aren't partition representatives, for now. */
 if (stack_vars[i].representative != i)
- continue;
+ continue;
 size += stack_vars[i].size;
 for (j = i; j != EOC; j = stack_vars[j].next)
- set_rtl (stack_vars[j].decl, NULL);
+ set_rtl (stack_vars[j].decl, NULL);
 }
 return size;
 }
@@ -1027,10 +1024,10 @@
 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
 {
 /* Because we don't know if VAR will be in register or on stack,
- we conservatively assume it will be on stack even if VAR is
- eventually put into register after RA pass. For non-automatic
- variables, which won't be on stack, we collect alignment of
- type and ignore user specified alignment. */
+ we conservatively assume it will be on stack even if VAR is
+ eventually put into register after RA pass. For non-automatic
+ variables, which won't be on stack, we collect alignment of
+ type and ignore user specified alignment. */
 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
 TYPE_MODE (TREE_TYPE (var)),
@@ -1043,7 +1040,7 @@
 changed from the offset chosen to it. */
 align = crtl->stack_alignment_estimated;
 else
- align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
+ align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
 /* If the variable alignment is very large we'll dynamicaly allocate
 it, which means that in-frame portion is just a pointer. */
@@ -1070,12 +1067,12 @@
 if (TREE_CODE (origvar) == SSA_NAME)
 {
 gcc_assert (TREE_CODE (var) != VAR_DECL
- || (!DECL_EXTERNAL (var)
- && !DECL_HAS_VALUE_EXPR_P (var)
- && !TREE_STATIC (var)
- && TREE_TYPE (var) != error_mark_node
- && !DECL_HARD_REGISTER (var)
- && really_expand));
+ || (!DECL_EXTERNAL (var)
+ && !DECL_HAS_VALUE_EXPR_P (var)
+ && !TREE_STATIC (var)
+ && TREE_TYPE (var) != error_mark_node
+ && !DECL_HARD_REGISTER (var)
+ && really_expand));
 }
 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
 ;
@@ -1155,8 +1152,8 @@
 new_sv_num = stack_vars_num;
 for (i = old_sv_num; i < new_sv_num; ++i)
- for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
- add_stack_var_conflict (i, j);
+ for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
+ add_stack_var_conflict (i, j);
 }
 }
@@ -1178,10 +1175,10 @@
 /* Examine TYPE and determine a bit mask of the following features. */
-#define SPCT_HAS_LARGE_CHAR_ARRAY 1
-#define SPCT_HAS_SMALL_CHAR_ARRAY 2
-#define SPCT_HAS_ARRAY 4
-#define SPCT_HAS_AGGREGATE 8
+#define SPCT_HAS_LARGE_CHAR_ARRAY 1
+#define SPCT_HAS_SMALL_CHAR_ARRAY 2
+#define SPCT_HAS_ARRAY 4
+#define SPCT_HAS_AGGREGATE 8
 static unsigned int
 stack_protect_classify_type (tree type)
@@ -1194,25 +1191,25 @@
 case ARRAY_TYPE:
 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
 if (t == char_type_node
- || t == signed_char_type_node
- || t == unsigned_char_type_node)
- {
- unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
- unsigned HOST_WIDE_INT len;
-
- if (!TYPE_SIZE_UNIT (type)
- || !host_integerp (TYPE_SIZE_UNIT (type), 1))
- len = max;
+ || t == signed_char_type_node
+ || t == unsigned_char_type_node)
+ {
+ unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
+ unsigned HOST_WIDE_INT len;
+
+ if (!TYPE_SIZE_UNIT (type)
+ || !host_integerp (TYPE_SIZE_UNIT (type), 1))
+ len = max;
+ else
+ len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+
+ if (len < max)
+ ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
+ else
+ ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
+ }
 else
- len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
-
- if (len < max)
- ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
- else
- ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
- }
- else
- ret = SPCT_HAS_ARRAY;
+ ret = SPCT_HAS_ARRAY;
 break;
 case UNION_TYPE:
@@ -1220,8 +1217,8 @@
 case RECORD_TYPE:
 ret = SPCT_HAS_AGGREGATE;
 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
- if (TREE_CODE (t) == FIELD_DECL)
- ret |= stack_protect_classify_type (TREE_TYPE (t));
+ if (TREE_CODE (t) == FIELD_DECL)
+ ret |= stack_protect_classify_type (TREE_TYPE (t));
 break;
 default:
@@ -1248,10 +1245,10 @@
 if (flag_stack_protect == 2)
 {
 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
- && !(bits & SPCT_HAS_AGGREGATE))
- ret = 1;
+ && !(bits & SPCT_HAS_AGGREGATE))
+ ret = 1;
 else if (bits & SPCT_HAS_ARRAY)
- ret = 2;
+ ret = 2;
 }
 else
 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
@@ -1294,8 +1291,8 @@
 {
 unsigned char ph_i = phase[i];
 for (j = 0; j < i; ++j)
- if (ph_i != phase[j])
- add_stack_var_conflict (i, j);
+ if (ph_i != phase[j])
+ add_stack_var_conflict (i, j);
 }
 XDELETEVEC (phase);
@@ -1307,7 +1304,7 @@
 create_stack_guard (void)
 {
 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
- VAR_DECL, NULL, ptr_type_node);
+ VAR_DECL, NULL, ptr_type_node);
 TREE_THIS_VOLATILE (guard) = 1;
 TREE_USED (guard) = 1;
 expand_one_stack_var (guard);
@@ -1375,7 +1372,7 @@
 /* Fake sorting the stack vars for account_stack_vars (). */
 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
 for (i = 0; i < stack_vars_num; ++i)
- stack_vars_sorted[i] = i;
+ stack_vars_sorted[i] = i;
 size += account_stack_vars ();
 fini_vars_expansion ();
 }
@@ -1409,20 +1406,20 @@
 gcc_assert (is_gimple_reg (var));
 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
- expand_one_var (var, true, true);
+ expand_one_var (var, true, true);
 else
- {
- /* This is a PARM_DECL or RESULT_DECL. For those partitions that
- contain the default def (representing the parm or result itself)
- we don't do anything here. But those which don't contain the
- default def (representing a temporary based on the parm/result)
- we need to allocate space just like for normal VAR_DECLs. */
- if (!bitmap_bit_p (SA.partition_has_default_def, i))
- {
- expand_one_var (var, true, true);
- gcc_assert (SA.partition_to_pseudo[i]);
- }
- }
+ {
+ /* This is a PARM_DECL or RESULT_DECL. For those partitions that
+ contain the default def (representing the parm or result itself)
+ we don't do anything here. But those which don't contain the
+ default def (representing a temporary based on the parm/result)
+ we need to allocate space just like for normal VAR_DECLs. */
+ if (!bitmap_bit_p (SA.partition_has_default_def, i))
+ {
+ expand_one_var (var, true, true);
+ gcc_assert (SA.partition_to_pseudo[i]);
+ }
+ }
 }
 /* At this point all variables on the local_decls with TREE_USED
@@ -1440,21 +1437,21 @@
 goto next;
 }
 /* We didn't set a block for static or extern because it's hard
- to tell the difference between a global variable (re)declared
- in a local scope, and one that's really declared there to
- begin with. And it doesn't really matter much, since we're
- not giving them stack space. Expand them now. */
+ to tell the difference between a global variable (re)declared
+ in a local scope, and one that's really declared there to
+ begin with. And it doesn't really matter much, since we're
+ not giving them stack space. Expand them now. */
 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
- expand_now = true;
+ expand_now = true;
 /* If the variable is not associated with any block, then it
- was created by the optimizers, and could be live anywhere
- in the function. */
+ was created by the optimizers, and could be live anywhere
+ in the function. */
 else if (TREE_USED (var))
- expand_now = true;
+ expand_now = true;
 /* Finally, mark all variables on the list as used. We'll use
- this in a moment when we expand those associated with scopes. */
+ this in a moment when we expand those associated with scopes. */
 TREE_USED (var) = 1;
 if (expand_now)
@@ -1498,46 +1495,46 @@
 if (stack_vars_num > 0)
 {
 /* Due to the way alias sets work, no variables with non-conflicting
- alias sets may be assigned the same address. Add conflicts to
- reflect this. */
+ alias sets may be assigned the same address. Add conflicts to
+ reflect this. */
 add_alias_set_conflicts ();
 /* If stack protection is enabled, we don't share space between
- vulnerable data and non-vulnerable data. */
+ vulnerable data and non-vulnerable data. */
 if (flag_stack_protect)
- add_stack_protection_conflicts ();
+ add_stack_protection_conflicts ();
 /* Now that we have collected all stack variables, and have computed a
- minimal interference graph, attempt to save some stack space. */
+ minimal interference graph, attempt to save some stack space. */
 partition_stack_vars ();
 if (dump_file)
- dump_stack_var_partition ();
+ dump_stack_var_partition ();
 }
 /* There are several conditions under which we should create a
 stack guard: protect-all, alloca used, protected decls present. */
 if (flag_stack_protect == 2
 || (flag_stack_protect
- && (cfun->calls_alloca || has_protected_decls)))
+ && (cfun->calls_alloca || has_protected_decls)))
 create_stack_guard ();
 /* Assign rtl to each variable based on these partitions. */
 if (stack_vars_num > 0)
 {
 /* Reorder decls to be protected by iterating over the variables
- array multiple times, and allocating out of each phase in turn. */
+ array multiple times, and allocating out of each phase in turn. */
 /* ??? We could probably integrate this into the qsort we did
- earlier, such that we naturally see these variables first,
- and thus naturally allocate things in the right order. */
+ earlier, such that we naturally see these variables first,
+ and thus naturally allocate things in the right order. */
 if (has_protected_decls)
- {
- /* Phase 1 contains only character arrays. */
- expand_stack_vars (stack_protect_decl_phase_1);
-
- /* Phase 2 contains other kinds of arrays. */
- if (flag_stack_protect == 2)
- expand_stack_vars (stack_protect_decl_phase_2);
- }
+ {
+ /* Phase 1 contains only character arrays. */
+ expand_stack_vars (stack_protect_decl_phase_1);
+
+ /* Phase 2 contains other kinds of arrays. */
+ if (flag_stack_protect == 2)
+ expand_stack_vars (stack_protect_decl_phase_2);
+ }
 expand_stack_vars (NULL);
@@ -1563,7 +1560,7 @@
 {
 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
 if (!FRAME_GROWS_DOWNWARD)
- frame_offset += align - 1;
+ frame_offset += align - 1;
 frame_offset &= -align;
 }
 }
@@ -1580,7 +1577,7 @@
 {
 fprintf (dump_file, "\n;; ");
 print_gimple_stmt (dump_file, stmt, 0,
- TDF_SLIM | (dump_flags & TDF_LINENO));
+ TDF_SLIM | (dump_flags & TDF_LINENO));
 fprintf (dump_file, "\n");
 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
@@ -1614,11 +1611,11 @@
 {
 lab_stmt = gsi_stmt (gsi);
 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
- break;
+ break;
 lab = gimple_label_label (lab_stmt);
 if (DECL_NONLOCAL (lab))
- break;
+ break;
 return label_rtx (lab);
 }
@@ -1653,17 +1650,17 @@
 rtx insn;
 remove_edge (e);
 /* Now, we have a single successor block, if we have insns to
- insert on the remaining edge we potentially will insert
- it at the end of this block (if the dest block isn't feasible)
- in order to avoid splitting the edge. This insertion will take
- place in front of the last jump. But we might have emitted
- multiple jumps (conditional and one unconditional) to the
- same destination. Inserting in front of the last one then
- is a problem. See PR 40021. We fix this by deleting all
- jumps except the last unconditional one. */
+ insert on the remaining edge we potentially will insert
+ it at the end of this block (if the dest block isn't feasible)
+ in order to avoid splitting the edge. This insertion will take
+ place in front of the last jump. But we might have emitted
+ multiple jumps (conditional and one unconditional) to the
+ same destination. Inserting in front of the last one then
+ is a problem. See PR 40021. We fix this by deleting all
+ jumps except the last unconditional one. */
 insn = PREV_INSN (get_last_insn ());
 /* Make sure we have an unconditional jump. Otherwise we're
- confused. */
+ confused. */
 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
 for (insn = PREV_INSN (insn); insn != last;)
 {
@@ -1714,35 +1711,35 @@
 {
 gimple second = SSA_NAME_DEF_STMT (op0);
 if (gimple_code (second) == GIMPLE_ASSIGN)
- {
- enum tree_code code2 = gimple_assign_rhs_code (second);
- if (TREE_CODE_CLASS (code2) == tcc_comparison)
- {
- code = code2;
- op0 = gimple_assign_rhs1 (second);
- op1 = gimple_assign_rhs2 (second);
- }
- /* If jumps are cheap turn some more codes into
- jumpy sequences. */
- else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
- {
- if ((code2 == BIT_AND_EXPR
- && TYPE_PRECISION (TREE_TYPE (op0)) == 1
- && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
- || code2 == TRUTH_AND_EXPR)
- {
- code = TRUTH_ANDIF_EXPR;
- op0 = gimple_assign_rhs1 (second);
- op1 = gimple_assign_rhs2 (second);
- }
- else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
- {
- code = TRUTH_ORIF_EXPR;
- op0 = gimple_assign_rhs1 (second);
- op1 = gimple_assign_rhs2 (second);
- }
- }
- }
+ {
+ enum tree_code code2 = gimple_assign_rhs_code (second);
+ if (TREE_CODE_CLASS (code2) == tcc_comparison)
+ {
+ code = code2;
+ op0 = gimple_assign_rhs1 (second);
+ op1 = gimple_assign_rhs2 (second);
+ }
+ /* If jumps are cheap turn some more codes into
+ jumpy sequences. */
+ else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
+ {
+ if ((code2 == BIT_AND_EXPR
+ && TYPE_PRECISION (TREE_TYPE (op0)) == 1
+ && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
+ || code2 == TRUTH_AND_EXPR)
+ {
+ code = TRUTH_ANDIF_EXPR;
+ op0 = gimple_assign_rhs1 (second);
+ op1 = gimple_assign_rhs2 (second);
+ }
+ else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
+ {
+ code = TRUTH_ORIF_EXPR;
+ op0 = gimple_assign_rhs1 (second);
+ op1 = gimple_assign_rhs2 (second);
+ }
+ }
+ }
 }
 last2 = last = get_last_insn ();
@@ -1766,11 +1763,11 @@
 true_edge->probability);
 maybe_dump_rtl_for_gimple_stmt (stmt, last);
 if (true_edge->goto_locus)
- {
- set_curr_insn_source_location (true_edge->goto_locus);
- set_curr_insn_block (true_edge->goto_block);
- true_edge->goto_locus = curr_insn_locator ();
- }
+ {
+ set_curr_insn_source_location (true_edge->goto_locus);
+ set_curr_insn_block (true_edge->goto_block);
+ true_edge->goto_locus = curr_insn_locator ();
+ }
 true_edge->goto_block = NULL;
 false_edge->flags |= EDGE_FALLTHRU;
 maybe_cleanup_end_of_block (false_edge, last);
@@ -1782,11 +1779,11 @@
 false_edge->probability);
 maybe_dump_rtl_for_gimple_stmt (stmt, last);
 if (false_edge->goto_locus)
- {
- set_curr_insn_source_location (false_edge->goto_locus);
- set_curr_insn_block (false_edge->goto_block);
- false_edge->goto_locus = curr_insn_locator ();
- }
+ {
+ set_curr_insn_source_location (false_edge->goto_locus);
+ set_curr_insn_block (false_edge->goto_block);
+ false_edge->goto_locus = curr_insn_locator ();
+ }
 false_edge->goto_block = NULL;
 true_edge->flags |= EDGE_FALLTHRU;
 maybe_cleanup_end_of_block (true_edge, last);
@@ -1904,9 +1901,9 @@
 case GIMPLE_GOTO:
 op0 = gimple_goto_dest (stmt);
 if (TREE_CODE (op0) == LABEL_DECL)
- expand_goto (op0);
+ expand_goto (op0);
 else
- expand_computed_goto (op0);
+ expand_computed_goto (op0);
 break;
 case GIMPLE_LABEL:
 expand_label (gimple_label_label (stmt));
@@ -1928,30 +1925,30 @@
 op0 = gimple_return_retval (stmt);
 if (op0 && op0 != error_mark_node)
- {
- tree result = DECL_RESULT (current_function_decl);
-
- /* If we are not returning the current function's RESULT_DECL,
- build an assignment to it. */
- if (op0 != result)
- {
- /* I believe that a function's RESULT_DECL is unique. */
- gcc_assert (TREE_CODE (op0) != RESULT_DECL);
-
- /* ??? We'd like to use simply expand_assignment here,
- but this fails if the value is of BLKmode but the return
- decl is a register. expand_return has special handling
- for this combination, which eventually should move
- to common code. See comments there. Until then, let's
- build a modify expression :-/ */
- op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
- result, op0);
- }
- }
+ {
+ tree result = DECL_RESULT (current_function_decl);
+
+ /* If we are not returning the current function's RESULT_DECL,
+ build an assignment to it. */
+ if (op0 != result)
+ {
+ /* I believe that a function's RESULT_DECL is unique. */
+ gcc_assert (TREE_CODE (op0) != RESULT_DECL);
+
+ /* ??? We'd like to use simply expand_assignment here,
+ but this fails if the value is of BLKmode but the return
+ decl is a register. expand_return has special handling
+ for this combination, which eventually should move
+ to common code. See comments there. Until then, let's
+ build a modify expression :-/ */
+ op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
+ result, op0);
+ }
+ }
 if (!op0)
- expand_null_return ();
+ expand_null_return ();
 else
- expand_return (op0);
+ expand_return (op0);
 break;
 case GIMPLE_ASSIGN:
@@ -2092,16 +2089,16 @@
 {
 rtx insn;
 for (insn = next_real_insn (last); insn;
- insn = next_real_insn (insn))
- {
- if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
- /* If we want exceptions for non-call insns, any
- may_trap_p instruction may throw. */
- && GET_CODE (PATTERN (insn)) != CLOBBER
- && GET_CODE (PATTERN (insn)) != USE
- && insn_could_throw_p (insn))
- make_reg_eh_region_note (insn, 0, lp_nr);
- }
+ insn = next_real_insn (insn))
+ {
+ if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
+ /* If we want exceptions for non-call insns, any
+ may_trap_p instruction may throw. */
+ && GET_CODE (PATTERN (insn)) != CLOBBER
+ && GET_CODE (PATTERN (insn)) != USE
+ && insn_could_throw_p (insn))
+ make_reg_eh_region_note (insn, 0, lp_nr);
+ }
 }
 return last;
@@ -2155,22 +2152,22 @@
 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
 {
 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
- {
- if (e->dest != EXIT_BLOCK_PTR)
- {
- e->dest->count -= e->count;
- e->dest->frequency -= EDGE_FREQUENCY (e);
- if (e->dest->count < 0)
- e->dest->count = 0;
- if (e->dest->frequency < 0)
- e->dest->frequency = 0;
- }
- count += e->count;
- probability += e->probability;
- remove_edge (e);
- }
+ {
+ if (e->dest != EXIT_BLOCK_PTR)
+ {
+ e->dest->count -= e->count;
+ e->dest->frequency -= EDGE_FREQUENCY (e);
+ if (e->dest->count < 0)
+ e->dest->count = 0;
+ if (e->dest->frequency < 0)
+ e->dest->frequency = 0;
+ }
+ count += e->count;
+ probability += e->probability;
+ remove_edge (e);
+ }
 else
- ei_next (&ei);
+ ei_next (&ei);
 }
 /* This is somewhat ugly: the call_expr expander often emits instructions
@@ -2183,12 +2180,12 @@
 while (NEXT_INSN (last))
 {
 /* For instance an sqrt builtin expander expands if with
- sibcall in the then and label for `else`. */
+ sibcall in the then and label for `else`. */
 if (LABEL_P (NEXT_INSN (last)))
- {
- *can_fallthru = true;
- break;
- }
+ {
+ *can_fallthru = true;
+ break;
+ }
 delete_insn (NEXT_INSN (last));
 }
@@ -2204,7 +2201,7 @@
 last = BB_END (bb);
 if (BARRIER_P (last))
- BB_END (bb) = PREV_INSN (last);
+ BB_END (bb) = PREV_INSN (last);
 }
 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
@@ -2222,8 +2219,8 @@
 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
 gen_rtx_IF_THEN_ELSE
 (mode, gen_rtx_LT (BImode,
- gen_rtx_DIV (mode, op1, mod),
- const0_rtx),
+ gen_rtx_DIV (mode, op1, mod),
+ const0_rtx),
 constm1_rtx, const0_rtx),
 const0_rtx);
 }
@@ -2238,8 +2235,8 @@
 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
 gen_rtx_IF_THEN_ELSE
 (mode, gen_rtx_GT (BImode,
- gen_rtx_DIV (mode, op1, mod),
- const0_rtx),
+ gen_rtx_DIV (mode, op1, mod),
+ const0_rtx),
 const1_rtx, const0_rtx),
 const0_rtx);
 }
@@ -2266,13 +2263,13 @@
 : 0) */
 return gen_rtx_IF_THEN_ELSE
 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
- gen_rtx_MINUS (mode,
- gen_rtx_ABS (mode, op1),
- gen_rtx_ABS (mode, mod))),
+ gen_rtx_MINUS (mode,
+ gen_rtx_ABS (mode, op1),
+ gen_rtx_ABS (mode, mod))),
 gen_rtx_IF_THEN_ELSE
 (mode, gen_rtx_GT (BImode,
- gen_rtx_DIV (mode, op1, mod),
- const0_rtx),
+ gen_rtx_DIV (mode, op1, mod),
+ const0_rtx),
 const1_rtx, constm1_rtx),
 const0_rtx);
 }
@@ -2287,7 +2284,7 @@
 /* (mod >= op1 - mod ? 1 : 0) */
 return gen_rtx_IF_THEN_ELSE
 (mode, gen_rtx_GE (BImode, mod,
- gen_rtx_MINUS (mode, op1, mod)),
+ gen_rtx_MINUS (mode, op1, mod)),
 const1_rtx, const0_rtx);
 }
@@ -2316,8 +2313,8 @@
 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
 x = simplify_gen_subreg (mode, x, xmode,
- subreg_lowpart_offset
- (mode, xmode));
+ subreg_lowpart_offset
+ (mode, xmode));
 else if (POINTERS_EXTEND_UNSIGNED > 0)
 x = gen_rtx_ZERO_EXTEND (mode, x);
 else if (!POINTERS_EXTEND_UNSIGNED)
@@ -2410,7 +2407,7 @@
 ternary:
 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
 if (!op2)
- return NULL_RTX;
+ return NULL_RTX;
 /* Fall through. */
 binary:
@@ -2418,14 +2415,14 @@
 case tcc_comparison:
 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
 if (!op1)
- return NULL_RTX;
+ return NULL_RTX;
 /* Fall through. */
 unary:
 case tcc_unary:
 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
 if (!op0)
- return NULL_RTX;
+ return NULL_RTX;
 break;
 case tcc_type:
@@ -2444,15 +2441,15 @@
 {
 case STRING_CST:
 if (!lookup_constant_def (exp))
- {
- if (strlen (TREE_STRING_POINTER (exp)) + 1
- != (size_t) TREE_STRING_LENGTH (exp))
- return NULL_RTX;
- op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
- op0 = gen_rtx_MEM (BLKmode, op0);
- set_mem_attributes (op0, exp, 0);
- return op0;
- }
+ {
+ if (strlen (TREE_STRING_POINTER (exp)) + 1
+ != (size_t) TREE_STRING_LENGTH (exp))
+ return NULL_RTX;
+ op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
+ op0 = gen_rtx_MEM (BLKmode, op0);
+ set_mem_attributes (op0, exp, 0);
+ return op0;
+ }
 /* Fall through... */
 case INTEGER_CST:
@@ -2471,7 +2468,7 @@
 op0 = DECL_RTL_IF_SET (exp);
 if (op0)
- return op0;
+ return op0;
 op0 = gen_rtx_DEBUG_EXPR (mode);
 DEBUG_EXPR_TREE_DECL (op0) = exp;
@@ -2505,7 +2502,7 @@
 return NULL;
 }
 else
- op0 = copy_rtx (op0);
+ op0 = copy_rtx (op0);
 if (GET_MODE (op0) == BLKmode
 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
@@ -2593,7 +2590,7 @@
 case INDIRECT_REF:
 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
 if (!op0)
- return NULL;
+ return NULL;
 if (TREE_CODE (exp) == MEM_REF)
 {
@@ -2636,9 +2633,9 @@
 return NULL;
 op0 = expand_debug_expr
- (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
+ (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
 if (!op0)
- return NULL;
+ return NULL;
 if (POINTER_TYPE_P (TREE_TYPE (exp)))
 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
@@ -2793,15 +2790,15 @@
 case FLOAT_EXPR:
 if (unsignedp)
- return gen_rtx_UNSIGNED_FLOAT (mode, op0);
+ return gen_rtx_UNSIGNED_FLOAT (mode, op0);
 else
- return gen_rtx_FLOAT (mode, op0);
+ return gen_rtx_FLOAT (mode, op0);
 case FIX_TRUNC_EXPR:
 if (unsignedp)
- return gen_rtx_UNSIGNED_FIX (mode, op0);
+ return gen_rtx_UNSIGNED_FIX (mode, op0);
 else
- return gen_rtx_FIX (mode, op0);
+ return gen_rtx_FIX (mode, op0);
 case POINTER_PLUS_EXPR:
 /* For the rare target where pointers are not the same size as
@@ -2833,110 +2830,110 @@
 case TRUNC_DIV_EXPR:
 case EXACT_DIV_EXPR:
 if (unsignedp)
- return gen_rtx_UDIV (mode, op0, op1);
+ return gen_rtx_UDIV (mode, op0, op1);
 else
- return gen_rtx_DIV (mode, op0, op1);
+ return gen_rtx_DIV (mode, op0, op1);
 case TRUNC_MOD_EXPR:
 if (unsignedp)
- return gen_rtx_UMOD (mode, op0, op1);
+ return gen_rtx_UMOD (mode, op0, op1);
 else
- return gen_rtx_MOD (mode, op0, op1);
+ return gen_rtx_MOD (mode, op0, op1);
 case FLOOR_DIV_EXPR:
 if (unsignedp)
- return gen_rtx_UDIV (mode, op0, op1);
+ return gen_rtx_UDIV (mode, op0, op1);
 else
- {
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
- rtx adj = floor_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
- }
+ {
+ rtx div = gen_rtx_DIV (mode, op0, op1);
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = floor_sdiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
 case FLOOR_MOD_EXPR:
 if (unsignedp)
- return gen_rtx_UMOD (mode, op0, op1);
+ return gen_rtx_UMOD (mode, op0, op1);
 else
- {
- rtx mod = gen_rtx_MOD (mode, op0, op1);
- rtx adj = floor_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
- }
+ {
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = floor_sdiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
 case CEIL_DIV_EXPR:
 if (unsignedp)
- {
- rtx div = gen_rtx_UDIV (mode, op0, op1);
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
- rtx adj = ceil_udiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
- }
+ {
+ rtx div = gen_rtx_UDIV (mode, op0, op1);
+ rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx adj = ceil_udiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
 else
- {
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
- rtx adj = ceil_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
- }
+ {
+ rtx div = gen_rtx_DIV (mode, op0, op1);
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = ceil_sdiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
 case CEIL_MOD_EXPR:
 if (unsignedp)
- {
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
- rtx adj = ceil_udiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
- }
+ {
+ rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx adj = ceil_udiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
 else
- {
- rtx mod = gen_rtx_MOD (mode, op0, op1);
- rtx adj = ceil_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
- }
+ {
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = ceil_sdiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
 case ROUND_DIV_EXPR:
 if (unsignedp)
- {
- rtx div = gen_rtx_UDIV (mode, op0, op1);
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
- rtx adj = round_udiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
- }
+ {
+ rtx div = gen_rtx_UDIV (mode, op0, op1);
+ rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx adj = round_udiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
 else
- {
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
- rtx adj = round_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
- }
+ {
+ rtx div = gen_rtx_DIV (mode, op0, op1);
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = round_sdiv_adjust (mode, mod, op1);
+ return gen_rtx_PLUS (mode, div, adj);
+ }
 case ROUND_MOD_EXPR:
 if (unsignedp)
- {
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
- rtx adj = round_udiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
- }
+ {
+ rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx adj = round_udiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
 else
- {
- rtx mod = gen_rtx_MOD (mode, op0, op1);
- rtx adj = round_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
- }
+ {
+ rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx adj = round_sdiv_adjust (mode, mod, op1);
+ adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+ return gen_rtx_PLUS (mode, mod, adj);
+ }
 case LSHIFT_EXPR:
 return gen_rtx_ASHIFT (mode, op0, op1);
 case RSHIFT_EXPR:
 if (unsignedp)
- return gen_rtx_LSHIFTRT (mode, op0, op1);
+ return gen_rtx_LSHIFTRT (mode, op0, op1);
 else
- return gen_rtx_ASHIFTRT (mode, op0, op1);
+ return gen_rtx_ASHIFTRT (mode, op0, op1);
 case LROTATE_EXPR:
 return gen_rtx_ROTATE (mode, op0, op1);
@@ -2946,15 +2943,15 @@
 case MIN_EXPR:
 if (unsignedp)
- return gen_rtx_UMIN (mode, op0, op1);
+ return gen_rtx_UMIN (mode, op0, op1);
 else
- return gen_rtx_SMIN (mode, op0, op1);
+ return gen_rtx_SMIN (mode, op0, op1);
 case MAX_EXPR:
 if (unsignedp)
- return gen_rtx_UMAX (mode, op0, op1);
+ return gen_rtx_UMAX (mode, op0, op1);
 else
- return gen_rtx_SMAX (mode, op0, op1);
+ return gen_rtx_SMAX (mode, op0, op1);
 case BIT_AND_EXPR:
 case TRUTH_AND_EXPR:
@@ -2979,27 +2976,27 @@
 case LT_EXPR:
 if (unsignedp)
- return gen_rtx_LTU (mode, op0, op1);
+ return gen_rtx_LTU (mode, op0, op1);
 else
- return gen_rtx_LT (mode, op0, op1);
+ return gen_rtx_LT (mode, op0, op1);
 case LE_EXPR:
 if (unsignedp)
- return gen_rtx_LEU (mode, op0, op1);
+ return gen_rtx_LEU (mode, op0, op1);
 else
- return gen_rtx_LE (mode, op0, op1);
+ return gen_rtx_LE (mode, op0, op1);
 case GT_EXPR:
 if (unsignedp)
- return gen_rtx_GTU (mode, op0, op1);
+ return gen_rtx_GTU (mode, op0, op1);
 else
- return gen_rtx_GT (mode, op0, op1);
+ return gen_rtx_GT (mode, op0, op1);
 case GE_EXPR:
 if (unsignedp)
- return gen_rtx_GEU (mode, op0, op1);
+ return gen_rtx_GEU (mode, op0, op1);
 else
- return gen_rtx_GE (mode, op0, op1);
+ return gen_rtx_GE (mode, op0, op1);
 case EQ_EXPR:
 return gen_rtx_EQ (mode, op0, op1);
@@ -3037,50 +3034,50 @@
 case COMPLEX_EXPR:
 gcc_assert (COMPLEX_MODE_P (mode));
 if (GET_MODE (op0) == VOIDmode)
- op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
+ op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
 if (GET_MODE (op1) == VOIDmode)
- op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
+ op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
 return gen_rtx_CONCAT (mode, op0, op1);
 case CONJ_EXPR:
 if (GET_CODE (op0) == CONCAT)
- return gen_rtx_CONCAT (mode, XEXP (op0, 0),
- gen_rtx_NEG (GET_MODE_INNER (mode),
- XEXP (op0, 1)));
- else
- {
- enum machine_mode imode = GET_MODE_INNER (mode);
- rtx re, im;
-
- if (MEM_P (op0))
- {
- re = adjust_address_nv (op0, imode, 0);
- im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
- }
+ return gen_rtx_CONCAT (mode, XEXP (op0, 0),
+ gen_rtx_NEG (GET_MODE_INNER (mode),
+ XEXP (op0, 1)));
 else
- {
- enum machine_mode ifmode = int_mode_for_mode (mode);
- enum machine_mode ihmode = int_mode_for_mode (imode);
- rtx halfsize;
- if (ifmode == BLKmode || ihmode == BLKmode)
- return NULL;
- halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
- re = op0;
- if (mode != ifmode)
- re = gen_rtx_SUBREG (ifmode, re, 0);
- re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
- if (imode != ihmode)
- re = gen_rtx_SUBREG (imode, re, 0);
- im = copy_rtx (op0);
- if (mode != ifmode)
- im = gen_rtx_SUBREG (ifmode, im, 0);
- im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
- if (imode != ihmode)
- im = gen_rtx_SUBREG (imode, im, 0);
- }
- im = gen_rtx_NEG (imode, im);
- return gen_rtx_CONCAT (mode, re, im);
- }
+ {
+ enum machine_mode imode = GET_MODE_INNER (mode);
+ rtx re, im;
+
+ if (MEM_P (op0))
+ {
+ re = adjust_address_nv (op0, imode, 0);
+ im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
+ }
+ else
+ {
+ enum machine_mode ifmode = int_mode_for_mode (mode);
+ enum machine_mode ihmode = int_mode_for_mode (imode);
+ rtx halfsize;
+ if (ifmode == BLKmode || ihmode == BLKmode)
+ return NULL;
+ halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
+ re = op0;
+ if (mode != ifmode)
+ re = gen_rtx_SUBREG (ifmode, re, 0);
+ re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
+ if (imode != ihmode)
+ re = gen_rtx_SUBREG (imode, re, 0);
+ im = copy_rtx (op0);
+ if (mode != ifmode)
+ im = gen_rtx_SUBREG (ifmode, im, 0);
+ im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
+ if (imode != ihmode)
+ im = gen_rtx_SUBREG (imode, im, 0);
+ }
+ im = gen_rtx_NEG (imode, im);
+ return gen_rtx_CONCAT (mode, re, im);
+ }
 case ADDR_EXPR:
 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
@@ -3119,7 +3116,7 @@
 case VECTOR_CST:
 exp = build_constructor_from_list (TREE_TYPE (exp),
- TREE_VECTOR_CST_ELTS (exp));
+ TREE_VECTOR_CST_ELTS (exp));
 /* Fall through. */
 case CONSTRUCTOR:
@@ -3154,7 +3151,7 @@
 return op0;
 }
 else
- goto flag_unsupported;
+ goto flag_unsupported;
 case CALL_EXPR:
 /* ??? Maybe handle some builtins? */
@@ -3303,33 +3300,33 @@
 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 if (DEBUG_INSN_P (insn))
 {
- tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
- rtx val;
- enum machine_mode mode;
-
- if (value == NULL_TREE)
- val = NULL_RTX;
- else
- {
- val = expand_debug_expr (value);
- gcc_assert (last == get_last_insn ());
- }
-
- if (!val)
- val = gen_rtx_UNKNOWN_VAR_LOC ();
- else
- {
- mode = GET_MODE (INSN_VAR_LOCATION (insn));
-
- gcc_assert (mode == GET_MODE (val)
- || (GET_MODE (val) == VOIDmode
- && (CONST_INT_P (val)
- || GET_CODE (val) == CONST_FIXED
- || GET_CODE (val) == CONST_DOUBLE
- || GET_CODE (val) == LABEL_REF)));
- }
-
- INSN_VAR_LOCATION_LOC (insn) = val;
+ tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
+ rtx val;
+ enum machine_mode mode;
+
+ if (value == NULL_TREE)
+ val = NULL_RTX;
+ else
+ {
+ val = expand_debug_expr (value);
+ gcc_assert (last == get_last_insn ());
+ }
+
+ if (!val)
+ val = gen_rtx_UNKNOWN_VAR_LOC ();
+ else
+ {
+ mode = GET_MODE (INSN_VAR_LOCATION (insn));
+
+ gcc_assert (mode == GET_MODE (val)
+ || (GET_MODE (val) == VOIDmode
+ && (CONST_INT_P (val)
+ || GET_CODE (val) == CONST_FIXED
+ || GET_CODE (val) == CONST_DOUBLE
+ || GET_CODE (val) == LABEL_REF)));
+ }
+
+ INSN_VAR_LOCATION_LOC (insn) = val;
 }
 flag_strict_aliasing = save_strict_alias;
@@ -3350,7 +3347,7 @@
 if (dump_file)
 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
- bb->index);
+ bb->index);
 /* Note that since we are now transitioning from GIMPLE to RTL, we
 cannot use the gsi_*_bb() routines because they expect the basic
@@ -3374,11 +3371,11 @@
 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
 if (bb->next_bb == EXIT_BLOCK_PTR
- && !gimple_return_retval (ret_stmt))
- {
- gsi_remove (&gsi, false);
- single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
- }
+ && !gimple_return_retval (ret_stmt))
+ {
+ gsi_remove (&gsi, false);
+ single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
+ }
 }
 gsi = gsi_start (stmts);
@@ -3386,7 +3383,7 @@
 {
 stmt = gsi_stmt (gsi);
 if (gimple_code (stmt) != GIMPLE_LABEL)
- stmt = NULL;
+ stmt = NULL;
 }
 elt = pointer_map_contains (lab_rtx_for_bb, bb);
@@ -3396,19 +3393,19 @@
 last = get_last_insn ();
 if (stmt)
- {
- expand_gimple_stmt (stmt);
- gsi_next (&gsi);
- }
+ {
+ expand_gimple_stmt (stmt);
+ gsi_next (&gsi);
+ }
 if (elt)
- emit_label ((rtx) *elt);
+ emit_label ((rtx) *elt);
 /* Java emits line number notes in the top of labels.
- ??? Make this go away once line number notes are obsoleted. */
+ ??? Make this go away once line number notes are obsoleted. */
 BB_HEAD (bb) = NEXT_INSN (last);
 if (NOTE_P (BB_HEAD (bb)))
- BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
+ BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
 maybe_dump_rtl_for_gimple_stmt (stmt, last);
@@ -3525,13 +3522,13 @@
 currently_expanding_gimple_stmt = stmt;
 /* Expand this statement, then evaluate the resulting RTL and
 fixup the CFG accordingly. */
 if (gimple_code (stmt) == GIMPLE_COND)
- {
- new_bb = expand_gimple_cond (bb, stmt);
- if (new_bb)
- return new_bb;
- }
+ {
+ new_bb = expand_gimple_cond (bb, stmt);
+ if (new_bb)
+ return new_bb;
+ }
 else if (gimple_debug_bind_p (stmt))
 {
 location_t sloc = get_curr_insn_source_location ();
@@ -3594,37 +3591,37 @@
 set_curr_insn_block (sblock);
 }
 else
- {
- if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
- {
- bool can_fallthru;
- new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
- if (new_bb)
- {
- if (can_fallthru)
- bb = new_bb;
- else
- return new_bb;
- }
- }
- else
- {
- def_operand_p def_p;
- def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
-
- if (def_p != NULL)
- {
- /* Ignore this stmt if it is in the list of
- replaceable expressions. */
- if (SA.values
- && bitmap_bit_p (SA.values,
- SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
- continue;
- }
- last = expand_gimple_stmt (stmt);
- maybe_dump_rtl_for_gimple_stmt (stmt, last);
- }
- }
+ {
+ if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
+ {
+ bool can_fallthru;
+ new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
+ if (new_bb)
+ {
+ if (can_fallthru)
+ bb = new_bb;
+ else
+ return new_bb;
+ }
+ }
+ else
+ {
+ def_operand_p def_p;
+ def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
+
+ if (def_p != NULL)
+ {
+ /* Ignore this stmt if it is in the list of
+ replaceable expressions. */
+ if (SA.values
+ && bitmap_bit_p (SA.values,
+ SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
+ continue;
+ }
+ last = expand_gimple_stmt (stmt);
+ maybe_dump_rtl_for_gimple_stmt (stmt, last);
+ }
+ }
 }
 currently_expanding_gimple_stmt = NULL;
@@ -3633,17 +3630,17 @@
 FOR_EACH_EDGE (e, ei, bb->succs)
 {
 if (e->goto_locus && e->goto_block)
- {
- set_curr_insn_source_location (e->goto_locus);
- set_curr_insn_block (e->goto_block);
- e->goto_locus = curr_insn_locator ();
- }
+ {
+ set_curr_insn_source_location (e->goto_locus);
+ set_curr_insn_block (e->goto_block);
+ e->goto_locus = curr_insn_locator ();
+ }
 e->goto_block = NULL;
 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
- {
- emit_jump (label_rtx_for_bb (e->dest));
- e->flags &= ~EDGE_FALLTHRU;
- }
+ {
+ emit_jump (label_rtx_for_bb (e->dest));
+ e->flags &= ~EDGE_FALLTHRU;
+ }
 }
 /* Expanded RTL can create a jump in the last instruction of block.
@@ -3706,8 +3703,8 @@
 flags = EDGE_FALLTHRU;
 init_block = create_basic_block (NEXT_INSN (get_insns ()),
- get_last_insn (),
- ENTRY_BLOCK_PTR);
+ get_last_insn (),
+ ENTRY_BLOCK_PTR);
 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
 init_block->count = ENTRY_BLOCK_PTR->count;
 if (e)
@@ -3774,7 +3771,7 @@
 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
 head = NEXT_INSN (head);
 exit_block = create_basic_block (NEXT_INSN (head), end,
- EXIT_BLOCK_PTR->prev_bb);
+ EXIT_BLOCK_PTR->prev_bb);
 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
 exit_block->count = EXIT_BLOCK_PTR->count;
@@ -3783,9 +3780,9 @@
 {
 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
 if (!(e->flags & EDGE_ABNORMAL))
- redirect_edge_succ (e, exit_block);
+ redirect_edge_succ (e, exit_block);
 else
- ix++;
+ ix++;
 }
 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
@@ -3794,9 +3791,9 @@
 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
 if (e2 != e)
 {
- e->count -= e2->count;
- exit_block->count -= e2->count;
- exit_block->frequency -= EDGE_FREQUENCY (e2);
+ e->count -= e2->count;
+ exit_block->count -= e2->count;
+ exit_block->frequency -= EDGE_FREQUENCY (e2);
 }
 if (e->count < 0)
 e->count = 0;
@@ -3813,7 +3810,7 @@
 static tree
 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
- void *data ATTRIBUTE_UNUSED)
+ void *data ATTRIBUTE_UNUSED)
 {
 tree t = *tp;
@@ -3822,26 +3819,26 @@
 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
 {
 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
- && is_gimple_min_invariant (TREE_OPERAND (t, 1))
- && (!TREE_OPERAND (t, 2)
- || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
- || (TREE_CODE (t) == COMPONENT_REF
- && (!TREE_OPERAND (t,2)
- || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
- || TREE_CODE (t) == BIT_FIELD_REF
- || TREE_CODE (t) == REALPART_EXPR
- || TREE_CODE (t) == IMAGPART_EXPR
- || TREE_CODE (t) == VIEW_CONVERT_EXPR
- || CONVERT_EXPR_P (t))
- t = TREE_OPERAND (t, 0);
+ && is_gimple_min_invariant (TREE_OPERAND (t, 1))
+ && (!TREE_OPERAND (t, 2)
+ || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
+ || (TREE_CODE (t) == COMPONENT_REF
+ && (!TREE_OPERAND (t,2)
+ || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
+ || TREE_CODE (t) == BIT_FIELD_REF
+ || TREE_CODE (t) == REALPART_EXPR
+ || TREE_CODE (t) == IMAGPART_EXPR
+ || TREE_CODE (t) == VIEW_CONVERT_EXPR
+ || CONVERT_EXPR_P (t))
+ t = TREE_OPERAND (t, 0);
 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
- {
- t = get_base_address (t);
- if (t && DECL_P (t)
 && DECL_MODE (t) != BLKmode)
- TREE_ADDRESSABLE (t) = 1;
- }
+ {
+ t = get_base_address (t);
+ if (t && DECL_P (t)
 && DECL_MODE (t) != BLKmode)
+ TREE_ADDRESSABLE (t) = 1;
+ }
 *walk_subtrees = 0;
 }
@@ -3915,7 +3912,7 @@
 crtl->stack_alignment_needed = preferred_stack_boundary;
 gcc_assert (crtl->stack_alignment_needed
- <= crtl->stack_alignment_estimated);
+ <= crtl->stack_alignment_estimated);
 crtl->stack_realign_needed
 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
@@ -3965,7 +3962,7 @@
 rewrite_out_of_ssa (&SA);
 timevar_pop (TV_OUT_OF_SSA);
 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
- sizeof (rtx));
+ sizeof (rtx));
 /* Some backends want to know that we are expanding to RTL. */
 currently_expanding_to_rtl = 1;
@@ -4056,26 +4053,26 @@
 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
 if (TREE_CODE (var) != VAR_DECL
- && !SA.partition_to_pseudo[i])
- SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
+ && !SA.partition_to_pseudo[i])
+ SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
 gcc_assert (SA.partition_to_pseudo[i]);
 /* If this decl was marked as living in multiple places, reset
 this now to NULL. */
 if (DECL_RTL_IF_SET (var) == pc_rtx)
- SET_DECL_RTL (var, NULL);
+ SET_DECL_RTL (var, NULL);
 /* Some RTL parts really want to look at DECL_RTL(x) when x
 was a decl marked in REG_ATTR or MEM_ATTR. We could use
- SET_DECL_RTL here making this available, but that would mean
- to select one of the potentially many RTLs for one DECL. Instead
- of doing that we simply reset the MEM_EXPR of the RTL in question,
- then nobody can get at it and hence nobody can call DECL_RTL on it. */
+ SET_DECL_RTL here making this available, but that would mean
+ to select one of the potentially many RTLs for one DECL. Instead
+ of doing that we simply reset the MEM_EXPR of the RTL in question,
+ then nobody can get at it and hence nobody can call DECL_RTL on it. */
 if (!DECL_RTL_SET_P (var))
- {
- if (MEM_P (SA.partition_to_pseudo[i]))
- set_mem_expr (SA.partition_to_pseudo[i], NULL);
- }
+ {
+ if (MEM_P (SA.partition_to_pseudo[i]))
+ set_mem_expr (SA.partition_to_pseudo[i], NULL);
+ }
 }
 /* If this function is `main', emit a call to `__main'
@@ -4165,20 +4162,20 @@
 edge e;
 edge_iterator ei;
 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
- {
- /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
- e->flags &= ~EDGE_EXECUTABLE;
-
- /* At the moment not all abnormal edges match the RTL
- representation. It is safe to remove them here as
- find_many_sub_basic_blocks will rediscover them.
- In the future we should get this fixed properly. */
- if ((e->flags & EDGE_ABNORMAL)
- && !(e->flags & EDGE_SIBCALL))
- remove_edge (e);
- else
- ei_next (&ei);
- }
+ {
+ /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
+ e->flags &= ~EDGE_EXECUTABLE;
+
+ /* At the moment not all abnormal edges match the RTL
+ representation. It is safe to remove them here as
+ find_many_sub_basic_blocks will rediscover them.
+ In the future we should get this fixed properly. */
+ if ((e->flags & EDGE_ABNORMAL)
+ && !(e->flags & EDGE_SIBCALL))
+ remove_edge (e);
+ else
+ ei_next (&ei);
+ }
 }
 blocks = sbitmap_alloc (last_basic_block);
@@ -4206,7 +4203,7 @@
 if (dump_file)
 {
 fprintf (dump_file,
- "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
+ "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
 /* And the pass manager will dump RTL for us. */
 }
@@ -4215,10 +4212,10 @@
 {
 tree parent;
 for (parent = DECL_CONTEXT (current_function_decl);
- parent != NULL_TREE;
- parent = get_containing_scope (parent))
+ parent != NULL_TREE;
+ parent = get_containing_scope (parent))
 if (TREE_CODE (parent) == FUNCTION_DECL)
- TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
+ TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
 }
 /* We are now committed to emitting code for this function. Do any
@@ -4244,9 +4241,9 @@
 {
 {
 RTL_PASS,
- "expand", /* name */
+ "expand", /* name */
 NULL, /* gate */
- gimple_expand_cfg, /* execute */
+ gimple_expand_cfg, /* execute */
 NULL, /* sub */
 NULL, /* next */
 0, /* static_pass_number */
@@ -4254,10 +4251,10 @@
 PROP_ssa | PROP_gimple_leh | PROP_cfg
 | PROP_gimple_lcx, /* properties_required */
 PROP_rtl, /* properties_provided */
- PROP_ssa | PROP_trees, /* properties_destroyed */
+ PROP_ssa | PROP_trees, /* properties_destroyed */
 TODO_verify_ssa | TODO_verify_flow
- | TODO_verify_stmts, /* todo_flags_start */
+ | TODO_verify_stmts, /* todo_flags_start */
 TODO_dump_func
- | TODO_ggc_collect /* todo_flags_finish */
+ | TODO_ggc_collect /* todo_flags_finish */
 }
 };