CbC_gcc: diff gcc/tree-ssa-structalias.c @ 131:84e7813d76e9 (gcc-8.2)
| author   | mir3636                         |
|----------|---------------------------------|
| date     | Thu, 25 Oct 2018 07:37:49 +0900 |
| parents  | 04ced10e8804                    |
| children | 1830386684a0                    |
--- a/gcc/tree-ssa-structalias.c    Fri Oct 27 22:46:09 2017 +0900
+++ b/gcc/tree-ssa-structalias.c    Thu Oct 25 07:37:49 2018 +0900
@@ -1,5 +1,5 @@
 /* Tree based points-to analysis
-   Copyright (C) 2005-2017 Free Software Foundation, Inc.
+   Copyright (C) 2005-2018 Free Software Foundation, Inc.
    Contributed by Daniel Berlin <dberlin@dberlin.org>
 
 This file is part of GCC.
@@ -42,6 +42,7 @@
 #include "varasm.h"
 #include "stringpool.h"
 #include "attribs.h"
+#include "tree-ssa.h"
 
 /* The idea behind this analyzer is to generate set constraints from the
    program, then solve the resulting constraints in order to generate the
@@ -2928,15 +2929,26 @@
   /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
   gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
 
-  /* For parameters, get at the points-to set for the actual parm
-     decl.  */
   if (TREE_CODE (t) == SSA_NAME
-      && SSA_NAME_IS_DEFAULT_DEF (t)
-      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
-          || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
-    {
-      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
-      return;
+      && SSA_NAME_IS_DEFAULT_DEF (t))
+    {
+      /* For parameters, get at the points-to set for the actual parm
+         decl.  */
+      if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
+          || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
+        {
+          get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
+          return;
+        }
+      /* For undefined SSA names return nothing.  */
+      else if (!ssa_defined_default_def_p (t))
+        {
+          cexpr.var = nothing_id;
+          cexpr.type = SCALAR;
+          cexpr.offset = 0;
+          results->safe_push (cexpr);
+          return;
+        }
     }
 
   /* For global variables resort to the alias target.  */
@@ -3191,9 +3203,9 @@
                                   bool address_p, bool lhs_p)
 {
   tree orig_t = t;
-  HOST_WIDE_INT bitsize = -1;
-  HOST_WIDE_INT bitmaxsize = -1;
-  HOST_WIDE_INT bitpos;
+  poly_int64 bitsize = -1;
+  poly_int64 bitmaxsize = -1;
+  poly_int64 bitpos;
   bool reverse;
   tree forzero;
 
@@ -3255,8 +3267,8 @@
          ignore this constraint.  When we handle pointer subtraction, we
          may have to do something cute here.  */
 
-      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
-          && bitmaxsize != 0)
+      if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
+          && maybe_ne (bitmaxsize, 0))
         {
           /* It's also not true that the constraint will actually start at the
              right offset, it may start in some padding.  We only care about
@@ -3268,8 +3280,8 @@
           cexpr.offset = 0;
           for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
             {
-              if (ranges_overlap_p (curr->offset, curr->size,
-                                    bitpos, bitmaxsize))
+              if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
+                                          curr->size, bitpos, bitmaxsize))
                 {
                   cexpr.var = curr->id;
                   results->safe_push (cexpr);
@@ -3302,7 +3314,7 @@
               results->safe_push (cexpr);
             }
         }
-      else if (bitmaxsize == 0)
+      else if (known_eq (bitmaxsize, 0))
         {
           if (dump_file && (dump_flags & TDF_DETAILS))
             fprintf (dump_file, "Access to zero-sized part of variable, "
@@ -3317,13 +3329,15 @@
       /* If we do not know exactly where the access goes say so.  Note
          that only for non-structure accesses we know that we access
          at most one subfiled of any variable.  */
-      if (bitpos == -1
-          || bitsize != bitmaxsize
+      HOST_WIDE_INT const_bitpos;
+      if (!bitpos.is_constant (&const_bitpos)
+          || const_bitpos == -1
+          || maybe_ne (bitsize, bitmaxsize)
           || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
           || result.offset == UNKNOWN_OFFSET)
         result.offset = UNKNOWN_OFFSET;
       else
-        result.offset += bitpos;
+        result.offset += const_bitpos;
     }
   else if (result.type == ADDRESSOF)
     {
@@ -3660,14 +3674,17 @@
            && (rhsp->type == SCALAR
                || rhsp->type == ADDRESSOF))
     {
-      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
-      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
+      HOST_WIDE_INT lhssize, lhsoffset;
+      HOST_WIDE_INT rhssize, rhsoffset;
       bool reverse;
       unsigned k = 0;
-      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize,
-                               &reverse);
-      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize,
-                               &reverse);
+      if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
+          || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
+                                           &reverse))
+        {
+          process_all_all_constraints (lhsc, rhsc);
+          return;
+        }
       for (j = 0; lhsc.iterate (j, &lhsp);)
         {
           varinfo_t lhsv, rhsv;
@@ -3898,7 +3915,7 @@
       c.offset = 0;
       c.type = SCALAR;
     }
-  else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
+  else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
     {
       varinfo_t ai = first_vi_for_offset (fi, part);
       if (ai)
@@ -4070,7 +4087,7 @@
          initialized and thus may point to global memory.  All
          builtin functions with the malloc attribute behave in a sane way.  */
       if (!fndecl
-          || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
+          || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
         make_constraint_from (vi, nonlocal_id);
       tmpc.var = vi->id;
       tmpc.offset = 0;
@@ -4493,7 +4510,9 @@
          that use the memory pointed to by their arguments (but not
          transitively).  */
       case BUILT_IN_STRCMP:
+      case BUILT_IN_STRCMP_EQ:
       case BUILT_IN_STRNCMP:
+      case BUILT_IN_STRNCMP_EQ:
       case BUILT_IN_STRCASECMP:
       case BUILT_IN_STRNCASECMP:
       case BUILT_IN_MEMCMP:
@@ -4722,13 +4741,13 @@
   varinfo_t fi;
 
   if (fndecl != NULL_TREE
-      && DECL_BUILT_IN (fndecl)
+      && fndecl_built_in_p (fndecl)
       && find_func_aliases_for_builtin_call (fn, t))
     return;
 
   fi = get_fi_for_callee (t);
   if (!in_ipa_mode
-      || (fndecl && !fi->is_fn_info))
+      || (fi->decl && fndecl && !fi->is_fn_info))
     {
       auto_vec<ce_s, 16> rhsc;
       int flags = gimple_call_flags (t);
@@ -4826,35 +4845,19 @@
   gimple *t = origt;
   auto_vec<ce_s, 16> lhsc;
   auto_vec<ce_s, 16> rhsc;
-  struct constraint_expr *c;
   varinfo_t fi;
 
   /* Now build constraints expressions.  */
   if (gimple_code (t) == GIMPLE_PHI)
     {
-      size_t i;
-      unsigned int j;
-
       /* For a phi node, assign all the arguments to
          the result.  */
       get_constraint_for (gimple_phi_result (t), &lhsc);
-      for (i = 0; i < gimple_phi_num_args (t); i++)
-        {
-          tree strippedrhs = PHI_ARG_DEF (t, i);
-
-          STRIP_NOPS (strippedrhs);
+      for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
+        {
           get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
-
-          FOR_EACH_VEC_ELT (lhsc, j, c)
-            {
-              struct constraint_expr *c2;
-              while (rhsc.length () > 0)
-                {
-                  c2 = &rhsc.last ();
-                  process_constraint (new_constraint (*c, *c2));
-                  rhsc.pop ();
-                }
-            }
+          process_all_all_constraints (lhsc, rhsc);
+          rhsc.truncate (0);
         }
     }
   /* In IPA mode, we need to generate constraints to pass call
@@ -5345,7 +5348,8 @@
           /* For callees without function info (that's external functions),
              ESCAPED is clobbered and used.  */
-          if (gimple_call_fndecl (t)
+          if (cfi->decl
+              && TREE_CODE (cfi->decl) == FUNCTION_DECL
               && !cfi->is_fn_info)
             {
               varinfo_t vi;
 
@@ -5930,11 +5934,14 @@
    This will also create any varinfo structures necessary for fields
    of DECL.  DECL is a function parameter if HANDLE_PARAM is set.
    HANDLED_STRUCT_TYPE is used to register struct types reached by following
-   restrict pointers.  This is needed to prevent infinite recursion.  */
+   restrict pointers.  This is needed to prevent infinite recursion.
+   If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
+   does not advertise it.  */
 
 static varinfo_t
 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
-                            bool handle_param, bitmap handled_struct_type)
+                            bool handle_param, bitmap handled_struct_type,
+                            bool add_restrict = false)
 {
   varinfo_t vi, newvi;
   tree decl_type = TREE_TYPE (decl);
@@ -6008,7 +6015,7 @@
       vi->size = vi->fullsize;
       vi->is_full_var = true;
       if (POINTER_TYPE_P (decl_type)
-          && TYPE_RESTRICT (decl_type))
+          && (TYPE_RESTRICT (decl_type) || add_restrict))
         vi->only_restrict_pointers = 1;
       if (vi->only_restrict_pointers
           && !type_contains_placeholder_p (TREE_TYPE (decl_type))
@@ -6105,6 +6112,28 @@
 static unsigned int
 create_variable_info_for (tree decl, const char *name, bool add_id)
 {
+  /* First see if we are dealing with an ifunc resolver call and
+     assiociate that with a call to the resolver function result.  */
+  cgraph_node *node;
+  if (in_ipa_mode
+      && TREE_CODE (decl) == FUNCTION_DECL
+      && (node = cgraph_node::get (decl))
+      && node->ifunc_resolver)
+    {
+      varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
+      constraint_expr rhs
+        = get_function_part_constraint (fi, fi_result);
+      fi = new_var_info (NULL_TREE, "ifuncres", true);
+      fi->is_reg_var = true;
+      constraint_expr lhs;
+      lhs.type = SCALAR;
+      lhs.var = fi->id;
+      lhs.offset = 0;
+      process_constraint (new_constraint (lhs, rhs));
+      insert_vi_for_tree (decl, fi);
+      return fi->id;
+    }
+
   varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
   unsigned int id = vi->id;
 
@@ -6237,6 +6266,7 @@
 {
   tree t;
   bitmap handled_struct_type = NULL;
+  bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);
 
   /* For each incoming pointer argument arg, create the constraint ARG
      = NONLOCAL or a dummy variable if it is a restrict qualified
@@ -6248,10 +6278,12 @@
 
       varinfo_t p
         = create_variable_info_for_1 (t, alias_get_name (t), false, true,
-                                      handled_struct_type);
+                                      handled_struct_type, this_parm_in_ctor);
       insert_vi_for_tree (t, p);
 
       make_param_constraints (p);
+
+      this_parm_in_ctor = false;
     }
 
   if (handled_struct_type != NULL)
@@ -7365,6 +7397,7 @@
 struct vls_data
 {
   unsigned short clique;
+  bool escaped_p;
   bitmap rvars;
 };
 
@@ -7376,6 +7409,7 @@
 {
   unsigned short clique = ((vls_data *) data)->clique;
   bitmap rvars = ((vls_data *) data)->rvars;
+  bool escaped_p = ((vls_data *) data)->escaped_p;
   if (TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
     {
@@ -7396,7 +7430,8 @@
         return false;
 
       vi = get_varinfo (find (vi->id));
-      if (bitmap_intersect_p (rvars, vi->solution))
+      if (bitmap_intersect_p (rvars, vi->solution)
+          || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
         return false;
     }
 
@@ -7431,33 +7466,40 @@
       return false;
     }
 
-/* If REF is a MEM_REF then assign a clique, base pair to it, updating
-   CLIQUE, *RESTRICT_VAR and LAST_RUID.  Return whether dependence info
-   was assigned to REF.  */
+struct msdi_data {
+  tree ptr;
+  unsigned short *clique;
+  unsigned short *last_ruid;
+  varinfo_t restrict_var;
+};
+
+/* If BASE is a MEM_REF then assign a clique, base pair to it, updating
+   CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
+   Return whether dependence info was assigned to BASE.  */
 
 static bool
-maybe_set_dependence_info (tree ref, tree ptr,
-                           unsigned short &clique, varinfo_t restrict_var,
-                           unsigned short &last_ruid)
-{
-  while (handled_component_p (ref))
-    ref = TREE_OPERAND (ref, 0);
-  if ((TREE_CODE (ref) == MEM_REF
-       || TREE_CODE (ref) == TARGET_MEM_REF)
-      && TREE_OPERAND (ref, 0) == ptr)
+maybe_set_dependence_info (gimple *, tree base, tree, void *data)
+{
+  tree ptr = ((msdi_data *)data)->ptr;
+  unsigned short &clique = *((msdi_data *)data)->clique;
+  unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
+  varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
+  if ((TREE_CODE (base) == MEM_REF
+       || TREE_CODE (base) == TARGET_MEM_REF)
+      && TREE_OPERAND (base, 0) == ptr)
     {
       /* Do not overwrite existing cliques.  This avoids overwriting dependence
          info inlined from a function with restrict parameters inlined
         into a function with restrict parameters.  This usually means we
         prefer to be precise in innermost loops.  */
-      if (MR_DEPENDENCE_CLIQUE (ref) == 0)
+      if (MR_DEPENDENCE_CLIQUE (base) == 0)
         {
           if (clique == 0)
             clique = ++cfun->last_clique;
           if (restrict_var->ruid == 0)
             restrict_var->ruid = ++last_ruid;
-          MR_DEPENDENCE_CLIQUE (ref) = clique;
-          MR_DEPENDENCE_BASE (ref) = restrict_var->ruid;
+          MR_DEPENDENCE_CLIQUE (base) = clique;
+          MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
           return true;
         }
     }
@@ -7473,6 +7515,7 @@
   unsigned short clique = 0;
   unsigned short last_ruid = 0;
   bitmap rvars = BITMAP_ALLOC (NULL);
+  bool escaped_p = false;
   for (unsigned i = 0; i < num_ssa_names; ++i)
     {
       tree ptr = ssa_name (i);
@@ -7529,20 +7572,18 @@
           imm_use_iterator ui;
           gimple *use_stmt;
           bool used = false;
+          msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
           FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
+            used |= walk_stmt_load_store_ops (use_stmt, &data,
+                                              maybe_set_dependence_info,
+                                              maybe_set_dependence_info);
+          if (used)
             {
-              /* ??? Calls and asms.  */
-              if (!gimple_assign_single_p (use_stmt))
-                continue;
-              used |= maybe_set_dependence_info (gimple_assign_lhs (use_stmt),
-                                                 ptr, clique, restrict_var,
-                                                 last_ruid);
-              used |= maybe_set_dependence_info (gimple_assign_rhs1 (use_stmt),
-                                                 ptr, clique, restrict_var,
-                                                 last_ruid);
+              bitmap_set_bit (rvars, restrict_var->id);
+              varinfo_t escaped = get_varinfo (find (escaped_id));
+              if (bitmap_bit_p (escaped->solution, restrict_var->id))
+                escaped_p = true;
             }
-          if (used)
-            bitmap_set_bit (rvars, restrict_var->id);
         }
     }
 
@@ -7555,7 +7596,7 @@
      parameters) we can't restrict scoping properly thus the following
      is too aggressive there.  For now we have excluded those globals from
      getting into the MR_DEPENDENCE machinery.  */
-  vls_data data = { clique, rvars };
+  vls_data data = { clique, escaped_p, rvars };
   basic_block bb;
   FOR_EACH_BB_FN (bb, cfun)
     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
@@ -7702,7 +7743,8 @@
   if ((node->alias
        || (node->thunk.thunk_p
            && ! node->global.inlined_to))
-      && node->analyzed)
+      && node->analyzed
+      && !node->ifunc_resolver)
     insert_vi_for_tree (node->decl, (varinfo_t)data);
   return false;
 }
@@ -8074,7 +8116,7 @@
                     (node->decl, first_vi_for_offset (fi, fi_uses));
                 }
               /* Handle direct calls to external functions.  */
-              else if (decl)
+              else if (decl && (!fi || fi->decl))
                 {
                   pt = gimple_call_use_set (stmt);
                   if (gimple_call_flags (stmt) & ECF_CONST)
@@ -8119,8 +8161,7 @@
                     }
                 }
               /* Handle indirect calls.  */
-              else if (!decl
-                       && (fi = get_fi_for_callee (stmt)))
+              else if ((fi = get_fi_for_callee (stmt)))
                 {
                   /* We need to accumulate all clobbers/uses of all possible
                      callees.  */
@@ -8176,6 +8217,8 @@
                         }
                     }
                 }
+              else
+                gcc_unreachable ();
             }
         }
 