diff gcc/tracer.c @ 111:04ced10e8804
gcc 7
| author | kono |
| --- | --- |
| date | Fri, 27 Oct 2017 22:46:09 +0900 |
| parents | 77e2b8dfacca |
| children | 84e7813d76e9 |
--- a/gcc/tracer.c Sun Aug 21 07:07:55 2011 +0900
+++ b/gcc/tracer.c Fri Oct 27 22:46:09 2017 +0900
@@ -1,8 +1,7 @@
 /* The tracer pass for the GNU compiler.
    Contributed by Jan Hubicka, SuSE Labs.
    Adapted to work on GIMPLE instead of RTL by Robert Kidd, UIUC.
-   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
-   Free Software Foundation, Inc.
+   Copyright (C) 2001-2017 Free Software Foundation, Inc.

    This file is part of GCC.

@@ -37,29 +36,28 @@
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
-#include "tm.h"
-#include "tree.h"
+#include "backend.h"
 #include "rtl.h"
-#include "hard-reg-set.h"
-#include "basic-block.h"
-#include "output.h"
-#include "cfglayout.h"
-#include "fibheap.h"
-#include "flags.h"
-#include "timevar.h"
+#include "tree.h"
+#include "gimple.h"
+#include "cfghooks.h"
+#include "tree-pass.h"
+#include "profile.h"
+#include "cfganal.h"
 #include "params.h"
-#include "coverage.h"
-#include "tree-pass.h"
-#include "tree-flow.h"
+#include "gimple-iterator.h"
+#include "tree-cfg.h"
+#include "tree-ssa.h"
 #include "tree-inline.h"
+#include "cfgloop.h"
+#include "fibonacci_heap.h"
+#include "tracer.h"

 static int count_insns (basic_block);
-static bool ignore_bb_p (const_basic_block);
 static bool better_p (const_edge, const_edge);
 static edge find_best_successor (basic_block);
 static edge find_best_predecessor (basic_block);
 static int find_trace (basic_block, basic_block *);
-static void tail_duplicate (void);

 /* Minimal outgoing edge probability considered for superblock formation.  */
 static int probability_cutoff;
@@ -67,33 +65,49 @@
 /* A bit BB->index is set if BB has already been seen, i.e. it is
    connected to some trace already.  */
-sbitmap bb_seen;
+static sbitmap bb_seen;

 static inline void
 mark_bb_seen (basic_block bb)
 {
-  unsigned int size = SBITMAP_SIZE_BYTES (bb_seen) * 8;
+  unsigned int size = SBITMAP_SIZE (bb_seen);

   if ((unsigned int)bb->index >= size)
     bb_seen = sbitmap_resize (bb_seen, size * 2, 0);

-  SET_BIT (bb_seen, bb->index);
+  bitmap_set_bit (bb_seen, bb->index);
 }

 static inline bool
 bb_seen_p (basic_block bb)
 {
-  return TEST_BIT (bb_seen, bb->index);
+  return bitmap_bit_p (bb_seen, bb->index);
 }

 /* Return true if we should ignore the basic block for purposes of
    tracing.  */
-static bool
+bool
 ignore_bb_p (const_basic_block bb)
 {
   if (bb->index < NUM_FIXED_BLOCKS)
     return true;
   if (optimize_bb_for_size_p (bb))
     return true;
+
+  if (gimple *g = last_stmt (CONST_CAST_BB (bb)))
+    {
+      /* A transaction is a single entry multiple exit region.  It
+         must be duplicated in its entirety or not at all.  */
+      if (gimple_code (g) == GIMPLE_TRANSACTION)
+        return true;
+
+      /* An IFN_UNIQUE call must be duplicated as part of its group,
+         or not at all.  */
+      if (is_gimple_call (g)
+          && gimple_call_internal_p (g)
+          && gimple_call_internal_unique_p (g))
+        return true;
+    }
+
   return false;
 }

@@ -103,7 +117,7 @@
 count_insns (basic_block bb)
 {
   gimple_stmt_iterator gsi;
-  gimple stmt;
+  gimple *stmt;
   int n = 0;

   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
@@ -118,12 +132,11 @@
 static bool
 better_p (const_edge e1, const_edge e2)
 {
-  if (e1->count != e2->count)
-    return e1->count > e2->count;
-  if (e1->src->frequency * e1->probability !=
-      e2->src->frequency * e2->probability)
-    return (e1->src->frequency * e1->probability
-            > e2->src->frequency * e2->probability);
+  if (e1->count ().initialized_p () && e2->count ().initialized_p ()
+      && ((e1->count () > e2->count ()) || (e1->count () < e2->count ())))
+    return e1->count () > e2->count ();
+  if (EDGE_FREQUENCY (e1) != EDGE_FREQUENCY (e2))
+    return EDGE_FREQUENCY (e1) > EDGE_FREQUENCY (e2);
   /* This is needed to avoid changes in the decision after
      CFG is modified.  */
   if (e1->src != e2->src)
@@ -145,7 +158,8 @@
       best = e;
   if (!best || ignore_bb_p (best->dest))
     return NULL;
-  if (best->probability <= probability_cutoff)
+  if (best->probability.initialized_p ()
+      && best->probability.to_reg_br_prob_base () <= probability_cutoff)
     return NULL;
   return best;
 }
@@ -212,29 +226,50 @@
   return i;
 }

+/* Duplicate block BB2, placing it after BB in the CFG.  Return the
+   newly created block.  */
+basic_block
+transform_duplicate (basic_block bb, basic_block bb2)
+{
+  edge e;
+  basic_block copy;
+
+  e = find_edge (bb, bb2);
+
+  copy = duplicate_block (bb2, e, bb);
+  flush_pending_stmts (e);
+
+  add_phi_args_after_copy (&copy, 1, NULL);
+
+  return (copy);
+}
+
 /* Look for basic blocks in frequency order, construct traces and tail
    duplicate if profitable.  */

-static void
+static bool
 tail_duplicate (void)
 {
-  fibnode_t *blocks = XCNEWVEC (fibnode_t, last_basic_block);
-  basic_block *trace = XNEWVEC (basic_block, n_basic_blocks);
-  int *counts = XNEWVEC (int, last_basic_block);
+  auto_vec<fibonacci_node<long, basic_block_def>*> blocks;
+  blocks.safe_grow_cleared (last_basic_block_for_fn (cfun));
+
+  basic_block *trace = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
+  int *counts = XNEWVEC (int, last_basic_block_for_fn (cfun));
   int ninsns = 0, nduplicated = 0;
   gcov_type weighted_insns = 0, traced_insns = 0;
-  fibheap_t heap = fibheap_new ();
+  fibonacci_heap<long, basic_block_def> heap (LONG_MIN);
   gcov_type cover_insns;
   int max_dup_insns;
   basic_block bb;
+  bool changed = false;

   /* Create an oversized sbitmap to reduce the chance that we need to
      resize it.  */
-  bb_seen = sbitmap_alloc (last_basic_block * 2);
-  sbitmap_zero (bb_seen);
+  bb_seen = sbitmap_alloc (last_basic_block_for_fn (cfun) * 2);
+  bitmap_clear (bb_seen);

   initialize_original_copy_tables ();

-  if (profile_info && flag_branch_probabilities)
+  if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
     probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
   else
     probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
@@ -243,19 +278,18 @@
   branch_ratio_cutoff =
     (REG_BR_PROB_BASE / 100 * PARAM_VALUE (TRACER_MIN_BRANCH_RATIO));

-  FOR_EACH_BB (bb)
+  FOR_EACH_BB_FN (bb, cfun)
     {
       int n = count_insns (bb);
       if (!ignore_bb_p (bb))
-        blocks[bb->index] = fibheap_insert (heap, -bb->frequency,
-                                            bb);
+        blocks[bb->index] = heap.insert (-bb->frequency, bb);

       counts [bb->index] = n;
       ninsns += n;
       weighted_insns += n * bb->frequency;
     }

-  if (profile_info && flag_branch_probabilities)
+  if (profile_info && profile_status_for_fn (cfun) == PROFILE_READ)
     cover_insns = PARAM_VALUE (TRACER_DYNAMIC_COVERAGE_FEEDBACK);
   else
     cover_insns = PARAM_VALUE (TRACER_DYNAMIC_COVERAGE);
@@ -263,9 +297,9 @@
   max_dup_insns = (ninsns * PARAM_VALUE (TRACER_MAX_CODE_GROWTH) + 50) / 100;

   while (traced_insns < cover_insns && nduplicated < max_dup_insns
-         && !fibheap_empty (heap))
+         && !heap.empty ())
     {
-      basic_block bb = (basic_block) fibheap_extract_min (heap);
+      basic_block bb = heap.extract_min ();
       int n, pos;

       if (!bb)
@@ -283,7 +317,7 @@
       traced_insns += bb->frequency * counts [bb->index];
       if (blocks[bb->index])
         {
-          fibheap_delete_node (heap, blocks[bb->index]);
+          heap.delete_node (blocks[bb->index]);
           blocks[bb->index] = NULL;
         }

@@ -293,36 +327,32 @@

           if (blocks[bb2->index])
             {
-              fibheap_delete_node (heap, blocks[bb2->index]);
+              heap.delete_node (blocks[bb2->index]);
               blocks[bb2->index] = NULL;
             }
           traced_insns += bb2->frequency * counts [bb2->index];
           if (EDGE_COUNT (bb2->preds) > 1
-              && can_duplicate_block_p (bb2))
+              && can_duplicate_block_p (bb2)
+              /* We have the tendency to duplicate the loop header
+                 of all do { } while loops.  Do not do that - it is
+                 not profitable and it might create a loop with multiple
+                 entries or at least rotate the loop.  */
+              && bb2->loop_father->header != bb2)
             {
-              edge e;
-              basic_block copy;
-
               nduplicated += counts [bb2->index];
-
-              e = find_edge (bb, bb2);
-
-              copy = duplicate_block (bb2, e, bb);
-              flush_pending_stmts (e);
-
-              add_phi_args_after_copy (&copy, 1, NULL);
+              basic_block copy = transform_duplicate (bb, bb2);

               /* Reconsider the original copy of block we've duplicated.
                  Removing the most common predecessor may make it to be
                  head.  */
-              blocks[bb2->index] =
-                fibheap_insert (heap, -bb2->frequency, bb2);
+              blocks[bb2->index] = heap.insert (-bb2->frequency, bb2);

               if (dump_file)
                 fprintf (dump_file, "Duplicated %i as %i [%i]\n",
                          bb2->index, copy->index, copy->frequency);

               bb2 = copy;
+              changed = true;
             }
           mark_bb_seen (bb2);
           bb = bb2;
@@ -340,61 +370,74 @@

   free_original_copy_tables ();
   sbitmap_free (bb_seen);
-  free (blocks);
   free (trace);
   free (counts);
-  fibheap_delete (heap);
+
+  return changed;
 }
+
+namespace {

-/* Main entry point to this file.  */
+const pass_data pass_data_tracer =
+{
+  GIMPLE_PASS, /* type */
+  "tracer", /* name */
+  OPTGROUP_NONE, /* optinfo_flags */
+  TV_TRACER, /* tv_id */
+  0, /* properties_required */
+  0, /* properties_provided */
+  0, /* properties_destroyed */
+  0, /* todo_flags_start */
+  TODO_update_ssa, /* todo_flags_finish */
+};

-static unsigned int
-tracer (void)
+class pass_tracer : public gimple_opt_pass
 {
-  gcc_assert (current_ir_type () == IR_GIMPLE);
+public:
+  pass_tracer (gcc::context *ctxt)
+    : gimple_opt_pass (pass_data_tracer, ctxt)
+  {}
+
+  /* opt_pass methods: */
+  virtual bool gate (function *)
+    {
+      return (optimize > 0 && flag_tracer && flag_reorder_blocks);
+    }

-  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
+  virtual unsigned int execute (function *);
+
+}; // class pass_tracer
+
+unsigned int
+pass_tracer::execute (function *fun)
+{
+  bool changed;
+
+  if (n_basic_blocks_for_fn (fun) <= NUM_FIXED_BLOCKS + 1)
     return 0;

   mark_dfs_back_edges ();
   if (dump_file)
-    dump_flow_info (dump_file, dump_flags);
+    brief_dump_cfg (dump_file, dump_flags);

   /* Trace formation is done on the fly inside tail_duplicate */
-  tail_duplicate ();
-
-  /* FIXME: We really only need to do this when we know tail duplication
-     has altered the CFG. */
-  free_dominance_info (CDI_DOMINATORS);
-  if (dump_file)
-    dump_flow_info (dump_file, dump_flags);
+  changed = tail_duplicate ();
+  if (changed)
+    {
+      free_dominance_info (CDI_DOMINATORS);
+      /* If we changed the CFG schedule loops for fixup by cleanup_cfg.  */
+      loops_state_set (LOOPS_NEED_FIXUP);
+    }

-  return 0;
+  if (dump_file)
+    brief_dump_cfg (dump_file, dump_flags);
+
+  return changed ? TODO_cleanup_cfg : 0;
 }
-
-static bool
-gate_tracer (void)
-{
-  return (optimize > 0 && flag_tracer && flag_reorder_blocks);
-}
+} // anon namespace

-struct gimple_opt_pass pass_tracer =
+gimple_opt_pass *
+make_pass_tracer (gcc::context *ctxt)
 {
-  {
-    GIMPLE_PASS,
-    "tracer",                           /* name */
-    gate_tracer,                        /* gate */
-    tracer,                             /* execute */
-    NULL,                               /* sub */
-    NULL,                               /* next */
-    0,                                  /* static_pass_number */
-    TV_TRACER,                          /* tv_id */
-    0,                                  /* properties_required */
-    0,                                  /* properties_provided */
-    0,                                  /* properties_destroyed */
-    0,                                  /* todo_flags_start */
-    TODO_dump_func
-      | TODO_update_ssa
-      | TODO_verify_ssa                 /* todo_flags_finish */
-  }
-};
+  return new pass_tracer (ctxt);
+}
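
For orientation, the function modernized above, tail_duplicate (), drives trace formation: it repeatedly takes the most frequent block that is not yet part of a trace off a priority queue and grows a trace from it along the hottest edges (find_best_successor ()), duplicating shared blocks so each trace becomes straight-line code. Below is a minimal, self-contained C++ sketch of that greedy trace-formation idea on a made-up CFG. It is not GCC code: it uses std::priority_queue in place of GCC's fibonacci_heap, grows traces forward only, and omits find_best_predecessor (), the probability and code-growth cutoffs, and the actual block duplication.

/* Minimal sketch (not GCC code): greedy trace formation over a toy CFG.
   Block names, frequencies and the CFG itself are invented for illustration.  */
#include <cstdio>
#include <map>
#include <queue>
#include <set>
#include <string>
#include <utility>
#include <vector>

struct block
{
  int frequency;                  /* execution frequency estimate */
  std::vector<std::string> succs; /* successor block names */
};

int
main ()
{
  /* Toy CFG: A -> B, B -> {C, D}; D is the cold path.  */
  std::map<std::string, block> cfg = {
    {"A", {100, {"B"}}},
    {"B", {90, {"C", "D"}}},
    {"C", {80, {}}},
    {"D", {10, {}}},
  };

  /* Max-heap keyed on frequency; the real pass keys a fibonacci_heap
     on -bb->frequency to the same effect.  */
  std::priority_queue<std::pair<int, std::string>> heap;
  for (const auto &kv : cfg)
    heap.push ({kv.second.frequency, kv.first});

  std::set<std::string> seen;     /* counterpart of the bb_seen sbitmap */

  while (!heap.empty ())
    {
      std::string bb = heap.top ().second;
      heap.pop ();
      if (seen.count (bb))
        continue;

      /* Grow one trace: keep following the most frequent unseen successor,
         the role find_best_successor () plays in the real pass.  */
      std::vector<std::string> trace;
      while (true)
        {
          seen.insert (bb);
          trace.push_back (bb);

          std::string best;
          int best_freq = -1;
          for (const std::string &s : cfg[bb].succs)
            if (!seen.count (s) && cfg[s].frequency > best_freq)
              {
                best = s;
                best_freq = cfg[s].frequency;
              }
          if (best.empty ())
            break;
          bb = best;
        }

      std::printf ("trace:");
      for (const std::string &name : trace)
        std::printf (" %s", name.c_str ());
      std::printf ("\n");
    }
  return 0;
}

Compiled with any C++11 compiler, this prints one trace per line ("A B C", then "D"); marking blocks in the seen set mirrors how the real pass uses mark_bb_seen () so each block joins at most one trace.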