comparison gcc/except.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
comparing 68:561a7518be6b with 111:04ced10e8804
1 /* Implements exception handling. 1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 2 Copyright (C) 1989-2017 Free Software Foundation, Inc.
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>. 3 Contributed by Mike Stump <mrs@cygnus.com>.
6 4
7 This file is part of GCC. 5 This file is part of GCC.
8 6
9 GCC is free software; you can redistribute it and/or modify it under 7 GCC is free software; you can redistribute it and/or modify it under
79 During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes 77 During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
80 that create an rtl to eh_region mapping that corresponds to the 78 that create an rtl to eh_region mapping that corresponds to the
81 gimple to eh_region mapping that had been recorded in the 79 gimple to eh_region mapping that had been recorded in the
82 THROW_STMT_TABLE. 80 THROW_STMT_TABLE.
83 81
84 During pass_rtl_eh (except.c), we generate the real landing pads 82 Then, via finish_eh_generation, we generate the real landing pads
85 to which the runtime will actually transfer control. These new 83 to which the runtime will actually transfer control. These new
86 landing pads perform whatever bookkeeping is needed by the target 84 landing pads perform whatever bookkeeping is needed by the target
87 backend in order to resume execution within the current function. 85 backend in order to resume execution within the current function.
88 Each of these new landing pads falls through into the post_landing_pad 86 Each of these new landing pads falls through into the post_landing_pad
89 label which had been used within the CFG up to this point. All 87 label which had been used within the CFG up to this point. All
112 110
113 111
114 #include "config.h" 112 #include "config.h"
115 #include "system.h" 113 #include "system.h"
116 #include "coretypes.h" 114 #include "coretypes.h"
117 #include "tm.h" 115 #include "backend.h"
116 #include "target.h"
118 #include "rtl.h" 117 #include "rtl.h"
119 #include "tree.h" 118 #include "tree.h"
120 #include "flags.h" 119 #include "cfghooks.h"
121 #include "function.h" 120 #include "tree-pass.h"
121 #include "memmodel.h"
122 #include "tm_p.h"
123 #include "stringpool.h"
124 #include "expmed.h"
125 #include "optabs.h"
126 #include "emit-rtl.h"
127 #include "cgraph.h"
128 #include "diagnostic.h"
129 #include "fold-const.h"
130 #include "stor-layout.h"
131 #include "explow.h"
132 #include "stmt.h"
122 #include "expr.h" 133 #include "expr.h"
134 #include "calls.h"
123 #include "libfuncs.h" 135 #include "libfuncs.h"
124 #include "insn-config.h"
125 #include "except.h" 136 #include "except.h"
126 #include "integrate.h"
127 #include "hard-reg-set.h"
128 #include "basic-block.h"
129 #include "output.h" 137 #include "output.h"
130 #include "dwarf2asm.h" 138 #include "dwarf2asm.h"
131 #include "dwarf2out.h" 139 #include "dwarf2out.h"
132 #include "dwarf2.h" 140 #include "common/common-target.h"
133 #include "toplev.h"
134 #include "hashtab.h"
135 #include "intl.h"
136 #include "ggc.h"
137 #include "tm_p.h"
138 #include "target.h"
139 #include "langhooks.h" 141 #include "langhooks.h"
140 #include "cgraph.h" 142 #include "cfgrtl.h"
141 #include "diagnostic.h"
142 #include "tree-pretty-print.h" 143 #include "tree-pretty-print.h"
143 #include "tree-pass.h" 144 #include "cfgloop.h"
144 #include "timevar.h" 145 #include "builtins.h"
145 #include "tree-flow.h" 146 #include "tree-hash-traits.h"
146
147 /* Provide defaults for stuff that may not be defined when using
148 sjlj exceptions. */
149 #ifndef EH_RETURN_DATA_REGNO
150 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
151 #endif
152 147
153 static GTY(()) int call_site_base; 148 static GTY(()) int call_site_base;
154 static GTY ((param_is (union tree_node))) 149
155 htab_t type_to_runtime_map; 150 static GTY(()) hash_map<tree_hash, tree> *type_to_runtime_map;
151
152 static GTY(()) tree setjmp_fn;
156 153
157 /* Describe the SjLj_Function_Context structure. */ 154 /* Describe the SjLj_Function_Context structure. */
158 static GTY(()) tree sjlj_fc_type_node; 155 static GTY(()) tree sjlj_fc_type_node;
159 static int sjlj_fc_call_site_ofs; 156 static int sjlj_fc_call_site_ofs;
160 static int sjlj_fc_data_ofs; 157 static int sjlj_fc_data_ofs;
166 struct GTY(()) call_site_record_d 163 struct GTY(()) call_site_record_d
167 { 164 {
168 rtx landing_pad; 165 rtx landing_pad;
169 int action; 166 int action;
170 }; 167 };
168
169 /* In the following structure and associated functions,
170 we represent entries in the action table as 1-based indices.
171 Special cases are:
172
173 0: null action record, non-null landing pad; implies cleanups
174 -1: null action record, null landing pad; implies no action
175 -2: no call-site entry; implies must_not_throw
176 -3: we have yet to process outer regions
177
178 Further, no special cases apply to the "next" field of the record.
179 For next, 0 means end of list. */
180
181 struct action_record
182 {
183 int offset;
184 int filter;
185 int next;
186 };
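The sentinel values documented above decide how a chain of action records is read back when the tables are emitted; -3 is only a transient marker while outer regions are still unprocessed. As a rough standalone illustration of that convention (the vector, function name and printf output are illustrative assumptions, not code from except.c), a chain keyed by a 1-based index whose "next" link of 0 ends the list can be walked like this:

#include <cstdio>
#include <vector>

struct action_record { int offset; int filter; int next; };

/* Interpret ACTION using the sentinels described above, then follow
   the 1-based "next" links until the 0 terminator.  */
static void
walk_action_chain (const std::vector<action_record> &table, int action)
{
  if (action == -2) { std::printf ("must_not_throw\n"); return; }
  if (action == -1) { std::printf ("no action\n"); return; }
  if (action == 0)  { std::printf ("cleanups only\n"); return; }
  while (action != 0)
    {
      const action_record &r = table[action - 1];
      std::printf ("filter %d\n", r.filter);
      action = r.next;
    }
}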
187
188 /* Hashtable helpers. */
189
190 struct action_record_hasher : free_ptr_hash <action_record>
191 {
192 static inline hashval_t hash (const action_record *);
193 static inline bool equal (const action_record *, const action_record *);
194 };
195
196 inline hashval_t
197 action_record_hasher::hash (const action_record *entry)
198 {
199 return entry->next * 1009 + entry->filter;
200 }
201
202 inline bool
203 action_record_hasher::equal (const action_record *entry,
204 const action_record *data)
205 {
206 return entry->filter == data->filter && entry->next == data->next;
207 }
208
209 typedef hash_table<action_record_hasher> action_hash_type;
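action_record_hasher supplies the hash (next * 1009 + filter) and equality test that let hash_table deduplicate action records, replacing the old htab_t plus add_action_record callbacks. The same deduplication, written as a standalone analogy with std::unordered_map (the 1-based index handed out and all names here are assumptions for illustration, not the body of add_action_record, which lies outside this hunk):

#include <cstddef>
#include <unordered_map>
#include <utility>

/* Hash a (filter, next) pair the same way action_record_hasher does.  */
struct pair_hash {
  std::size_t operator() (const std::pair<int,int> &p) const
  { return std::size_t (p.second) * 1009 + std::size_t (p.first); }
};

/* Return the index already assigned to (FILTER, NEXT), or assign a new
   1-based one the first time the pair is seen.  */
static int
intern_action (std::unordered_map<std::pair<int,int>, int, pair_hash> &seen,
               int filter, int next)
{
  auto key = std::make_pair (filter, next);
  auto it = seen.find (key);
  if (it != seen.end ())
    return it->second;                 /* already recorded */
  int index = (int) seen.size () + 1;  /* new 1-based index */
  seen.emplace (key, index);
  return index;
}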
171 210
172 static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *, 211 static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
173 eh_landing_pad *); 212 eh_landing_pad *);
174 213
175 static int t2r_eq (const void *, const void *);
176 static hashval_t t2r_hash (const void *);
177
178 static int ttypes_filter_eq (const void *, const void *);
179 static hashval_t ttypes_filter_hash (const void *);
180 static int ehspec_filter_eq (const void *, const void *);
181 static hashval_t ehspec_filter_hash (const void *);
182 static int add_ttypes_entry (htab_t, tree);
183 static int add_ehspec_entry (htab_t, htab_t, tree);
184 static void dw2_build_landing_pads (void); 214 static void dw2_build_landing_pads (void);
185 215
186 static int action_record_eq (const void *, const void *); 216 static int collect_one_action_chain (action_hash_type *, eh_region);
187 static hashval_t action_record_hash (const void *);
188 static int add_action_record (htab_t, int, int);
189 static int collect_one_action_chain (htab_t, eh_region);
190 static int add_call_site (rtx, int, int); 217 static int add_call_site (rtx, int, int);
191 218
192 static void push_uleb128 (VEC (uchar, gc) **, unsigned int); 219 static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
193 static void push_sleb128 (VEC (uchar, gc) **, int); 220 static void push_sleb128 (vec<uchar, va_gc> **, int);
194 #ifndef HAVE_AS_LEB128
195 static int dw2_size_of_call_site_table (int); 221 static int dw2_size_of_call_site_table (int);
196 static int sjlj_size_of_call_site_table (void); 222 static int sjlj_size_of_call_site_table (void);
197 #endif
198 static void dw2_output_call_site_table (int, int); 223 static void dw2_output_call_site_table (int, int);
199 static void sjlj_output_call_site_table (void); 224 static void sjlj_output_call_site_table (void);
200 225
201 226
202 void 227 void
203 init_eh (void) 228 init_eh (void)
204 { 229 {
205 if (! flag_exceptions) 230 if (! flag_exceptions)
206 return; 231 return;
207 232
208 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL); 233 type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);
209 234
210 /* Create the SjLj_Function_Context structure. This should match 235 /* Create the SjLj_Function_Context structure. This should match
211 the definition in unwind-sjlj.c. */ 236 the definition in unwind-sjlj.c. */
212 if (targetm.except_unwind_info (&global_options) == UI_SJLJ) 237 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
213 { 238 {
214 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp; 239 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
215 240
216 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE); 241 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
217 242
223 f_cs = build_decl (BUILTINS_LOCATION, 248 f_cs = build_decl (BUILTINS_LOCATION,
224 FIELD_DECL, get_identifier ("__call_site"), 249 FIELD_DECL, get_identifier ("__call_site"),
225 integer_type_node); 250 integer_type_node);
226 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node; 251 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
227 252
228 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1)); 253 tmp = build_index_type (size_int (4 - 1));
229 tmp = build_array_type (lang_hooks.types.type_for_mode 254 tmp = build_array_type (lang_hooks.types.type_for_mode
230 (targetm.unwind_word_mode (), 1), 255 (targetm.unwind_word_mode (), 1),
231 tmp); 256 tmp);
232 f_data = build_decl (BUILTINS_LOCATION, 257 f_data = build_decl (BUILTINS_LOCATION,
233 FIELD_DECL, get_identifier ("__data"), tmp); 258 FIELD_DECL, get_identifier ("__data"), tmp);
243 ptr_type_node); 268 ptr_type_node);
244 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node; 269 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
245 270
246 #ifdef DONT_USE_BUILTIN_SETJMP 271 #ifdef DONT_USE_BUILTIN_SETJMP
247 #ifdef JMP_BUF_SIZE 272 #ifdef JMP_BUF_SIZE
248 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1); 273 tmp = size_int (JMP_BUF_SIZE - 1);
249 #else 274 #else
250 /* Should be large enough for most systems, if it is not, 275 /* Should be large enough for most systems, if it is not,
251 JMP_BUF_SIZE should be defined with the proper value. It will 276 JMP_BUF_SIZE should be defined with the proper value. It will
252 also tend to be larger than necessary for most systems, a more 277 also tend to be larger than necessary for most systems, a more
253 optimal port will define JMP_BUF_SIZE. */ 278 optimal port will define JMP_BUF_SIZE. */
254 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1); 279 tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
255 #endif 280 #endif
256 #else 281 #else
257 /* builtin_setjmp takes a pointer to 5 words. */ 282 /* Compute a minimally sized jump buffer. We need room to store at
258 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1); 283 least 3 pointers - stack pointer, frame pointer and return address.
284 Plus for some targets we need room for an extra pointer - in the
285 case of MIPS this is the global pointer. This makes a total of four
286 pointers, but to be safe we actually allocate room for 5.
287
288 If pointers are smaller than words then we allocate enough room for
289 5 words, just in case the backend needs this much room. For more
290 discussion on this issue see:
291 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html. */
292 if (POINTER_SIZE > BITS_PER_WORD)
293 tmp = size_int (5 - 1);
294 else
295 tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
259 #endif 296 #endif
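The new guard separates targets whose pointers are wider than a word from the common case: room for 5 pointers in the first case, room for 5 words' worth of pointers otherwise. A standalone arithmetic sketch of the index computed above, using hypothetical widths that are not tied to any particular port:

#include <cstdio>

/* Reproduce the last-index computation above for a given
   (BITS_PER_WORD, POINTER_SIZE) pair.  */
static int
jbuf_last_index (int bits_per_word, int pointer_size)
{
  if (pointer_size > bits_per_word)
    return 5 - 1;                                 /* room for 5 pointers */
  return (5 * bits_per_word / pointer_size) - 1;  /* room for 5 words */
}

int
main ()
{
  std::printf ("%d\n", jbuf_last_index (64, 64)); /* 4: 5 pointers */
  std::printf ("%d\n", jbuf_last_index (64, 32)); /* 9: 10 pointers = 5 words */
  std::printf ("%d\n", jbuf_last_index (32, 64)); /* 4: 5 pointers */
  return 0;
}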
297
260 tmp = build_index_type (tmp); 298 tmp = build_index_type (tmp);
261 tmp = build_array_type (ptr_type_node, tmp); 299 tmp = build_array_type (ptr_type_node, tmp);
262 f_jbuf = build_decl (BUILTINS_LOCATION, 300 f_jbuf = build_decl (BUILTINS_LOCATION,
263 FIELD_DECL, get_identifier ("__jbuf"), tmp); 301 FIELD_DECL, get_identifier ("__jbuf"), tmp);
264 #ifdef DONT_USE_BUILTIN_SETJMP 302 #ifdef DONT_USE_BUILTIN_SETJMP
265 /* We don't know what the alignment requirements of the 303 /* We don't know what the alignment requirements of the
266 runtime's jmp_buf has. Overestimate. */ 304 runtime's jmp_buf has. Overestimate. */
267 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT; 305 SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
268 DECL_USER_ALIGN (f_jbuf) = 1; 306 DECL_USER_ALIGN (f_jbuf) = 1;
269 #endif 307 #endif
270 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node; 308 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
271 309
272 TYPE_FIELDS (sjlj_fc_type_node) = f_prev; 310 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
279 layout_type (sjlj_fc_type_node); 317 layout_type (sjlj_fc_type_node);
280 318
281 /* Cache the interesting field offsets so that we have 319 /* Cache the interesting field offsets so that we have
282 easy access from rtl. */ 320 easy access from rtl. */
283 sjlj_fc_call_site_ofs 321 sjlj_fc_call_site_ofs
284 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1) 322 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
285 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT); 323 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
286 sjlj_fc_data_ofs 324 sjlj_fc_data_ofs
287 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1) 325 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
288 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT); 326 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
289 sjlj_fc_personality_ofs 327 sjlj_fc_personality_ofs
290 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1) 328 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
291 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT); 329 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
292 sjlj_fc_lsda_ofs 330 sjlj_fc_lsda_ofs
293 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1) 331 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
294 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT); 332 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
295 sjlj_fc_jbuf_ofs 333 sjlj_fc_jbuf_ofs
296 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1) 334 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
297 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT); 335 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
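Each cached value is the field's byte offset inside SjLj_Function_Context: the byte part from DECL_FIELD_OFFSET plus the bit part scaled down by BITS_PER_UNIT. A worked example with made-up numbers, purely to show the arithmetic (not the real layout of any target):

#include <cassert>

int
main ()
{
  /* Hypothetical field placement, BITS_PER_UNIT == 8.  */
  const unsigned field_offset = 8;       /* DECL_FIELD_OFFSET, in bytes */
  const unsigned field_bit_offset = 32;  /* DECL_FIELD_BIT_OFFSET, in bits */
  const unsigned bits_per_unit = 8;

  unsigned byte_ofs = field_offset + field_bit_offset / bits_per_unit;
  assert (byte_ofs == 12);               /* 8 + 32/8 bytes into the context */
  return 0;
}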
336
337 #ifdef DONT_USE_BUILTIN_SETJMP
338 tmp = build_function_type_list (integer_type_node, TREE_TYPE (f_jbuf),
339 NULL);
340 setjmp_fn = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
341 get_identifier ("setjmp"), tmp);
342 TREE_PUBLIC (setjmp_fn) = 1;
343 DECL_EXTERNAL (setjmp_fn) = 1;
344 DECL_ASSEMBLER_NAME (setjmp_fn);
345 #endif
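On the DONT_USE_BUILTIN_SETJMP path the new code no longer goes through setjmp_libfunc; it builds an ordinary external declaration so the later call can be expanded like any other. In C terms the declaration built here corresponds loosely to the following sketch (the parameter is really the __jbuf array type constructed above, shown here as a plain pointer array):

/* Rough C equivalent of the declaration built above: a public,
   external "setjmp" taking the jump buffer and returning int.  */
extern int setjmp (void *__jbuf[]);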
298 } 346 }
299 } 347 }
300 348
301 void 349 void
302 init_eh_for_function (void) 350 init_eh_for_function (void)
303 { 351 {
304 cfun->eh = ggc_alloc_cleared_eh_status (); 352 cfun->eh = ggc_cleared_alloc<eh_status> ();
305 353
306 /* Make sure zero'th entries are used. */ 354 /* Make sure zero'th entries are used. */
307 VEC_safe_push (eh_region, gc, cfun->eh->region_array, NULL); 355 vec_safe_push (cfun->eh->region_array, (eh_region)0);
308 VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, NULL); 356 vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
309 } 357 }
310 358
311 /* Routines to generate the exception tree somewhat directly. 359 /* Routines to generate the exception tree somewhat directly.
312 These are used from tree-eh.c when processing exception related 360 These are used from tree-eh.c when processing exception related
313 nodes during tree optimization. */ 361 nodes during tree optimization. */
316 gen_eh_region (enum eh_region_type type, eh_region outer) 364 gen_eh_region (enum eh_region_type type, eh_region outer)
317 { 365 {
318 eh_region new_eh; 366 eh_region new_eh;
319 367
320 /* Insert a new blank region as a leaf in the tree. */ 368 /* Insert a new blank region as a leaf in the tree. */
321 new_eh = ggc_alloc_cleared_eh_region_d (); 369 new_eh = ggc_cleared_alloc<eh_region_d> ();
322 new_eh->type = type; 370 new_eh->type = type;
323 new_eh->outer = outer; 371 new_eh->outer = outer;
324 if (outer) 372 if (outer)
325 { 373 {
326 new_eh->next_peer = outer->inner; 374 new_eh->next_peer = outer->inner;
330 { 378 {
331 new_eh->next_peer = cfun->eh->region_tree; 379 new_eh->next_peer = cfun->eh->region_tree;
332 cfun->eh->region_tree = new_eh; 380 cfun->eh->region_tree = new_eh;
333 } 381 }
334 382
335 new_eh->index = VEC_length (eh_region, cfun->eh->region_array); 383 new_eh->index = vec_safe_length (cfun->eh->region_array);
336 VEC_safe_push (eh_region, gc, cfun->eh->region_array, new_eh); 384 vec_safe_push (cfun->eh->region_array, new_eh);
337 385
338 /* Copy the language's notion of whether to use __cxa_end_cleanup. */ 386 /* Copy the language's notion of whether to use __cxa_end_cleanup. */
339 if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup) 387 if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
340 new_eh->use_cxa_end_cleanup = true; 388 new_eh->use_cxa_end_cleanup = true;
341 389
373 type_node = type_list; 421 type_node = type_list;
374 for (; type_node; type_node = TREE_CHAIN (type_node)) 422 for (; type_node; type_node = TREE_CHAIN (type_node))
375 add_type_for_runtime (TREE_VALUE (type_node)); 423 add_type_for_runtime (TREE_VALUE (type_node));
376 } 424 }
377 425
378 c = ggc_alloc_cleared_eh_catch_d (); 426 c = ggc_cleared_alloc<eh_catch_d> ();
379 c->type_list = type_list; 427 c->type_list = type_list;
380 l = t->u.eh_try.last_catch; 428 l = t->u.eh_try.last_catch;
381 c->prev_catch = l; 429 c->prev_catch = l;
382 if (l) 430 if (l)
383 l->next_catch = c; 431 l->next_catch = c;
407 } 455 }
408 456
409 eh_landing_pad 457 eh_landing_pad
410 gen_eh_landing_pad (eh_region region) 458 gen_eh_landing_pad (eh_region region)
411 { 459 {
412 eh_landing_pad lp = ggc_alloc_cleared_eh_landing_pad_d (); 460 eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();
413 461
414 lp->next_lp = region->landing_pads; 462 lp->next_lp = region->landing_pads;
415 lp->region = region; 463 lp->region = region;
416 lp->index = VEC_length (eh_landing_pad, cfun->eh->lp_array); 464 lp->index = vec_safe_length (cfun->eh->lp_array);
417 region->landing_pads = lp; 465 region->landing_pads = lp;
418 466
419 VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, lp); 467 vec_safe_push (cfun->eh->lp_array, lp);
420 468
421 return lp; 469 return lp;
422 } 470 }
423 471
424 eh_region 472 eh_region
425 get_eh_region_from_number_fn (struct function *ifun, int i) 473 get_eh_region_from_number_fn (struct function *ifun, int i)
426 { 474 {
427 return VEC_index (eh_region, ifun->eh->region_array, i); 475 return (*ifun->eh->region_array)[i];
428 } 476 }
429 477
430 eh_region 478 eh_region
431 get_eh_region_from_number (int i) 479 get_eh_region_from_number (int i)
432 { 480 {
434 } 482 }
435 483
436 eh_landing_pad 484 eh_landing_pad
437 get_eh_landing_pad_from_number_fn (struct function *ifun, int i) 485 get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
438 { 486 {
439 return VEC_index (eh_landing_pad, ifun->eh->lp_array, i); 487 return (*ifun->eh->lp_array)[i];
440 } 488 }
441 489
442 eh_landing_pad 490 eh_landing_pad
443 get_eh_landing_pad_from_number (int i) 491 get_eh_landing_pad_from_number (int i)
444 { 492 {
447 495
448 eh_region 496 eh_region
449 get_eh_region_from_lp_number_fn (struct function *ifun, int i) 497 get_eh_region_from_lp_number_fn (struct function *ifun, int i)
450 { 498 {
451 if (i < 0) 499 if (i < 0)
452 return VEC_index (eh_region, ifun->eh->region_array, -i); 500 return (*ifun->eh->region_array)[-i];
453 else if (i == 0) 501 else if (i == 0)
454 return NULL; 502 return NULL;
455 else 503 else
456 { 504 {
457 eh_landing_pad lp; 505 eh_landing_pad lp;
458 lp = VEC_index (eh_landing_pad, ifun->eh->lp_array, i); 506 lp = (*ifun->eh->lp_array)[i];
459 return lp->region; 507 return lp->region;
460 } 508 }
461 } 509 }
462 510
463 eh_region 511 eh_region
479 527
480 struct duplicate_eh_regions_data 528 struct duplicate_eh_regions_data
481 { 529 {
482 duplicate_eh_regions_map label_map; 530 duplicate_eh_regions_map label_map;
483 void *label_map_data; 531 void *label_map_data;
484 struct pointer_map_t *eh_map; 532 hash_map<void *, void *> *eh_map;
485 }; 533 };
486 534
487 static void 535 static void
488 duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data, 536 duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
489 eh_region old_r, eh_region outer) 537 eh_region old_r, eh_region outer)
490 { 538 {
491 eh_landing_pad old_lp, new_lp; 539 eh_landing_pad old_lp, new_lp;
492 eh_region new_r; 540 eh_region new_r;
493 void **slot;
494 541
495 new_r = gen_eh_region (old_r->type, outer); 542 new_r = gen_eh_region (old_r->type, outer);
496 slot = pointer_map_insert (data->eh_map, (void *)old_r); 543 gcc_assert (!data->eh_map->put (old_r, new_r));
497 gcc_assert (*slot == NULL);
498 *slot = (void *)new_r;
499 544
500 switch (old_r->type) 545 switch (old_r->type)
501 { 546 {
502 case ERT_CLEANUP: 547 case ERT_CLEANUP:
503 break; 548 break;
524 else 569 else
525 new_r->u.allowed.label = NULL_TREE; 570 new_r->u.allowed.label = NULL_TREE;
526 break; 571 break;
527 572
528 case ERT_MUST_NOT_THROW: 573 case ERT_MUST_NOT_THROW:
529 new_r->u.must_not_throw = old_r->u.must_not_throw; 574 new_r->u.must_not_throw.failure_loc =
575 LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
576 new_r->u.must_not_throw.failure_decl =
577 old_r->u.must_not_throw.failure_decl;
530 break; 578 break;
531 } 579 }
532 580
533 for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp) 581 for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
534 { 582 {
535 /* Don't bother copying unused landing pads. */ 583 /* Don't bother copying unused landing pads. */
536 if (old_lp->post_landing_pad == NULL) 584 if (old_lp->post_landing_pad == NULL)
537 continue; 585 continue;
538 586
539 new_lp = gen_eh_landing_pad (new_r); 587 new_lp = gen_eh_landing_pad (new_r);
540 slot = pointer_map_insert (data->eh_map, (void *)old_lp); 588 gcc_assert (!data->eh_map->put (old_lp, new_lp));
541 gcc_assert (*slot == NULL);
542 *slot = (void *)new_lp;
543 589
544 new_lp->post_landing_pad 590 new_lp->post_landing_pad
545 = data->label_map (old_lp->post_landing_pad, data->label_map_data); 591 = data->label_map (old_lp->post_landing_pad, data->label_map_data);
546 EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index; 592 EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
547 } 593 }
558 The special case of COPY_REGION of NULL means all regions. 604 The special case of COPY_REGION of NULL means all regions.
559 Remap labels using MAP/MAP_DATA callback. Return a pointer map 605 Remap labels using MAP/MAP_DATA callback. Return a pointer map
560 that allows the caller to remap uses of both EH regions and 606 that allows the caller to remap uses of both EH regions and
561 EH landing pads. */ 607 EH landing pads. */
562 608
563 struct pointer_map_t * 609 hash_map<void *, void *> *
564 duplicate_eh_regions (struct function *ifun, 610 duplicate_eh_regions (struct function *ifun,
565 eh_region copy_region, int outer_lp, 611 eh_region copy_region, int outer_lp,
566 duplicate_eh_regions_map map, void *map_data) 612 duplicate_eh_regions_map map, void *map_data)
567 { 613 {
568 struct duplicate_eh_regions_data data; 614 struct duplicate_eh_regions_data data;
569 eh_region outer_region; 615 eh_region outer_region;
570 616
571 #ifdef ENABLE_CHECKING 617 if (flag_checking)
572 verify_eh_tree (ifun); 618 verify_eh_tree (ifun);
573 #endif
574 619
575 data.label_map = map; 620 data.label_map = map;
576 data.label_map_data = map_data; 621 data.label_map_data = map_data;
577 data.eh_map = pointer_map_create (); 622 data.eh_map = new hash_map<void *, void *>;
578 623
579 outer_region = get_eh_region_from_lp_number (outer_lp); 624 outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);
580 625
581 /* Copy all the regions in the subtree. */ 626 /* Copy all the regions in the subtree. */
582 if (copy_region) 627 if (copy_region)
583 duplicate_eh_regions_1 (&data, copy_region, outer_region); 628 duplicate_eh_regions_1 (&data, copy_region, outer_region);
584 else 629 else
586 eh_region r; 631 eh_region r;
587 for (r = ifun->eh->region_tree; r ; r = r->next_peer) 632 for (r = ifun->eh->region_tree; r ; r = r->next_peer)
588 duplicate_eh_regions_1 (&data, r, outer_region); 633 duplicate_eh_regions_1 (&data, r, outer_region);
589 } 634 }
590 635
591 #ifdef ENABLE_CHECKING 636 if (flag_checking)
592 verify_eh_tree (cfun); 637 verify_eh_tree (cfun);
593 #endif
594 638
595 return data.eh_map; 639 return data.eh_map;
596 } 640 }
597 641
598 /* Return the region that is outer to both REGION_A and REGION_B in IFUN. */ 642 /* Return the region that is outer to both REGION_A and REGION_B in IFUN. */
599 643
600 eh_region 644 eh_region
601 eh_region_outermost (struct function *ifun, eh_region region_a, 645 eh_region_outermost (struct function *ifun, eh_region region_a,
602 eh_region region_b) 646 eh_region region_b)
603 { 647 {
604 sbitmap b_outer;
605
606 gcc_assert (ifun->eh->region_array); 648 gcc_assert (ifun->eh->region_array);
607 gcc_assert (ifun->eh->region_tree); 649 gcc_assert (ifun->eh->region_tree);
608 650
609 b_outer = sbitmap_alloc (VEC_length (eh_region, ifun->eh->region_array)); 651 auto_sbitmap b_outer (ifun->eh->region_array->length ());
610 sbitmap_zero (b_outer); 652 bitmap_clear (b_outer);
611 653
612 do 654 do
613 { 655 {
614 SET_BIT (b_outer, region_b->index); 656 bitmap_set_bit (b_outer, region_b->index);
615 region_b = region_b->outer; 657 region_b = region_b->outer;
616 } 658 }
617 while (region_b); 659 while (region_b);
618 660
619 do 661 do
620 { 662 {
621 if (TEST_BIT (b_outer, region_a->index)) 663 if (bitmap_bit_p (b_outer, region_a->index))
622 break; 664 break;
623 region_a = region_a->outer; 665 region_a = region_a->outer;
624 } 666 }
625 while (region_a); 667 while (region_a);
626 668
627 sbitmap_free (b_outer);
628 return region_a; 669 return region_a;
629 } 670 }
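eh_region_outermost finds the nearest region enclosing both arguments by first flagging every ancestor of region_b in a bitmap, then walking out from region_a until a flagged index is hit; the change above only swaps the manual sbitmap management for auto_sbitmap. The same idea in a standalone form, using plain linked structs and std::vector<bool> instead of GCC's sbitmap (names are illustrative):

#include <vector>

struct region { int index; region *outer; };

/* Return the closest region that encloses both A and B, or null.  */
static region *
common_outer (region *a, region *b, unsigned num_regions)
{
  std::vector<bool> is_b_ancestor (num_regions, false);
  for (; b; b = b->outer)
    is_b_ancestor[b->index] = true;
  for (; a; a = a->outer)
    if (is_b_ancestor[a->index])
      return a;
  return nullptr;
}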
630 671
631 static int
632 t2r_eq (const void *pentry, const void *pdata)
633 {
634 const_tree const entry = (const_tree) pentry;
635 const_tree const data = (const_tree) pdata;
636
637 return TREE_PURPOSE (entry) == data;
638 }
639
640 static hashval_t
641 t2r_hash (const void *pentry)
642 {
643 const_tree const entry = (const_tree) pentry;
644 return TREE_HASH (TREE_PURPOSE (entry));
645 }
646
647 void 672 void
648 add_type_for_runtime (tree type) 673 add_type_for_runtime (tree type)
649 { 674 {
650 tree *slot;
651
652 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */ 675 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
653 if (TREE_CODE (type) == NOP_EXPR) 676 if (TREE_CODE (type) == NOP_EXPR)
654 return; 677 return;
655 678
656 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type, 679 bool existed = false;
657 TREE_HASH (type), INSERT); 680 tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
658 if (*slot == NULL) 681 if (!existed)
659 { 682 *slot = lang_hooks.eh_runtime_type (type);
660 tree runtime = lang_hooks.eh_runtime_type (type);
661 *slot = tree_cons (type, runtime, NULL_TREE);
662 }
663 } 683 }
664 684
665 tree 685 tree
666 lookup_type_for_runtime (tree type) 686 lookup_type_for_runtime (tree type)
667 { 687 {
668 tree *slot;
669
670 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */ 688 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
671 if (TREE_CODE (type) == NOP_EXPR) 689 if (TREE_CODE (type) == NOP_EXPR)
672 return type; 690 return type;
673 691
674 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
675 TREE_HASH (type), NO_INSERT);
676
677 /* We should have always inserted the data earlier. */ 692 /* We should have always inserted the data earlier. */
678 return TREE_VALUE (*slot); 693 return *type_to_runtime_map->get (type);
679 } 694 }
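add_type_for_runtime and lookup_type_for_runtime now use hash_map's get_or_insert and get directly, instead of storing a TREE_LIST cons in an htab and pulling TREE_VALUE back out. The caching pattern, shown standalone with std::unordered_map as an analogy chosen for illustration (not GCC's hash_map, and with string stand-ins for tree nodes and lang_hooks.eh_runtime_type):

#include <string>
#include <unordered_map>

using type_node = std::string;
static type_node make_runtime_type (const type_node &t) { return "rt:" + t; }

static std::unordered_map<type_node, type_node> type_to_runtime;

/* Compute the runtime type once and cache it, like get_or_insert above.  */
static void
add_type (const type_node &t)
{
  auto ins = type_to_runtime.emplace (t, type_node ());
  if (ins.second)                       /* not present before */
    ins.first->second = make_runtime_type (t);
}

/* Lookup assumes add_type already ran, like lookup_type_for_runtime.  */
static const type_node &
lookup_type (const type_node &t)
{
  return type_to_runtime.at (t);
}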
680 695
681 696
682 /* Represent an entry in @TTypes for either catch actions 697 /* Represent an entry in @TTypes for either catch actions
683 or exception filter actions. */ 698 or exception filter actions. */
684 struct ttypes_filter { 699 struct ttypes_filter {
685 tree t; 700 tree t;
686 int filter; 701 int filter;
687 }; 702 };
688 703
704 /* Helper for ttypes_filter hashing. */
705
706 struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
707 {
708 typedef tree_node *compare_type;
709 static inline hashval_t hash (const ttypes_filter *);
710 static inline bool equal (const ttypes_filter *, const tree_node *);
711 };
712
689 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA 713 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
690 (a tree) for a @TTypes type node we are thinking about adding. */ 714 (a tree) for a @TTypes type node we are thinking about adding. */
691 715
692 static int 716 inline bool
693 ttypes_filter_eq (const void *pentry, const void *pdata) 717 ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
694 { 718 {
695 const struct ttypes_filter *const entry
696 = (const struct ttypes_filter *) pentry;
697 const_tree const data = (const_tree) pdata;
698
699 return entry->t == data; 719 return entry->t == data;
700 } 720 }
701 721
702 static hashval_t 722 inline hashval_t
703 ttypes_filter_hash (const void *pentry) 723 ttypes_filter_hasher::hash (const ttypes_filter *entry)
704 { 724 {
705 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
706 return TREE_HASH (entry->t); 725 return TREE_HASH (entry->t);
707 } 726 }
727
728 typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
729
730
731 /* Helper for ehspec hashing. */
732
733 struct ehspec_hasher : free_ptr_hash <ttypes_filter>
734 {
735 static inline hashval_t hash (const ttypes_filter *);
736 static inline bool equal (const ttypes_filter *, const ttypes_filter *);
737 };
708 738
709 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes 739 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
710 exception specification list we are thinking about adding. */ 740 exception specification list we are thinking about adding. */
711 /* ??? Currently we use the type lists in the order given. Someone 741 /* ??? Currently we use the type lists in the order given. Someone
712 should put these in some canonical order. */ 742 should put these in some canonical order. */
713 743
714 static int 744 inline bool
715 ehspec_filter_eq (const void *pentry, const void *pdata) 745 ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
716 { 746 {
717 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
718 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
719
720 return type_list_equal (entry->t, data->t); 747 return type_list_equal (entry->t, data->t);
721 } 748 }
722 749
723 /* Hash function for exception specification lists. */ 750 /* Hash function for exception specification lists. */
724 751
725 static hashval_t 752 inline hashval_t
726 ehspec_filter_hash (const void *pentry) 753 ehspec_hasher::hash (const ttypes_filter *entry)
727 { 754 {
728 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
729 hashval_t h = 0; 755 hashval_t h = 0;
730 tree list; 756 tree list;
731 757
732 for (list = entry->t; list ; list = TREE_CHAIN (list)) 758 for (list = entry->t; list ; list = TREE_CHAIN (list))
733 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list)); 759 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
734 return h; 760 return h;
735 } 761 }
736 762
763 typedef hash_table<ehspec_hasher> ehspec_hash_type;
764
765
737 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH 766 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
738 to speed up the search. Return the filter value to be used. */ 767 to speed up the search. Return the filter value to be used. */
739 768
740 static int 769 static int
741 add_ttypes_entry (htab_t ttypes_hash, tree type) 770 add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
742 { 771 {
743 struct ttypes_filter **slot, *n; 772 struct ttypes_filter **slot, *n;
744 773
745 slot = (struct ttypes_filter **) 774 slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
746 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT); 775 INSERT);
747 776
748 if ((n = *slot) == NULL) 777 if ((n = *slot) == NULL)
749 { 778 {
750 /* Filter value is a 1 based table index. */ 779 /* Filter value is a 1 based table index. */
751 780
752 n = XNEW (struct ttypes_filter); 781 n = XNEW (struct ttypes_filter);
753 n->t = type; 782 n->t = type;
754 n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1; 783 n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
755 *slot = n; 784 *slot = n;
756 785
757 VEC_safe_push (tree, gc, cfun->eh->ttype_data, type); 786 vec_safe_push (cfun->eh->ttype_data, type);
758 } 787 }
759 788
760 return n->filter; 789 return n->filter;
761 } 790 }
762 791
763 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH 792 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
764 to speed up the search. Return the filter value to be used. */ 793 to speed up the search. Return the filter value to be used. */
765 794
766 static int 795 static int
767 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list) 796 add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
797 tree list)
768 { 798 {
769 struct ttypes_filter **slot, *n; 799 struct ttypes_filter **slot, *n;
770 struct ttypes_filter dummy; 800 struct ttypes_filter dummy;
771 801
772 dummy.t = list; 802 dummy.t = list;
773 slot = (struct ttypes_filter **) 803 slot = ehspec_hash->find_slot (&dummy, INSERT);
774 htab_find_slot (ehspec_hash, &dummy, INSERT);
775 804
776 if ((n = *slot) == NULL) 805 if ((n = *slot) == NULL)
777 { 806 {
778 int len; 807 int len;
779 808
780 if (targetm.arm_eabi_unwinder) 809 if (targetm.arm_eabi_unwinder)
781 len = VEC_length (tree, cfun->eh->ehspec_data.arm_eabi); 810 len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
782 else 811 else
783 len = VEC_length (uchar, cfun->eh->ehspec_data.other); 812 len = vec_safe_length (cfun->eh->ehspec_data.other);
784 813
785 /* Filter value is a -1 based byte index into a uleb128 buffer. */ 814 /* Filter value is a -1 based byte index into a uleb128 buffer. */
786 815
787 n = XNEW (struct ttypes_filter); 816 n = XNEW (struct ttypes_filter);
788 n->t = list; 817 n->t = list;
791 820
792 /* Generate a 0 terminated list of filter values. */ 821 /* Generate a 0 terminated list of filter values. */
793 for (; list ; list = TREE_CHAIN (list)) 822 for (; list ; list = TREE_CHAIN (list))
794 { 823 {
795 if (targetm.arm_eabi_unwinder) 824 if (targetm.arm_eabi_unwinder)
796 VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, 825 vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
797 TREE_VALUE (list));
798 else 826 else
799 { 827 {
800 /* Look up each type in the list and encode its filter 828 /* Look up each type in the list and encode its filter
801 value as a uleb128. */ 829 value as a uleb128. */
802 push_uleb128 (&cfun->eh->ehspec_data.other, 830 push_uleb128 (&cfun->eh->ehspec_data.other,
803 add_ttypes_entry (ttypes_hash, TREE_VALUE (list))); 831 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
804 } 832 }
805 } 833 }
806 if (targetm.arm_eabi_unwinder) 834 if (targetm.arm_eabi_unwinder)
807 VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, NULL_TREE); 835 vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
808 else 836 else
809 VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, 0); 837 vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
810 } 838 }
811 839
812 return n->filter; 840 return n->filter;
813 } 841 }
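On the non-ARM-EABI path each filter value is appended to ehspec_data.other as a ULEB128 byte sequence, terminated by a 0 byte, and the stored filter becomes a -1 based byte index into that buffer. push_uleb128 itself is declared above but its body is outside this hunk; a standard standalone ULEB128 encoder looks like this (a sketch of the standard encoding, not the except.c implementation):

#include <vector>

typedef unsigned char uchar;

/* Append VALUE to BUF in unsigned-LEB128 form: 7 data bits per byte,
   high bit set on every byte except the last.  */
static void
push_uleb128 (std::vector<uchar> &buf, unsigned int value)
{
  do
    {
      uchar byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      buf.push_back (byte);
    }
  while (value);
}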
814 842
819 847
820 void 848 void
821 assign_filter_values (void) 849 assign_filter_values (void)
822 { 850 {
823 int i; 851 int i;
824 htab_t ttypes, ehspec;
825 eh_region r; 852 eh_region r;
826 eh_catch c; 853 eh_catch c;
827 854
828 cfun->eh->ttype_data = VEC_alloc (tree, gc, 16); 855 vec_alloc (cfun->eh->ttype_data, 16);
829 if (targetm.arm_eabi_unwinder) 856 if (targetm.arm_eabi_unwinder)
830 cfun->eh->ehspec_data.arm_eabi = VEC_alloc (tree, gc, 64); 857 vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
831 else 858 else
832 cfun->eh->ehspec_data.other = VEC_alloc (uchar, gc, 64); 859 vec_alloc (cfun->eh->ehspec_data.other, 64);
833 860
834 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free); 861 ehspec_hash_type ehspec (31);
835 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free); 862 ttypes_hash_type ttypes (31);
836 863
837 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i) 864 for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
838 { 865 {
839 if (r == NULL) 866 if (r == NULL)
840 continue; 867 continue;
841 868
842 switch (r->type) 869 switch (r->type)
854 them in the region's dedicated list. */ 881 them in the region's dedicated list. */
855 tree tp_node = c->type_list; 882 tree tp_node = c->type_list;
856 883
857 for ( ; tp_node; tp_node = TREE_CHAIN (tp_node)) 884 for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
858 { 885 {
859 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node)); 886 int flt
860 tree flt_node = build_int_cst (NULL_TREE, flt); 887 = add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
888 tree flt_node = build_int_cst (integer_type_node, flt);
861 889
862 c->filter_list 890 c->filter_list
863 = tree_cons (NULL_TREE, flt_node, c->filter_list); 891 = tree_cons (NULL_TREE, flt_node, c->filter_list);
864 } 892 }
865 } 893 }
866 else 894 else
867 { 895 {
868 /* Get a filter value for the NULL list also since it 896 /* Get a filter value for the NULL list also since it
869 will need an action record anyway. */ 897 will need an action record anyway. */
870 int flt = add_ttypes_entry (ttypes, NULL); 898 int flt = add_ttypes_entry (&ttypes, NULL);
871 tree flt_node = build_int_cst (NULL_TREE, flt); 899 tree flt_node = build_int_cst (integer_type_node, flt);
872 900
873 c->filter_list 901 c->filter_list
874 = tree_cons (NULL_TREE, flt_node, NULL); 902 = tree_cons (NULL_TREE, flt_node, NULL);
875 } 903 }
876 } 904 }
877 break; 905 break;
878 906
879 case ERT_ALLOWED_EXCEPTIONS: 907 case ERT_ALLOWED_EXCEPTIONS:
880 r->u.allowed.filter 908 r->u.allowed.filter
881 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list); 909 = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
882 break; 910 break;
883 911
884 default: 912 default:
885 break; 913 break;
886 } 914 }
887 } 915 }
888
889 htab_delete (ttypes);
890 htab_delete (ehspec);
891 } 916 }
892 917
893 /* Emit SEQ into basic block just before INSN (that is assumed to be 918 /* Emit SEQ into basic block just before INSN (that is assumed to be
894 first instruction of some existing BB and return the newly 919 first instruction of some existing BB and return the newly
895 produced block. */ 920 produced block. */
896 static basic_block 921 static basic_block
897 emit_to_new_bb_before (rtx seq, rtx insn) 922 emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
898 { 923 {
899 rtx last; 924 rtx_insn *last;
900 basic_block bb; 925 basic_block bb;
901 edge e; 926 edge e;
902 edge_iterator ei; 927 edge_iterator ei;
903 928
904 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg 929 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
916 update_bb_for_insn (bb); 941 update_bb_for_insn (bb);
917 bb->flags |= BB_SUPERBLOCK; 942 bb->flags |= BB_SUPERBLOCK;
918 return bb; 943 return bb;
919 } 944 }
920 945
946 /* A subroutine of dw2_build_landing_pads, also used for edge splitting
947 at the rtl level. Emit the code required by the target at a landing
948 pad for the given region. */
949
950 void
951 expand_dw2_landing_pad_for_region (eh_region region)
952 {
953 if (targetm.have_exception_receiver ())
954 emit_insn (targetm.gen_exception_receiver ());
955 else if (targetm.have_nonlocal_goto_receiver ())
956 emit_insn (targetm.gen_nonlocal_goto_receiver ());
957 else
958 { /* Nothing */ }
959
960 if (region->exc_ptr_reg)
961 emit_move_insn (region->exc_ptr_reg,
962 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
963 if (region->filter_reg)
964 emit_move_insn (region->filter_reg,
965 gen_rtx_REG (targetm.eh_return_filter_mode (),
966 EH_RETURN_DATA_REGNO (1)));
967 }
968
921 /* Expand the extra code needed at landing pads for dwarf2 unwinding. */ 969 /* Expand the extra code needed at landing pads for dwarf2 unwinding. */
922 970
923 static void 971 static void
924 dw2_build_landing_pads (void) 972 dw2_build_landing_pads (void)
925 { 973 {
926 int i; 974 int i;
927 eh_landing_pad lp; 975 eh_landing_pad lp;
928 976 int e_flags = EDGE_FALLTHRU;
929 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i) 977
930 { 978 /* If we're going to partition blocks, we need to be able to add
931 eh_region region; 979 new landing pads later, which means that we need to hold on to
980 the post-landing-pad block. Prevent it from being merged away.
981 We'll remove this bit after partitioning. */
982 if (flag_reorder_blocks_and_partition)
983 e_flags |= EDGE_PRESERVE;
984
985 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
986 {
932 basic_block bb; 987 basic_block bb;
933 rtx seq; 988 rtx_insn *seq;
934 edge e;
935 989
936 if (lp == NULL || lp->post_landing_pad == NULL) 990 if (lp == NULL || lp->post_landing_pad == NULL)
937 continue; 991 continue;
938 992
939 start_sequence (); 993 start_sequence ();
940 994
941 lp->landing_pad = gen_label_rtx (); 995 lp->landing_pad = gen_label_rtx ();
942 emit_label (lp->landing_pad); 996 emit_label (lp->landing_pad);
943 LABEL_PRESERVE_P (lp->landing_pad) = 1; 997 LABEL_PRESERVE_P (lp->landing_pad) = 1;
944 998
945 #ifdef HAVE_exception_receiver 999 expand_dw2_landing_pad_for_region (lp->region);
946 if (HAVE_exception_receiver)
947 emit_insn (gen_exception_receiver ());
948 else
949 #endif
950 #ifdef HAVE_nonlocal_goto_receiver
951 if (HAVE_nonlocal_goto_receiver)
952 emit_insn (gen_nonlocal_goto_receiver ());
953 else
954 #endif
955 { /* Nothing */ }
956
957 region = lp->region;
958 if (region->exc_ptr_reg)
959 emit_move_insn (region->exc_ptr_reg,
960 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
961 if (region->filter_reg)
962 emit_move_insn (region->filter_reg,
963 gen_rtx_REG (targetm.eh_return_filter_mode (),
964 EH_RETURN_DATA_REGNO (1)));
965 1000
966 seq = get_insns (); 1001 seq = get_insns ();
967 end_sequence (); 1002 end_sequence ();
968 1003
969 bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad)); 1004 bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
970 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU); 1005 bb->count = bb->next_bb->count;
971 e->count = bb->count; 1006 bb->frequency = bb->next_bb->frequency;
972 e->probability = REG_BR_PROB_BASE; 1007 make_single_succ_edge (bb, bb->next_bb, e_flags);
1008 if (current_loops)
1009 {
1010 struct loop *loop = bb->next_bb->loop_father;
1011 /* If we created a pre-header block, add the new block to the
1012 outer loop, otherwise to the loop itself. */
1013 if (bb->next_bb == loop->header)
1014 add_bb_to_loop (bb, loop_outer (loop));
1015 else
1016 add_bb_to_loop (bb, loop);
1017 }
973 } 1018 }
974 } 1019 }
975 1020
976 1021
977 static VEC (int, heap) *sjlj_lp_call_site_index; 1022 static vec<int> sjlj_lp_call_site_index;
978 1023
979 /* Process all active landing pads. Assign each one a compact dispatch 1024 /* Process all active landing pads. Assign each one a compact dispatch
980 index, and a call-site index. */ 1025 index, and a call-site index. */
981 1026
982 static int 1027 static int
983 sjlj_assign_call_site_values (void) 1028 sjlj_assign_call_site_values (void)
984 { 1029 {
985 htab_t ar_hash; 1030 action_hash_type ar_hash (31);
986 int i, disp_index; 1031 int i, disp_index;
987 eh_landing_pad lp; 1032 eh_landing_pad lp;
988 1033
989 crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64); 1034 vec_alloc (crtl->eh.action_record_data, 64);
990 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
991 1035
992 disp_index = 0; 1036 disp_index = 0;
993 call_site_base = 1; 1037 call_site_base = 1;
994 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i) 1038 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
995 if (lp && lp->post_landing_pad) 1039 if (lp && lp->post_landing_pad)
996 { 1040 {
997 int action, call_site; 1041 int action, call_site;
998 1042
999 /* First: build the action table. */ 1043 /* First: build the action table. */
1000 action = collect_one_action_chain (ar_hash, lp->region); 1044 action = collect_one_action_chain (&ar_hash, lp->region);
1001 1045
1002 /* Next: assign call-site values. If dwarf2 terms, this would be 1046 /* Next: assign call-site values. If dwarf2 terms, this would be
1003 the region number assigned by convert_to_eh_region_ranges, but 1047 the region number assigned by convert_to_eh_region_ranges, but
1004 handles no-action and must-not-throw differently. */ 1048 handles no-action and must-not-throw differently. */
1005 /* Map must-not-throw to otherwise unused call-site index 0. */ 1049 /* Map must-not-throw to otherwise unused call-site index 0. */
1009 else if (action == -1) 1053 else if (action == -1)
1010 call_site = -1; 1054 call_site = -1;
1011 /* Otherwise, look it up in the table. */ 1055 /* Otherwise, look it up in the table. */
1012 else 1056 else
1013 call_site = add_call_site (GEN_INT (disp_index), action, 0); 1057 call_site = add_call_site (GEN_INT (disp_index), action, 0);
1014 VEC_replace (int, sjlj_lp_call_site_index, i, call_site); 1058 sjlj_lp_call_site_index[i] = call_site;
1015 1059
1016 disp_index++; 1060 disp_index++;
1017 } 1061 }
1018 1062
1019 htab_delete (ar_hash);
1020
1021 return disp_index; 1063 return disp_index;
1022 } 1064 }
1023 1065
1024 /* Emit code to record the current call-site index before every 1066 /* Emit code to record the current call-site index before every
1025 insn that can throw. */ 1067 insn that can throw. */
1026 1068
1027 static void 1069 static void
1028 sjlj_mark_call_sites (void) 1070 sjlj_mark_call_sites (void)
1029 { 1071 {
1030 int last_call_site = -2; 1072 int last_call_site = -2;
1031 rtx insn, mem; 1073 rtx_insn *insn;
1074 rtx mem;
1032 1075
1033 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn)) 1076 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1034 { 1077 {
1035 eh_landing_pad lp; 1078 eh_landing_pad lp;
1036 eh_region r; 1079 eh_region r;
1037 bool nothrow; 1080 bool nothrow;
1038 int this_call_site; 1081 int this_call_site;
1039 rtx before, p; 1082 rtx_insn *before, *p;
1040 1083
1041 /* Reset value tracking at extended basic block boundaries. */ 1084 /* Reset value tracking at extended basic block boundaries. */
1042 if (LABEL_P (insn)) 1085 if (LABEL_P (insn))
1043 last_call_site = -2; 1086 last_call_site = -2;
1087
1088 /* If the function allocates dynamic stack space, the context must
1089 be updated after every allocation/deallocation accordingly. */
1090 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
1091 {
1092 rtx buf_addr;
1093
1094 start_sequence ();
1095 buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
1096 sjlj_fc_jbuf_ofs);
1097 expand_builtin_update_setjmp_buf (buf_addr);
1098 p = get_insns ();
1099 end_sequence ();
1100 emit_insn_before (p, insn);
1101 }
1044 1102
1045 if (! INSN_P (insn)) 1103 if (! INSN_P (insn))
1046 continue; 1104 continue;
1047 1105
1048 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp); 1106 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1049 if (nothrow) 1107 if (nothrow)
1050 continue; 1108 continue;
1051 if (lp) 1109 if (lp)
1052 this_call_site = VEC_index (int, sjlj_lp_call_site_index, lp->index); 1110 this_call_site = sjlj_lp_call_site_index[lp->index];
1053 else if (r == NULL) 1111 else if (r == NULL)
1054 { 1112 {
1055 /* Calls (and trapping insns) without notes are outside any 1113 /* Calls (and trapping insns) without notes are outside any
1056 exception handling region in this function. Mark them as 1114 exception handling region in this function. Mark them as
1057 no action. */ 1115 no action. */
1070 continue; 1128 continue;
1071 1129
1072 /* Don't separate a call from it's argument loads. */ 1130 /* Don't separate a call from it's argument loads. */
1073 before = insn; 1131 before = insn;
1074 if (CALL_P (insn)) 1132 if (CALL_P (insn))
1075 before = find_first_parameter_load (insn, NULL_RTX); 1133 before = find_first_parameter_load (insn, NULL);
1076 1134
1077 start_sequence (); 1135 start_sequence ();
1078 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node), 1136 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
1079 sjlj_fc_call_site_ofs); 1137 sjlj_fc_call_site_ofs);
1080 emit_move_insn (mem, GEN_INT (this_call_site)); 1138 emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
1081 p = get_insns (); 1139 p = get_insns ();
1082 end_sequence (); 1140 end_sequence ();
1083 1141
1084 emit_insn_before (p, before); 1142 emit_insn_before (p, before);
1085 last_call_site = this_call_site; 1143 last_call_site = this_call_site;
1087 } 1145 }
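sjlj_mark_call_sites stores the current call-site index into the function context before each insn that can throw, but skips the store when the index matches the last one written on this straight-line path and forgets that knowledge at labels (extended basic block boundaries). The same redundancy elimination in a standalone sketch (the insn struct and printf are stand-ins, not RTL):

#include <cstdio>
#include <vector>

struct insn { bool is_label; bool can_throw; int call_site; };

/* Emit a "store" only when the call-site index changes on a
   straight-line path; reset tracking at every label.  */
static void
mark_call_sites (const std::vector<insn> &insns)
{
  int last_call_site = -2;              /* nothing stored yet */
  for (const insn &i : insns)
    {
      if (i.is_label)
        { last_call_site = -2; continue; }   /* new extended basic block */
      if (!i.can_throw || i.call_site == last_call_site)
        continue;
      std::printf ("store call-site %d\n", i.call_site);
      last_call_site = i.call_site;
    }
}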
1088 1146
1089 /* Construct the SjLj_Function_Context. */ 1147 /* Construct the SjLj_Function_Context. */
1090 1148
1091 static void 1149 static void
1092 sjlj_emit_function_enter (rtx dispatch_label) 1150 sjlj_emit_function_enter (rtx_code_label *dispatch_label)
1093 { 1151 {
1094 rtx fn_begin, fc, mem, seq; 1152 rtx_insn *fn_begin, *seq;
1153 rtx fc, mem;
1095 bool fn_begin_outside_block; 1154 bool fn_begin_outside_block;
1096 rtx personality = get_personality_function (current_function_decl); 1155 rtx personality = get_personality_function (current_function_decl);
1097 1156
1098 fc = crtl->eh.sjlj_fc; 1157 fc = crtl->eh.sjlj_fc;
1099 1158
1120 else 1179 else
1121 emit_move_insn (mem, const0_rtx); 1180 emit_move_insn (mem, const0_rtx);
1122 1181
1123 if (dispatch_label) 1182 if (dispatch_label)
1124 { 1183 {
1184 rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);
1185
1125 #ifdef DONT_USE_BUILTIN_SETJMP 1186 #ifdef DONT_USE_BUILTIN_SETJMP
1126 rtx x, last; 1187 addr = copy_addr_to_reg (addr);
1127 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE, 1188 addr = convert_memory_address (ptr_mode, addr);
1128 TYPE_MODE (integer_type_node), 1, 1189 tree addr_tree = make_tree (ptr_type_node, addr);
1129 plus_constant (XEXP (fc, 0), 1190
1130 sjlj_fc_jbuf_ofs), Pmode); 1191 tree call_expr = build_call_expr (setjmp_fn, 1, addr_tree);
1192 rtx x = expand_call (call_expr, NULL_RTX, false);
1131 1193
1132 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0, 1194 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
1133 TYPE_MODE (integer_type_node), 0, 1195 TYPE_MODE (integer_type_node), 0,
1134 dispatch_label); 1196 dispatch_label,
1135 last = get_last_insn (); 1197 profile_probability::unlikely ());
1136 if (JUMP_P (last) && any_condjump_p (last))
1137 {
1138 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
1139 add_reg_note (last, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE / 100));
1140 }
1141 #else 1198 #else
1142 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), 1199 expand_builtin_setjmp_setup (addr, dispatch_label);
1143 sjlj_fc_jbuf_ofs),
1144 dispatch_label);
1145 #endif 1200 #endif
1146 } 1201 }
1147 1202
1148 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode, 1203 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1149 1, XEXP (fc, 0), Pmode); 1204 XEXP (fc, 0), Pmode);
1150 1205
1151 seq = get_insns (); 1206 seq = get_insns ();
1152 end_sequence (); 1207 end_sequence ();
1153 1208
1154 /* ??? Instead of doing this at the beginning of the function, 1209 /* ??? Instead of doing this at the beginning of the function,
1163 break; 1218 break;
1164 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin)) 1219 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
1165 fn_begin_outside_block = false; 1220 fn_begin_outside_block = false;
1166 } 1221 }
1167 1222
1223 #ifdef DONT_USE_BUILTIN_SETJMP
1224 if (dispatch_label)
1225 {
1226 /* The sequence contains a branch in the middle so we need to force
1227 the creation of a new basic block by means of BB_SUPERBLOCK. */
1228 if (fn_begin_outside_block)
1229 {
1230 basic_block bb
1231 = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1232 if (JUMP_P (BB_END (bb)))
1233 emit_insn_before (seq, BB_END (bb));
1234 else
1235 emit_insn_after (seq, BB_END (bb));
1236 }
1237 else
1238 emit_insn_after (seq, fn_begin);
1239
1240 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flags |= BB_SUPERBLOCK;
1241 return;
1242 }
1243 #endif
1244
1168 if (fn_begin_outside_block) 1245 if (fn_begin_outside_block)
1169 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR)); 1246 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1170 else 1247 else
1171 emit_insn_after (seq, fn_begin); 1248 emit_insn_after (seq, fn_begin);
1172 } 1249 }
1173 1250
1174 /* Call back from expand_function_end to know where we should put 1251 /* Call back from expand_function_end to know where we should put
1175 the call to unwind_sjlj_unregister_libfunc if needed. */ 1252 the call to unwind_sjlj_unregister_libfunc if needed. */
1176 1253
1177 void 1254 void
1178 sjlj_emit_function_exit_after (rtx after) 1255 sjlj_emit_function_exit_after (rtx_insn *after)
1179 { 1256 {
1180 crtl->eh.sjlj_exit_after = after; 1257 crtl->eh.sjlj_exit_after = after;
1181 } 1258 }
1182 1259
1183 static void 1260 static void
1184 sjlj_emit_function_exit (void) 1261 sjlj_emit_function_exit (void)
1185 { 1262 {
1186 rtx seq, insn; 1263 rtx_insn *seq, *insn;
1187 1264
1188 start_sequence (); 1265 start_sequence ();
1189 1266
1190 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode, 1267 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1191 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode); 1268 XEXP (crtl->eh.sjlj_fc, 0), Pmode);
1192 1269
1193 seq = get_insns (); 1270 seq = get_insns ();
1194 end_sequence (); 1271 end_sequence ();
1195 1272
1196 /* ??? Really this can be done in any block at loop level 0 that 1273 /* ??? Really this can be done in any block at loop level 0 that
1203 1280
1204 emit_insn_after (seq, insn); 1281 emit_insn_after (seq, insn);
1205 } 1282 }
1206 1283
1207 static void 1284 static void
1208 sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch) 1285 sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
1209 { 1286 {
1210 enum machine_mode unwind_word_mode = targetm.unwind_word_mode (); 1287 scalar_int_mode unwind_word_mode = targetm.unwind_word_mode ();
1211 enum machine_mode filter_mode = targetm.eh_return_filter_mode (); 1288 scalar_int_mode filter_mode = targetm.eh_return_filter_mode ();
1212 eh_landing_pad lp; 1289 eh_landing_pad lp;
1213 rtx mem, seq, fc, before, exc_ptr_reg, filter_reg; 1290 rtx mem, fc, exc_ptr_reg, filter_reg;
1214 rtx first_reachable_label; 1291 rtx_insn *seq;
1215 basic_block bb; 1292 basic_block bb;
1216 eh_region r; 1293 eh_region r;
1217 edge e;
1218 int i, disp_index; 1294 int i, disp_index;
1219 gimple switch_stmt; 1295 vec<tree> dispatch_labels = vNULL;
1220 1296
1221 fc = crtl->eh.sjlj_fc; 1297 fc = crtl->eh.sjlj_fc;
1222 1298
1223 start_sequence (); 1299 start_sequence ();
1224 1300
1231 making sure that the label doesn't vanish. The only other caller 1307 making sure that the label doesn't vanish. The only other caller
1232 is the expander for __builtin_setjmp_receiver, which places this 1308 is the expander for __builtin_setjmp_receiver, which places this
1233 label on the nonlocal_goto_label list. Since we're modeling these 1309 label on the nonlocal_goto_label list. Since we're modeling these
1234 CFG edges more exactly, we can use the forced_labels list instead. */ 1310 CFG edges more exactly, we can use the forced_labels list instead. */
1235 LABEL_PRESERVE_P (dispatch_label) = 1; 1311 LABEL_PRESERVE_P (dispatch_label) = 1;
1236 forced_labels 1312 vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
1237 = gen_rtx_EXPR_LIST (VOIDmode, dispatch_label, forced_labels);
1238 #endif 1313 #endif
1239 1314
1240 /* Load up exc_ptr and filter values from the function context. */ 1315 /* Load up exc_ptr and filter values from the function context. */
1241 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs); 1316 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
1242 if (unwind_word_mode != ptr_mode) 1317 if (unwind_word_mode != ptr_mode)
1256 filter_reg = force_reg (filter_mode, mem); 1331 filter_reg = force_reg (filter_mode, mem);
1257 1332
1258 /* Jump to one of the directly reachable regions. */ 1333 /* Jump to one of the directly reachable regions. */
1259 1334
1260 disp_index = 0; 1335 disp_index = 0;
1261 first_reachable_label = NULL; 1336 rtx_code_label *first_reachable_label = NULL;
1262 1337
1263 /* If there's exactly one call site in the function, don't bother 1338 /* If there's exactly one call site in the function, don't bother
1264 generating a switch statement. */ 1339 generating a switch statement. */
1265 switch_stmt = NULL;
1266 if (num_dispatch > 1) 1340 if (num_dispatch > 1)
1267 { 1341 dispatch_labels.create (num_dispatch);
1268 tree disp; 1342
1269 1343 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1270 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
1271 sjlj_fc_call_site_ofs);
1272 disp = make_tree (integer_type_node, mem);
1273
1274 switch_stmt = gimple_build_switch_nlabels (num_dispatch, disp, NULL);
1275 }
1276
1277 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
1278 if (lp && lp->post_landing_pad) 1344 if (lp && lp->post_landing_pad)
1279 { 1345 {
1280 rtx seq2, label; 1346 rtx_insn *seq2;
1347 rtx_code_label *label;
1281 1348
1282 start_sequence (); 1349 start_sequence ();
1283 1350
1284 lp->landing_pad = dispatch_label; 1351 lp->landing_pad = dispatch_label;
1285 1352
1286 if (num_dispatch > 1) 1353 if (num_dispatch > 1)
1287 { 1354 {
1288 tree t_label, case_elt; 1355 tree t_label, case_elt, t;
1289 1356
1290 t_label = create_artificial_label (UNKNOWN_LOCATION); 1357 t_label = create_artificial_label (UNKNOWN_LOCATION);
1291 case_elt = build3 (CASE_LABEL_EXPR, void_type_node, 1358 t = build_int_cst (integer_type_node, disp_index);
1292 build_int_cst (NULL, disp_index), 1359 case_elt = build_case_label (t, NULL, t_label);
1293 NULL, t_label); 1360 dispatch_labels.quick_push (case_elt);
1294 gimple_switch_set_label (switch_stmt, disp_index, case_elt); 1361 label = jump_target_rtx (t_label);
1295
1296 label = label_rtx (t_label);
1297 } 1362 }
1298 else 1363 else
1299 label = gen_label_rtx (); 1364 label = gen_label_rtx ();
1300 1365
1301 if (disp_index == 0) 1366 if (disp_index == 0)
1309 emit_move_insn (r->filter_reg, filter_reg); 1374 emit_move_insn (r->filter_reg, filter_reg);
1310 1375
1311 seq2 = get_insns (); 1376 seq2 = get_insns ();
1312 end_sequence (); 1377 end_sequence ();
1313 1378
1314 before = label_rtx (lp->post_landing_pad); 1379 rtx_insn *before = label_rtx (lp->post_landing_pad);
1315 bb = emit_to_new_bb_before (seq2, before); 1380 bb = emit_to_new_bb_before (seq2, before);
1316 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU); 1381 make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1317 e->count = bb->count; 1382 if (current_loops)
1318 e->probability = REG_BR_PROB_BASE; 1383 {
1384 struct loop *loop = bb->next_bb->loop_father;
1385 /* If we created a pre-header block, add the new block to the
1386 outer loop, otherwise to the loop itself. */
1387 if (bb->next_bb == loop->header)
1388 add_bb_to_loop (bb, loop_outer (loop));
1389 else
1390 add_bb_to_loop (bb, loop);
1391 /* ??? For multiple dispatches we will end up with edges
1392 from the loop tree root into this loop, making it a
1393 multiple-entry loop. Discard all affected loops. */
1394 if (num_dispatch > 1)
1395 {
1396 for (loop = bb->loop_father;
1397 loop_outer (loop); loop = loop_outer (loop))
1398 mark_loop_for_removal (loop);
1399 }
1400 }
1319 1401
1320 disp_index++; 1402 disp_index++;
1321 } 1403 }
1322 gcc_assert (disp_index == num_dispatch); 1404 gcc_assert (disp_index == num_dispatch);
1323 1405
1324 if (num_dispatch > 1) 1406 if (num_dispatch > 1)
1325 { 1407 {
1326 expand_case (switch_stmt); 1408 rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
1327 expand_builtin_trap (); 1409 sjlj_fc_call_site_ofs);
1410 expand_sjlj_dispatch_table (disp, dispatch_labels);
1328 } 1411 }
1329 1412
1330 seq = get_insns (); 1413 seq = get_insns ();
1331 end_sequence (); 1414 end_sequence ();
1332 1415
1333 bb = emit_to_new_bb_before (seq, first_reachable_label); 1416 bb = emit_to_new_bb_before (seq, first_reachable_label);
1334 if (num_dispatch == 1) 1417 if (num_dispatch == 1)
1335 { 1418 {
1336 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU); 1419 make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1337 e->count = bb->count; 1420 if (current_loops)
1338 e->probability = REG_BR_PROB_BASE; 1421 {
1422 struct loop *loop = bb->next_bb->loop_father;
1423 /* If we created a pre-header block, add the new block to the
1424 outer loop, otherwise to the loop itself. */
1425 if (bb->next_bb == loop->header)
1426 add_bb_to_loop (bb, loop_outer (loop));
1427 else
1428 add_bb_to_loop (bb, loop);
1429 }
1430 }
1431 else
1432 {
1433 /* We are not wiring up edges here, but as the dispatcher call
1434 is at the beginning of the function, simply associate the block with the
1435 outermost (non-)loop. */
1436 if (current_loops)
1437 add_bb_to_loop (bb, current_loops->tree_root);
1339 } 1438 }
1340 } 1439 }
1341 1440
1342 static void 1441 static void
1343 sjlj_build_landing_pads (void) 1442 sjlj_build_landing_pads (void)
1344 { 1443 {
1345 int num_dispatch; 1444 int num_dispatch;
1346 1445
1347 num_dispatch = VEC_length (eh_landing_pad, cfun->eh->lp_array); 1446 num_dispatch = vec_safe_length (cfun->eh->lp_array);
1348 if (num_dispatch == 0) 1447 if (num_dispatch == 0)
1349 return; 1448 return;
1350 VEC_safe_grow (int, heap, sjlj_lp_call_site_index, num_dispatch); 1449 sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);
1351 1450
1352 num_dispatch = sjlj_assign_call_site_values (); 1451 num_dispatch = sjlj_assign_call_site_values ();
1353 if (num_dispatch > 0) 1452 if (num_dispatch > 0)
1354 { 1453 {
1355 rtx dispatch_label = gen_label_rtx (); 1454 rtx_code_label *dispatch_label = gen_label_rtx ();
1356 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node, 1455 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1357 TYPE_MODE (sjlj_fc_type_node), 1456 TYPE_MODE (sjlj_fc_type_node),
1358 TYPE_ALIGN (sjlj_fc_type_node)); 1457 TYPE_ALIGN (sjlj_fc_type_node));
1359 crtl->eh.sjlj_fc 1458 crtl->eh.sjlj_fc
1360 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node), 1459 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1378 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node), 1477 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1379 int_size_in_bytes (sjlj_fc_type_node), 1478 int_size_in_bytes (sjlj_fc_type_node),
1380 align); 1479 align);
1381 1480
1382 sjlj_mark_call_sites (); 1481 sjlj_mark_call_sites ();
1383 sjlj_emit_function_enter (NULL_RTX); 1482 sjlj_emit_function_enter (NULL);
1384 sjlj_emit_function_exit (); 1483 sjlj_emit_function_exit ();
1385 } 1484 }
1386 1485
1387 VEC_free (int, heap, sjlj_lp_call_site_index); 1486 sjlj_lp_call_site_index.release ();
1487 }
1488
1489 /* Update the sjlj function context. This function should be called
1490 whenever we allocate or deallocate dynamic stack space. */
1491
1492 void
1493 update_sjlj_context (void)
1494 {
1495 if (!flag_exceptions)
1496 return;
1497
1498 emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
1388 } 1499 }
1389 1500
1390 /* After initial rtl generation, call back to finish generating 1501 /* After initial rtl generation, call back to finish generating
1391 exception support code. */ 1502 exception support code. */
1392 1503
1393 static void 1504 void
1394 finish_eh_generation (void) 1505 finish_eh_generation (void)
1395 { 1506 {
1396 basic_block bb; 1507 basic_block bb;
1397 1508
1398 /* Construct the landing pads. */ 1509 /* Construct the landing pads. */
1399 if (targetm.except_unwind_info (&global_options) == UI_SJLJ) 1510 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1400 sjlj_build_landing_pads (); 1511 sjlj_build_landing_pads ();
1401 else 1512 else
1402 dw2_build_landing_pads (); 1513 dw2_build_landing_pads ();
1403 break_superblocks (); 1514 break_superblocks ();
1404 1515
1405 if (targetm.except_unwind_info (&global_options) == UI_SJLJ 1516 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
1406 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */ 1517 /* Kludge for Alpha (see alpha_gp_save_rtx). */
1407 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r) 1518 || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
1408 commit_edge_insertions (); 1519 commit_edge_insertions ();
1409 1520
1410 /* Redirect all EH edges from the post_landing_pad to the landing pad. */ 1521 /* Redirect all EH edges from the post_landing_pad to the landing pad. */
1411 FOR_EACH_BB (bb) 1522 FOR_EACH_BB_FN (bb, cfun)
1412 { 1523 {
1413 eh_landing_pad lp; 1524 eh_landing_pad lp;
1414 edge_iterator ei; 1525 edge_iterator ei;
1415 edge e; 1526 edge e;
1416 1527
1435 ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL 1546 ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
1436 : EDGE_ABNORMAL); 1547 : EDGE_ABNORMAL);
1437 } 1548 }
1438 } 1549 }
1439 } 1550 }
1440
1441 static bool
1442 gate_handle_eh (void)
1443 {
1444 /* Nothing to do if no regions created. */
1445 return cfun->eh->region_tree != NULL;
1446 }
1447
1448 /* Complete generation of exception handling code. */
1449 static unsigned int
1450 rest_of_handle_eh (void)
1451 {
1452 finish_eh_generation ();
1453 cleanup_cfg (CLEANUP_NO_INSN_DEL);
1454 return 0;
1455 }
1456
1457 struct rtl_opt_pass pass_rtl_eh =
1458 {
1459 {
1460 RTL_PASS,
1461 "rtl eh", /* name */
1462 gate_handle_eh, /* gate */
1463 rest_of_handle_eh, /* execute */
1464 NULL, /* sub */
1465 NULL, /* next */
1466 0, /* static_pass_number */
1467 TV_JUMP, /* tv_id */
1468 0, /* properties_required */
1469 0, /* properties_provided */
1470 0, /* properties_destroyed */
1471 0, /* todo_flags_start */
1472 TODO_dump_func /* todo_flags_finish */
1473 }
1474 };
1475 1551
1476 /* This section handles removing dead code for flow. */ 1552 /* This section handles removing dead code for flow. */
1477 1553
1478 void 1554 void
1479 remove_eh_landing_pad (eh_landing_pad lp) 1555 remove_eh_landing_pad (eh_landing_pad lp)
1484 continue; 1560 continue;
1485 *pp = lp->next_lp; 1561 *pp = lp->next_lp;
1486 1562
1487 if (lp->post_landing_pad) 1563 if (lp->post_landing_pad)
1488 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0; 1564 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1489 VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL); 1565 (*cfun->eh->lp_array)[lp->index] = NULL;
1490 } 1566 }
1491 1567
1492 /* Splice REGION from the region tree. */ 1568 /* Splice the EH region at PP from the region tree. */
1493 1569
1494 void 1570 static void
1495 remove_eh_handler (eh_region region) 1571 remove_eh_handler_splicer (eh_region *pp)
1496 { 1572 {
1497 eh_region *pp, *pp_start, p, outer; 1573 eh_region region = *pp;
1498 eh_landing_pad lp; 1574 eh_landing_pad lp;
1499 1575
1500 for (lp = region->landing_pads; lp ; lp = lp->next_lp) 1576 for (lp = region->landing_pads; lp ; lp = lp->next_lp)
1501 { 1577 {
1502 if (lp->post_landing_pad) 1578 if (lp->post_landing_pad)
1503 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0; 1579 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1504 VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL); 1580 (*cfun->eh->lp_array)[lp->index] = NULL;
1505 } 1581 }
1506 1582
1507 outer = region->outer;
1508 if (outer)
1509 pp_start = &outer->inner;
1510 else
1511 pp_start = &cfun->eh->region_tree;
1512 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1513 continue;
1514 if (region->inner) 1583 if (region->inner)
1515 { 1584 {
1585 eh_region p, outer;
1586 outer = region->outer;
1587
1516 *pp = p = region->inner; 1588 *pp = p = region->inner;
1517 do 1589 do
1518 { 1590 {
1519 p->outer = outer; 1591 p->outer = outer;
1520 pp = &p->next_peer; 1592 pp = &p->next_peer;
1522 } 1594 }
1523 while (p); 1595 while (p);
1524 } 1596 }
1525 *pp = region->next_peer; 1597 *pp = region->next_peer;
1526 1598
1527 VEC_replace (eh_region, cfun->eh->region_array, region->index, NULL); 1599 (*cfun->eh->region_array)[region->index] = NULL;
1600 }
1601
1602 /* Splice a single EH region REGION from the region tree.
1603
1604 To unlink REGION, we need to find the pointer to it with a relatively
1605 expensive search in REGION's outer region. If you are going to
1606 remove a number of handlers, using remove_unreachable_eh_regions may
1607 be a better option. */
1608
1609 void
1610 remove_eh_handler (eh_region region)
1611 {
1612 eh_region *pp, *pp_start, p, outer;
1613
1614 outer = region->outer;
1615 if (outer)
1616 pp_start = &outer->inner;
1617 else
1618 pp_start = &cfun->eh->region_tree;
1619 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1620 continue;
1621
1622 remove_eh_handler_splicer (pp);
1623 }
1624
1625 /* Worker for remove_unreachable_eh_regions.
1626 PP is a pointer to the region to start a region tree depth-first
1627 search from. R_REACHABLE is the set of regions that have to be
1628 preserved. */
1629
1630 static void
1631 remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
1632 {
1633 while (*pp)
1634 {
1635 eh_region region = *pp;
1636 remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
1637 if (!bitmap_bit_p (r_reachable, region->index))
1638 remove_eh_handler_splicer (pp);
1639 else
1640 pp = &region->next_peer;
1641 }
1642 }
1643
1644 /* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1645 Do this by traversing the EH tree top-down and splicing out regions that
1646 are not marked. By removing regions from the leaves, we avoid costly
1647 searches in the region tree. */
1648
1649 void
1650 remove_unreachable_eh_regions (sbitmap r_reachable)
1651 {
1652 remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
1528 } 1653 }
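A minimal usage sketch for the new bulk-removal entry point, assuming the caller has already decided which region indices must survive; the marking step is shown only as a placeholder comment and is not taken from the changeset:

  /* Sketch: splice out every EH region whose index is not marked.  */
  sbitmap r_reachable
    = sbitmap_alloc (vec_safe_length (cfun->eh->region_array));
  bitmap_clear (r_reachable);

  /* ... for each region still referenced by the IL, do
         bitmap_set_bit (r_reachable, region->index);  ... */

  remove_unreachable_eh_regions (r_reachable);
  sbitmap_free (r_reachable);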
1529 1654
1530 /* Invokes CALLBACK for every exception handler landing pad label. 1655 /* Invokes CALLBACK for every exception handler landing pad label.
1531 Only used by reload hackery; should not be used by new code. */ 1656 Only used by reload hackery; should not be used by new code. */
1532 1657
1534 for_each_eh_label (void (*callback) (rtx)) 1659 for_each_eh_label (void (*callback) (rtx))
1535 { 1660 {
1536 eh_landing_pad lp; 1661 eh_landing_pad lp;
1537 int i; 1662 int i;
1538 1663
1539 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i) 1664 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1540 { 1665 {
1541 if (lp) 1666 if (lp)
1542 { 1667 {
1543 rtx lab = lp->landing_pad; 1668 rtx_code_label *lab = lp->landing_pad;
1544 if (lab && LABEL_P (lab)) 1669 if (lab && LABEL_P (lab))
1545 (*callback) (lab); 1670 (*callback) (lab);
1546 } 1671 }
1547 } 1672 }
1548 } 1673 }
1570 on the call_insn directly. Perhaps we should make more use of 1695 on the call_insn directly. Perhaps we should make more use of
1571 attaching the trees to call_insns (reachable via symbol_ref in 1696 attaching the trees to call_insns (reachable via symbol_ref in
1572 direct call cases) and just pull the data out of the trees. */ 1697 direct call cases) and just pull the data out of the trees. */
1573 1698
1574 void 1699 void
1575 make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr) 1700 make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
1576 { 1701 {
1577 rtx value; 1702 rtx value;
1578 if (ecf_flags & ECF_NOTHROW) 1703 if (ecf_flags & ECF_NOTHROW)
1579 value = const0_rtx; 1704 value = const0_rtx;
1580 else if (lp_nr != 0) 1705 else if (lp_nr != 0)
1587 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw 1712 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1588 nor perform a non-local goto. Replace the region note if it 1713 nor perform a non-local goto. Replace the region note if it
1589 already exists. */ 1714 already exists. */
1590 1715
1591 void 1716 void
1592 make_reg_eh_region_note_nothrow_nononlocal (rtx insn) 1717 make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
1593 { 1718 {
1594 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX); 1719 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1595 rtx intmin = GEN_INT (INT_MIN); 1720 rtx intmin = GEN_INT (INT_MIN);
1596 1721
1597 if (note != 0) 1722 if (note != 0)
1618 /* Copy an REG_EH_REGION note to each insn that might throw beginning 1743 /* Copy an REG_EH_REGION note to each insn that might throw beginning
1619 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn 1744 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1620 to look for a note, or the note itself. */ 1745 to look for a note, or the note itself. */
1621 1746
1622 void 1747 void
1623 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last) 1748 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1624 { 1749 {
1625 rtx insn, note = note_or_insn; 1750 rtx_insn *insn;
1751 rtx note = note_or_insn;
1626 1752
1627 if (INSN_P (note_or_insn)) 1753 if (INSN_P (note_or_insn))
1628 { 1754 {
1629 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX); 1755 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1630 if (note == NULL) 1756 if (note == NULL)
1639 } 1765 }
1640 1766
1641 /* Likewise, but iterate backward. */ 1767 /* Likewise, but iterate backward. */
1642 1768
1643 void 1769 void
1644 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first) 1770 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
1645 { 1771 {
1646 rtx insn, note = note_or_insn; 1772 rtx_insn *insn;
1773 rtx note = note_or_insn;
1647 1774
1648 if (INSN_P (note_or_insn)) 1775 if (INSN_P (note_or_insn))
1649 { 1776 {
1650 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX); 1777 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1651 if (note == NULL) 1778 if (note == NULL)
1692 ret = true; 1819 ret = true;
1693 goto egress; 1820 goto egress;
1694 } 1821 }
1695 1822
1696 if (lp_nr < 0) 1823 if (lp_nr < 0)
1697 r = VEC_index (eh_region, cfun->eh->region_array, -lp_nr); 1824 r = (*cfun->eh->region_array)[-lp_nr];
1698 else 1825 else
1699 { 1826 {
1700 lp = VEC_index (eh_landing_pad, cfun->eh->lp_array, lp_nr); 1827 lp = (*cfun->eh->lp_array)[lp_nr];
1701 r = lp->region; 1828 r = lp->region;
1702 } 1829 }
1703 1830
1704 egress: 1831 egress:
1705 *plp = lp; 1832 *plp = lp;
1754 return false; 1881 return false;
1755 1882
1756 if (NONJUMP_INSN_P (insn) 1883 if (NONJUMP_INSN_P (insn)
1757 && GET_CODE (PATTERN (insn)) == SEQUENCE) 1884 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1758 { 1885 {
1759 rtx seq = PATTERN (insn); 1886 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1760 int i, n = XVECLEN (seq, 0); 1887 int i, n = seq->len ();
1761 1888
1762 for (i = 0; i < n; i++) 1889 for (i = 0; i < n; i++)
1763 if (can_throw_external (XVECEXP (seq, 0, i))) 1890 if (can_throw_external (seq->element (i)))
1764 return true; 1891 return true;
1765 1892
1766 return false; 1893 return false;
1767 } 1894 }
1768 1895
1798 return true; 1925 return true;
1799 1926
1800 if (NONJUMP_INSN_P (insn) 1927 if (NONJUMP_INSN_P (insn)
1801 && GET_CODE (PATTERN (insn)) == SEQUENCE) 1928 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1802 { 1929 {
1803 rtx seq = PATTERN (insn); 1930 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1804 int i, n = XVECLEN (seq, 0); 1931 int i, n = seq->len ();
1805 1932
1806 for (i = 0; i < n; i++) 1933 for (i = 0; i < n; i++)
1807 if (!insn_nothrow_p (XVECEXP (seq, 0, i))) 1934 if (!insn_nothrow_p (seq->element (i)))
1808 return false; 1935 return false;
1809 1936
1810 return true; 1937 return true;
1811 } 1938 }
1812 1939
1815 1942
1816 /* Return true if INSN can perform a non-local goto. */ 1943 /* Return true if INSN can perform a non-local goto. */
1817 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */ 1944 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1818 1945
1819 bool 1946 bool
1820 can_nonlocal_goto (const_rtx insn) 1947 can_nonlocal_goto (const rtx_insn *insn)
1821 { 1948 {
1822 if (nonlocal_goto_handler_labels && CALL_P (insn)) 1949 if (nonlocal_goto_handler_labels && CALL_P (insn))
1823 { 1950 {
1824 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX); 1951 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1825 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN) 1952 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1831 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */ 1958 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
1832 1959
1833 static unsigned int 1960 static unsigned int
1834 set_nothrow_function_flags (void) 1961 set_nothrow_function_flags (void)
1835 { 1962 {
1836 rtx insn; 1963 rtx_insn *insn;
1837 1964
1838 crtl->nothrow = 1; 1965 crtl->nothrow = 1;
1839 1966
1840 /* Assume crtl->all_throwers_are_sibcalls until we encounter 1967 /* Assume crtl->all_throwers_are_sibcalls until we encounter
1841 something that can throw an exception. We specifically exempt 1968 something that can throw an exception. We specifically exempt
1864 crtl->all_throwers_are_sibcalls = 0; 1991 crtl->all_throwers_are_sibcalls = 0;
1865 return 0; 1992 return 0;
1866 } 1993 }
1867 } 1994 }
1868 1995
1869 for (insn = crtl->epilogue_delay_list; insn;
1870 insn = XEXP (insn, 1))
1871 if (can_throw_external (insn))
1872 {
1873 crtl->nothrow = 0;
1874
1875 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1876 {
1877 crtl->all_throwers_are_sibcalls = 0;
1878 return 0;
1879 }
1880 }
1881 if (crtl->nothrow 1996 if (crtl->nothrow
1882 && (cgraph_function_body_availability (cgraph_node 1997 && (cgraph_node::get (current_function_decl)->get_availability ()
1883 (current_function_decl))
1884 >= AVAIL_AVAILABLE)) 1998 >= AVAIL_AVAILABLE))
1885 { 1999 {
1886 struct cgraph_node *node = cgraph_node (current_function_decl); 2000 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1887 struct cgraph_edge *e; 2001 struct cgraph_edge *e;
1888 for (e = node->callers; e; e = e->next_caller) 2002 for (e = node->callers; e; e = e->next_caller)
1889 e->can_throw_external = false; 2003 e->can_throw_external = false;
1890 cgraph_set_nothrow_flag (node, true); 2004 node->set_nothrow_flag (true);
1891 2005
1892 if (dump_file) 2006 if (dump_file)
1893 fprintf (dump_file, "Marking function nothrow: %s\n\n", 2007 fprintf (dump_file, "Marking function nothrow: %s\n\n",
1894 current_function_name ()); 2008 current_function_name ());
1895 } 2009 }
1896 return 0; 2010 return 0;
1897 } 2011 }
1898 2012
1899 struct rtl_opt_pass pass_set_nothrow_function_flags = 2013 namespace {
1900 { 2014
1901 { 2015 const pass_data pass_data_set_nothrow_function_flags =
1902 RTL_PASS, 2016 {
1903 "nothrow", /* name */ 2017 RTL_PASS, /* type */
1904 NULL, /* gate */ 2018 "nothrow", /* name */
1905 set_nothrow_function_flags, /* execute */ 2019 OPTGROUP_NONE, /* optinfo_flags */
1906 NULL, /* sub */ 2020 TV_NONE, /* tv_id */
1907 NULL, /* next */ 2021 0, /* properties_required */
1908 0, /* static_pass_number */ 2022 0, /* properties_provided */
1909 TV_NONE, /* tv_id */ 2023 0, /* properties_destroyed */
1910 0, /* properties_required */ 2024 0, /* todo_flags_start */
1911 0, /* properties_provided */ 2025 0, /* todo_flags_finish */
1912 0, /* properties_destroyed */
1913 0, /* todo_flags_start */
1914 TODO_dump_func, /* todo_flags_finish */
1915 }
1916 }; 2026 };
2027
2028 class pass_set_nothrow_function_flags : public rtl_opt_pass
2029 {
2030 public:
2031 pass_set_nothrow_function_flags (gcc::context *ctxt)
2032 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2033 {}
2034
2035 /* opt_pass methods: */
2036 virtual unsigned int execute (function *)
2037 {
2038 return set_nothrow_function_flags ();
2039 }
2040
2041 }; // class pass_set_nothrow_function_flags
2042
2043 } // anon namespace
2044
2045 rtl_opt_pass *
2046 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2047 {
2048 return new pass_set_nothrow_function_flags (ctxt);
2049 }
1917 2050
1918 2051
1919 /* Various hooks for unwind library. */ 2052 /* Various hooks for unwind library. */
1920 2053
1921 /* Expand the EH support builtin functions: 2054 /* Expand the EH support builtin functions:
1925 expand_builtin_eh_common (tree region_nr_t) 2058 expand_builtin_eh_common (tree region_nr_t)
1926 { 2059 {
1927 HOST_WIDE_INT region_nr; 2060 HOST_WIDE_INT region_nr;
1928 eh_region region; 2061 eh_region region;
1929 2062
1930 gcc_assert (host_integerp (region_nr_t, 0)); 2063 gcc_assert (tree_fits_shwi_p (region_nr_t));
1931 region_nr = tree_low_cst (region_nr_t, 0); 2064 region_nr = tree_to_shwi (region_nr_t);
1932 2065
1933 region = VEC_index (eh_region, cfun->eh->region_array, region_nr); 2066 region = (*cfun->eh->region_array)[region_nr];
1934 2067
1935 /* ??? We shouldn't have been able to delete an eh region without 2068
1936 deleting all the code that depended on it. */ 2069 deleting all the code that depended on it. */
1937 gcc_assert (region != NULL); 2070 gcc_assert (region != NULL);
1938 2071
1971 { 2104 {
1972 eh_region dst 2105 eh_region dst
1973 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0)); 2106 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1974 eh_region src 2107 eh_region src
1975 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1)); 2108 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
1976 enum machine_mode fmode = targetm.eh_return_filter_mode (); 2109 scalar_int_mode fmode = targetm.eh_return_filter_mode ();
1977 2110
1978 if (dst->exc_ptr_reg == NULL) 2111 if (dst->exc_ptr_reg == NULL)
1979 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode); 2112 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1980 if (src->exc_ptr_reg == NULL) 2113 if (src->exc_ptr_reg == NULL)
1981 src->exc_ptr_reg = gen_reg_rtx (ptr_mode); 2114 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1999 { 2132 {
2000 /* Set this so all the registers get saved in our frame; we need to be 2133 /* Set this so all the registers get saved in our frame; we need to be
2001 able to copy the saved values for any registers from frames we unwind. */ 2134 able to copy the saved values for any registers from frames we unwind. */
2002 crtl->saves_all_registers = 1; 2135 crtl->saves_all_registers = 1;
2003 2136
2004 #ifdef SETUP_FRAME_ADDRESSES
2005 SETUP_FRAME_ADDRESSES (); 2137 SETUP_FRAME_ADDRESSES ();
2006 #endif
2007 } 2138 }
2008 2139
2009 /* Map a non-negative number to an eh return data register number; expands 2140 /* Map a non-negative number to an eh return data register number; expands
2010 to -1 if no return data register is associated with the input number. 2141 to -1 if no return data register is associated with the input number.
2011 At least the inputs 0 and 1 must be mapped; the target may provide more. */ 2142 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2020 { 2151 {
2021 error ("argument of %<__builtin_eh_return_regno%> must be constant"); 2152 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2022 return constm1_rtx; 2153 return constm1_rtx;
2023 } 2154 }
2024 2155
2025 iwhich = tree_low_cst (which, 1); 2156 iwhich = tree_to_uhwi (which);
2026 iwhich = EH_RETURN_DATA_REGNO (iwhich); 2157 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2027 if (iwhich == INVALID_REGNUM) 2158 if (iwhich == INVALID_REGNUM)
2028 return constm1_rtx; 2159 return constm1_rtx;
2029 2160
2030 #ifdef DWARF_FRAME_REGNUM 2161 #ifdef DWARF_FRAME_REGNUM
2053 addr = convert_to_mode (Pmode, addr, 0); 2184 addr = convert_to_mode (Pmode, addr, 0);
2054 #endif 2185 #endif
2055 } 2186 }
2056 2187
2057 /* First mask out any unwanted bits. */ 2188 /* First mask out any unwanted bits. */
2058 #ifdef MASK_RETURN_ADDR 2189 rtx mask = MASK_RETURN_ADDR;
2059 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr); 2190 if (mask)
2060 #endif 2191 expand_and (Pmode, addr, mask, addr);
2061 2192
2062 /* Then adjust to find the real return address. */ 2193 /* Then adjust to find the real return address. */
2063 #if defined (RETURN_ADDR_OFFSET) 2194 if (RETURN_ADDR_OFFSET)
2064 addr = plus_constant (addr, RETURN_ADDR_OFFSET); 2195 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2065 #endif
2066 2196
2067 return addr; 2197 return addr;
2068 } 2198 }
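The removal of the #ifdef guards around MASK_RETURN_ADDR and RETURN_ADDR_OFFSET above only works if both macros are always defined; presumably defaults.h now supplies fallbacks along these lines (a hedged reconstruction, not part of this changeset):

  /* Assumed fallback definitions so the macros can be tested at run time
     rather than with the preprocessor.  */
  #ifndef MASK_RETURN_ADDR
  #define MASK_RETURN_ADDR NULL_RTX   /* no masking of the return address */
  #endif

  #ifndef RETURN_ADDR_OFFSET
  #define RETURN_ADDR_OFFSET 0        /* no adjustment needed */
  #endif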
2069 2199
2070 /* Given an actual address in addr_tree, do any necessary encoding 2200 /* Given an actual address in addr_tree, do any necessary encoding
2076 { 2206 {
2077 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL); 2207 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2078 2208
2079 addr = convert_memory_address (Pmode, addr); 2209 addr = convert_memory_address (Pmode, addr);
2080 2210
2081 #ifdef RETURN_ADDR_OFFSET 2211 if (RETURN_ADDR_OFFSET)
2082 addr = force_reg (Pmode, addr); 2212 {
2083 addr = plus_constant (addr, -RETURN_ADDR_OFFSET); 2213 addr = force_reg (Pmode, addr);
2084 #endif 2214 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2215 }
2085 2216
2086 return addr; 2217 return addr;
2087 } 2218 }
2088 2219
2089 /* Set up the epilogue with the magic bits we'll need to return to the 2220 /* Set up the epilogue with the magic bits we'll need to return to the
2098 #ifdef EH_RETURN_STACKADJ_RTX 2229 #ifdef EH_RETURN_STACKADJ_RTX
2099 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj, 2230 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2100 VOIDmode, EXPAND_NORMAL); 2231 VOIDmode, EXPAND_NORMAL);
2101 tmp = convert_memory_address (Pmode, tmp); 2232 tmp = convert_memory_address (Pmode, tmp);
2102 if (!crtl->eh.ehr_stackadj) 2233 if (!crtl->eh.ehr_stackadj)
2103 crtl->eh.ehr_stackadj = copy_to_reg (tmp); 2234 crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
2104 else if (tmp != crtl->eh.ehr_stackadj) 2235 else if (tmp != crtl->eh.ehr_stackadj)
2105 emit_move_insn (crtl->eh.ehr_stackadj, tmp); 2236 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2106 #endif 2237 #endif
2107 2238
2108 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler, 2239 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2109 VOIDmode, EXPAND_NORMAL); 2240 VOIDmode, EXPAND_NORMAL);
2110 tmp = convert_memory_address (Pmode, tmp); 2241 tmp = convert_memory_address (Pmode, tmp);
2111 if (!crtl->eh.ehr_handler) 2242 if (!crtl->eh.ehr_handler)
2112 crtl->eh.ehr_handler = copy_to_reg (tmp); 2243 crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
2113 else if (tmp != crtl->eh.ehr_handler) 2244 else if (tmp != crtl->eh.ehr_handler)
2114 emit_move_insn (crtl->eh.ehr_handler, tmp); 2245 emit_move_insn (crtl->eh.ehr_handler, tmp);
2115 2246
2116 if (!crtl->eh.ehr_label) 2247 if (!crtl->eh.ehr_label)
2117 crtl->eh.ehr_label = gen_label_rtx (); 2248 crtl->eh.ehr_label = gen_label_rtx ();
2123 given PC other than the normal return address. */ 2254 given PC other than the normal return address. */
2124 2255
2125 void 2256 void
2126 expand_eh_return (void) 2257 expand_eh_return (void)
2127 { 2258 {
2128 rtx around_label; 2259 rtx_code_label *around_label;
2129 2260
2130 if (! crtl->eh.ehr_label) 2261 if (! crtl->eh.ehr_label)
2131 return; 2262 return;
2132 2263
2133 crtl->calls_eh_return = 1; 2264 crtl->calls_eh_return = 1;
2144 2275
2145 #ifdef EH_RETURN_STACKADJ_RTX 2276 #ifdef EH_RETURN_STACKADJ_RTX
2146 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj); 2277 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2147 #endif 2278 #endif
2148 2279
2149 #ifdef HAVE_eh_return 2280 if (targetm.have_eh_return ())
2150 if (HAVE_eh_return) 2281 emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
2151 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
2152 else 2282 else
2153 #endif 2283 {
2154 { 2284 if (rtx handler = EH_RETURN_HANDLER_RTX)
2155 #ifdef EH_RETURN_HANDLER_RTX 2285 emit_move_insn (handler, crtl->eh.ehr_handler);
2156 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler); 2286 else
2157 #else 2287 error ("__builtin_eh_return not supported on this target");
2158 error ("__builtin_eh_return not supported on this target");
2159 #endif
2160 } 2288 }
2161 2289
2162 emit_label (around_label); 2290 emit_label (around_label);
2163 } 2291 }
2164 2292
2180 #endif 2308 #endif
2181 2309
2182 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend); 2310 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2183 } 2311 }
2184 2312
2185 /* In the following functions, we represent entries in the action table
2186 as 1-based indices. Special cases are:
2187
2188 0: null action record, non-null landing pad; implies cleanups
2189 -1: null action record, null landing pad; implies no action
2190 -2: no call-site entry; implies must_not_throw
2191 -3: we have yet to process outer regions
2192
2193 Further, no special cases apply to the "next" field of the record.
2194 For next, 0 means end of list. */
2195
2196 struct action_record
2197 {
2198 int offset;
2199 int filter;
2200 int next;
2201 };
2202
2203 static int 2313 static int
2204 action_record_eq (const void *pentry, const void *pdata) 2314 add_action_record (action_hash_type *ar_hash, int filter, int next)
2205 {
2206 const struct action_record *entry = (const struct action_record *) pentry;
2207 const struct action_record *data = (const struct action_record *) pdata;
2208 return entry->filter == data->filter && entry->next == data->next;
2209 }
2210
2211 static hashval_t
2212 action_record_hash (const void *pentry)
2213 {
2214 const struct action_record *entry = (const struct action_record *) pentry;
2215 return entry->next * 1009 + entry->filter;
2216 }
2217
2218 static int
2219 add_action_record (htab_t ar_hash, int filter, int next)
2220 { 2315 {
2221 struct action_record **slot, *new_ar, tmp; 2316 struct action_record **slot, *new_ar, tmp;
2222 2317
2223 tmp.filter = filter; 2318 tmp.filter = filter;
2224 tmp.next = next; 2319 tmp.next = next;
2225 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT); 2320 slot = ar_hash->find_slot (&tmp, INSERT);
2226 2321
2227 if ((new_ar = *slot) == NULL) 2322 if ((new_ar = *slot) == NULL)
2228 { 2323 {
2229 new_ar = XNEW (struct action_record); 2324 new_ar = XNEW (struct action_record);
2230 new_ar->offset = VEC_length (uchar, crtl->eh.action_record_data) + 1; 2325 new_ar->offset = crtl->eh.action_record_data->length () + 1;
2231 new_ar->filter = filter; 2326 new_ar->filter = filter;
2232 new_ar->next = next; 2327 new_ar->next = next;
2233 *slot = new_ar; 2328 *slot = new_ar;
2234 2329
2235 /* The filter value goes in untouched. The link to the next 2330 /* The filter value goes in untouched. The link to the next
2237 that there is no next record. So convert the absolute 1 based 2332 that there is no next record. So convert the absolute 1 based
2238 indices we've been carrying around into a displacement. */ 2333 indices we've been carrying around into a displacement. */
2239 2334
2240 push_sleb128 (&crtl->eh.action_record_data, filter); 2335 push_sleb128 (&crtl->eh.action_record_data, filter);
2241 if (next) 2336 if (next)
2242 next -= VEC_length (uchar, crtl->eh.action_record_data) + 1; 2337 next -= crtl->eh.action_record_data->length () + 1;
2243 push_sleb128 (&crtl->eh.action_record_data, next); 2338 push_sleb128 (&crtl->eh.action_record_data, next);
2244 } 2339 }
2245 2340
2246 return new_ar->offset; 2341 return new_ar->offset;
2247 } 2342 }
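A short worked example of the displacement conversion in add_action_record, assuming every value fits in a single LEB128 byte: the first record (filter 1, no chain) is added to an empty table, takes offset 1, and occupies offsets 1-2 as (filter = 1, next = 0). A second record chaining to it (filter 2, next = 1) takes offset 3; after its filter byte is pushed the table holds 3 bytes, so the stored link is 1 - (3 + 1) = -3, written at offset 4. Read back as a self-relative displacement, 4 + (-3) = 1 lands on the first record again.

  offset:  1    2    3    4
  byte:    1    0    2   -3    (filter, end of chain, filter, link)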
2248 2343
2249 static int 2344 static int
2250 collect_one_action_chain (htab_t ar_hash, eh_region region) 2345 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2251 { 2346 {
2252 int next; 2347 int next;
2253 2348
2254 /* If we've reached the top of the region chain, then we have 2349 /* If we've reached the top of the region chain, then we have
2255 no actions, and require no landing pad. */ 2350 no actions, and require no landing pad. */
2357 static int 2452 static int
2358 add_call_site (rtx landing_pad, int action, int section) 2453 add_call_site (rtx landing_pad, int action, int section)
2359 { 2454 {
2360 call_site_record record; 2455 call_site_record record;
2361 2456
2362 record = ggc_alloc_call_site_record_d (); 2457 record = ggc_alloc<call_site_record_d> ();
2363 record->landing_pad = landing_pad; 2458 record->landing_pad = landing_pad;
2364 record->action = action; 2459 record->action = action;
2365 2460
2366 VEC_safe_push (call_site_record, gc, 2461 vec_safe_push (crtl->eh.call_site_record_v[section], record);
2367 crtl->eh.call_site_record[section], record); 2462
2368 2463 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2369 return call_site_base + VEC_length (call_site_record, 2464 }
2370 crtl->eh.call_site_record[section]) - 1; 2465
2466 static rtx_note *
2467 emit_note_eh_region_end (rtx_insn *insn)
2468 {
2469 rtx_insn *next = NEXT_INSN (insn);
2470
2471 /* Make sure we do not split a call and its corresponding
2472 CALL_ARG_LOCATION note. */
2473 if (next && NOTE_P (next)
2474 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2475 insn = next;
2476
2477 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2478 }
2479
2480 /* Add a NOP after NOTE_INSN_SWITCH_TEXT_SECTIONS when the cold section starts
2481 with a landing pad.
2482 With the landing pad at offset 0 from the start label of the section,
2483 we would miss EH delivery because 0 is special and means no landing pad. */
2484
2485 static bool
2486 maybe_add_nop_after_section_switch (void)
2487 {
2488 if (!crtl->uses_eh_lsda
2489 || !crtl->eh.call_site_record_v[1])
2490 return false;
2491 int n = vec_safe_length (crtl->eh.call_site_record_v[1]);
2492 hash_set<rtx_insn *> visited;
2493
2494 for (int i = 0; i < n; ++i)
2495 {
2496 struct call_site_record_d *cs
2497 = (*crtl->eh.call_site_record_v[1])[i];
2498 if (cs->landing_pad)
2499 {
2500 rtx_insn *insn = as_a <rtx_insn *> (cs->landing_pad);
2501 while (true)
2502 {
2503 /* Landing pads have LABEL_PRESERVE_P flag set. This check makes
2504 sure that we do not walk past a landing pad visited earlier,
2505 which would result in possible quadratic behaviour. */
2506 if (LABEL_P (insn) && LABEL_PRESERVE_P (insn)
2507 && visited.add (insn))
2508 break;
2509
2510 /* Conservatively assume that an ASM insn may be empty. We have
2511 no way to tell what it contains. */
2512 if (active_insn_p (insn)
2513 && GET_CODE (PATTERN (insn)) != ASM_INPUT
2514 && GET_CODE (PATTERN (insn)) != ASM_OPERANDS)
2515 break;
2516
2517 /* If we reached the start of the hot section, then a NOP will be
2518 needed. */
2519 if (GET_CODE (insn) == NOTE
2520 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2521 {
2522 emit_insn_after (gen_nop (), insn);
2523 break;
2524 }
2525
2526 /* We visit only labels from the cold section. We should never hit
2527 the beginning of the insn stream here. */
2528 insn = PREV_INSN (insn);
2529 }
2530 }
2531 }
2532 return false;
2371 } 2533 }
2372 2534
2373 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes. 2535 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2374 The new note numbers will not refer to region numbers, but 2536 The new note numbers will not refer to region numbers, but
2375 instead to call site entries. */ 2537 instead to call site entries. */
2376 2538
2377 static unsigned int 2539 static unsigned int
2378 convert_to_eh_region_ranges (void) 2540 convert_to_eh_region_ranges (void)
2379 { 2541 {
2380 rtx insn, iter, note; 2542 rtx insn;
2381 htab_t ar_hash; 2543 rtx_insn *iter;
2544 rtx_note *note;
2545 action_hash_type ar_hash (31);
2382 int last_action = -3; 2546 int last_action = -3;
2383 rtx last_action_insn = NULL_RTX; 2547 rtx_insn *last_action_insn = NULL;
2384 rtx last_landing_pad = NULL_RTX; 2548 rtx last_landing_pad = NULL_RTX;
2385 rtx first_no_action_insn = NULL_RTX; 2549 rtx_insn *first_no_action_insn = NULL;
2386 int call_site = 0; 2550 int call_site = 0;
2387 int cur_sec = 0; 2551 int cur_sec = 0;
2388 rtx section_switch_note = NULL_RTX; 2552 rtx_insn *section_switch_note = NULL;
2389 rtx first_no_action_insn_before_switch = NULL_RTX; 2553 rtx_insn *first_no_action_insn_before_switch = NULL;
2390 rtx last_no_action_insn_before_switch = NULL_RTX; 2554 rtx_insn *last_no_action_insn_before_switch = NULL;
2391 rtx *pad_map = NULL;
2392 sbitmap pad_loc = NULL;
2393 int min_labelno = 0, max_labelno = 0;
2394 int saved_call_site_base = call_site_base; 2555 int saved_call_site_base = call_site_base;
2395 2556
2396 crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64); 2557 vec_alloc (crtl->eh.action_record_data, 64);
2397
2398 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2399 2558
2400 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter)) 2559 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2401 if (INSN_P (iter)) 2560 if (INSN_P (iter))
2402 { 2561 {
2403 eh_landing_pad lp; 2562 eh_landing_pad lp;
2404 eh_region region; 2563 eh_region region;
2405 bool nothrow; 2564 bool nothrow;
2406 int this_action; 2565 int this_action;
2407 rtx this_landing_pad; 2566 rtx_code_label *this_landing_pad;
2408 2567
2409 insn = iter; 2568 insn = iter;
2410 if (NONJUMP_INSN_P (insn) 2569 if (NONJUMP_INSN_P (insn)
2411 && GET_CODE (PATTERN (insn)) == SEQUENCE) 2570 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2412 insn = XVECEXP (PATTERN (insn), 0, 0); 2571 insn = XVECEXP (PATTERN (insn), 0, 0);
2413 2572
2414 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp); 2573 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2415 if (nothrow) 2574 if (nothrow)
2416 continue; 2575 continue;
2417 if (region) 2576 if (region)
2418 this_action = collect_one_action_chain (ar_hash, region); 2577 this_action = collect_one_action_chain (&ar_hash, region);
2419 else 2578 else
2420 this_action = -1; 2579 this_action = -1;
2421 2580
2422 /* Existence of catch handlers, or must-not-throw regions 2581 /* Existence of catch handlers, or must-not-throw regions
2423 implies that an lsda is needed (even if empty). */ 2582 implies that an lsda is needed (even if empty). */
2433 } 2592 }
2434 2593
2435 if (this_action >= 0) 2594 if (this_action >= 0)
2436 this_landing_pad = lp->landing_pad; 2595 this_landing_pad = lp->landing_pad;
2437 else 2596 else
2438 this_landing_pad = NULL_RTX; 2597 this_landing_pad = NULL;
2439 2598
2440 /* Differing actions or landing pads imply a change in call-site 2599
2441 info, which implies some EH_REGION note should be emitted. */ 2600 info, which implies some EH_REGION note should be emitted. */
2442 if (last_action != this_action 2601 if (last_action != this_action
2443 || last_landing_pad != this_landing_pad) 2602 || last_landing_pad != this_landing_pad)
2451 ? -1 : -3)); 2610 ? -1 : -3));
2452 call_site = add_call_site (NULL_RTX, 0, 0); 2611 call_site = add_call_site (NULL_RTX, 0, 0);
2453 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, 2612 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2454 first_no_action_insn_before_switch); 2613 first_no_action_insn_before_switch);
2455 NOTE_EH_HANDLER (note) = call_site; 2614 NOTE_EH_HANDLER (note) = call_site;
2456 note = emit_note_after (NOTE_INSN_EH_REGION_END, 2615 note
2457 last_no_action_insn_before_switch); 2616 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2458 NOTE_EH_HANDLER (note) = call_site; 2617 NOTE_EH_HANDLER (note) = call_site;
2459 gcc_assert (last_action != -3 2618 gcc_assert (last_action != -3
2460 || (last_action_insn 2619 || (last_action_insn
2461 == last_no_action_insn_before_switch)); 2620 == last_no_action_insn_before_switch));
2462 first_no_action_insn_before_switch = NULL_RTX; 2621 first_no_action_insn_before_switch = NULL;
2463 last_no_action_insn_before_switch = NULL_RTX; 2622 last_no_action_insn_before_switch = NULL;
2464 call_site_base++; 2623 call_site_base++;
2465 } 2624 }
2466 /* If we'd not seen a previous action (-3) or the previous 2625 /* If we'd not seen a previous action (-3) or the previous
2467 action was must-not-throw (-2), then we do not need an 2626 action was must-not-throw (-2), then we do not need an
2468 end note. */ 2627 end note. */
2473 { 2632 {
2474 call_site = add_call_site (NULL_RTX, 0, cur_sec); 2633 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2475 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, 2634 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2476 first_no_action_insn); 2635 first_no_action_insn);
2477 NOTE_EH_HANDLER (note) = call_site; 2636 NOTE_EH_HANDLER (note) = call_site;
2478 first_no_action_insn = NULL_RTX; 2637 first_no_action_insn = NULL;
2479 } 2638 }
2480 2639
2481 note = emit_note_after (NOTE_INSN_EH_REGION_END, 2640 note = emit_note_eh_region_end (last_action_insn);
2482 last_action_insn);
2483 NOTE_EH_HANDLER (note) = call_site; 2641 NOTE_EH_HANDLER (note) = call_site;
2484 } 2642 }
2485 2643
2486 /* If the new action is must-not-throw, then no region notes 2644 /* If the new action is must-not-throw, then no region notes
2487 are created. */ 2645 are created. */
2507 section_switch_note = iter; 2665 section_switch_note = iter;
2508 if (first_no_action_insn) 2666 if (first_no_action_insn)
2509 { 2667 {
2510 first_no_action_insn_before_switch = first_no_action_insn; 2668 first_no_action_insn_before_switch = first_no_action_insn;
2511 last_no_action_insn_before_switch = last_action_insn; 2669 last_no_action_insn_before_switch = last_action_insn;
2512 first_no_action_insn = NULL_RTX; 2670 first_no_action_insn = NULL;
2513 gcc_assert (last_action == -1); 2671 gcc_assert (last_action == -1);
2514 last_action = -3; 2672 last_action = -3;
2515 } 2673 }
2516 /* Force closing of current EH region before section switch and 2674 /* Force closing of current EH region before section switch and
2517 opening a new one afterwards. */ 2675 opening a new one afterwards. */
2518 else if (last_action != -3) 2676 else if (last_action != -3)
2519 last_landing_pad = pc_rtx; 2677 last_landing_pad = pc_rtx;
2520 call_site_base += VEC_length (call_site_record, 2678 if (crtl->eh.call_site_record_v[cur_sec])
2521 crtl->eh.call_site_record[cur_sec]); 2679 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2522 cur_sec++; 2680 cur_sec++;
2523 gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL); 2681 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2524 crtl->eh.call_site_record[cur_sec] 2682 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2525 = VEC_alloc (call_site_record, gc, 10);
2526 max_labelno = max_label_num ();
2527 min_labelno = get_first_label_num ();
2528 pad_map = XCNEWVEC (rtx, max_labelno - min_labelno + 1);
2529 pad_loc = sbitmap_alloc (max_labelno - min_labelno + 1);
2530 } 2683 }
2531 else if (LABEL_P (iter) && pad_map)
2532 SET_BIT (pad_loc, CODE_LABEL_NUMBER (iter) - min_labelno);
2533 2684
2534 if (last_action >= -1 && ! first_no_action_insn) 2685 if (last_action >= -1 && ! first_no_action_insn)
2535 { 2686 {
2536 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn); 2687 note = emit_note_eh_region_end (last_action_insn);
2537 NOTE_EH_HANDLER (note) = call_site; 2688 NOTE_EH_HANDLER (note) = call_site;
2538 } 2689 }
2539 2690
2540 call_site_base = saved_call_site_base; 2691 call_site_base = saved_call_site_base;
2541 2692
2542 if (pad_map)
2543 {
2544 /* When doing hot/cold partitioning, ensure landing pads are
2545 always in the same section as the EH region, .gcc_except_table
2546 can't express it otherwise. */
2547 for (cur_sec = 0; cur_sec < 2; cur_sec++)
2548 {
2549 int i, idx;
2550 int n = VEC_length (call_site_record,
2551 crtl->eh.call_site_record[cur_sec]);
2552 basic_block prev_bb = NULL, padbb;
2553
2554 for (i = 0; i < n; ++i)
2555 {
2556 struct call_site_record_d *cs =
2557 VEC_index (call_site_record,
2558 crtl->eh.call_site_record[cur_sec], i);
2559 rtx jump, note;
2560
2561 if (cs->landing_pad == NULL_RTX)
2562 continue;
2563 idx = CODE_LABEL_NUMBER (cs->landing_pad) - min_labelno;
2564 /* If the landing pad is in the correct section, nothing
2565 is needed. */
2566 if (TEST_BIT (pad_loc, idx) ^ (cur_sec == 0))
2567 continue;
2568 /* Otherwise, if we haven't seen this pad yet, we need to
2569 add a new label and jump to the correct section. */
2570 if (pad_map[idx] == NULL_RTX)
2571 {
2572 pad_map[idx] = gen_label_rtx ();
2573 if (prev_bb == NULL)
2574 for (iter = section_switch_note;
2575 iter; iter = PREV_INSN (iter))
2576 if (NOTE_INSN_BASIC_BLOCK_P (iter))
2577 {
2578 prev_bb = NOTE_BASIC_BLOCK (iter);
2579 break;
2580 }
2581 if (cur_sec == 0)
2582 {
2583 note = emit_label_before (pad_map[idx],
2584 section_switch_note);
2585 jump = emit_jump_insn_before (gen_jump (cs->landing_pad),
2586 section_switch_note);
2587 }
2588 else
2589 {
2590 jump = emit_jump_insn_after (gen_jump (cs->landing_pad),
2591 section_switch_note);
2592 note = emit_label_after (pad_map[idx],
2593 section_switch_note);
2594 }
2595 JUMP_LABEL (jump) = cs->landing_pad;
2596 add_reg_note (jump, REG_CROSSING_JUMP, NULL_RTX);
2597 iter = NEXT_INSN (cs->landing_pad);
2598 if (iter && NOTE_INSN_BASIC_BLOCK_P (iter))
2599 padbb = NOTE_BASIC_BLOCK (iter);
2600 else
2601 padbb = NULL;
2602 if (padbb && prev_bb
2603 && BB_PARTITION (padbb) != BB_UNPARTITIONED)
2604 {
2605 basic_block bb;
2606 int part
2607 = BB_PARTITION (padbb) == BB_COLD_PARTITION
2608 ? BB_HOT_PARTITION : BB_COLD_PARTITION;
2609 edge_iterator ei;
2610 edge e;
2611
2612 bb = create_basic_block (note, jump, prev_bb);
2613 make_single_succ_edge (bb, padbb, EDGE_CROSSING);
2614 BB_SET_PARTITION (bb, part);
2615 for (ei = ei_start (padbb->preds);
2616 (e = ei_safe_edge (ei)); )
2617 {
2618 if ((e->flags & (EDGE_EH|EDGE_CROSSING))
2619 == (EDGE_EH|EDGE_CROSSING))
2620 {
2621 redirect_edge_succ (e, bb);
2622 e->flags &= ~EDGE_CROSSING;
2623 }
2624 else
2625 ei_next (&ei);
2626 }
2627 if (cur_sec == 0)
2628 prev_bb = bb;
2629 }
2630 }
2631 cs->landing_pad = pad_map[idx];
2632 }
2633 }
2634
2635 sbitmap_free (pad_loc);
2636 XDELETEVEC (pad_map);
2637 }
2638
2639 htab_delete (ar_hash);
2640 return 0; 2693 return 0;
2641 } 2694 }
2642 2695
2643 static bool 2696 namespace {
2644 gate_convert_to_eh_region_ranges (void) 2697
2698 const pass_data pass_data_convert_to_eh_region_ranges =
2699 {
2700 RTL_PASS, /* type */
2701 "eh_ranges", /* name */
2702 OPTGROUP_NONE, /* optinfo_flags */
2703 TV_NONE, /* tv_id */
2704 0, /* properties_required */
2705 0, /* properties_provided */
2706 0, /* properties_destroyed */
2707 0, /* todo_flags_start */
2708 0, /* todo_flags_finish */
2709 };
2710
2711 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2712 {
2713 public:
2714 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2715 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2716 {}
2717
2718 /* opt_pass methods: */
2719 virtual bool gate (function *);
2720 virtual unsigned int execute (function *)
2721 {
2722 int ret = convert_to_eh_region_ranges ();
2723 maybe_add_nop_after_section_switch ();
2724 return ret;
2725 }
2726
2727 }; // class pass_convert_to_eh_region_ranges
2728
2729 bool
2730 pass_convert_to_eh_region_ranges::gate (function *)
2645 { 2731 {
2646 /* Nothing to do for SJLJ exceptions or if no regions created. */ 2732 /* Nothing to do for SJLJ exceptions or if no regions created. */
2647 if (cfun->eh->region_tree == NULL) 2733 if (cfun->eh->region_tree == NULL)
2648 return false; 2734 return false;
2649 if (targetm.except_unwind_info (&global_options) == UI_SJLJ) 2735 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2650 return false; 2736 return false;
2651 return true; 2737 return true;
2652 } 2738 }
2653 2739
2654 struct rtl_opt_pass pass_convert_to_eh_region_ranges = 2740 } // anon namespace
2655 { 2741
2656 { 2742 rtl_opt_pass *
2657 RTL_PASS, 2743 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2658 "eh_ranges", /* name */ 2744 {
2659 gate_convert_to_eh_region_ranges, /* gate */ 2745 return new pass_convert_to_eh_region_ranges (ctxt);
2660 convert_to_eh_region_ranges, /* execute */ 2746 }
2661 NULL, /* sub */
2662 NULL, /* next */
2663 0, /* static_pass_number */
2664 TV_NONE, /* tv_id */
2665 0, /* properties_required */
2666 0, /* properties_provided */
2667 0, /* properties_destroyed */
2668 0, /* todo_flags_start */
2669 TODO_dump_func, /* todo_flags_finish */
2670 }
2671 };
2672 2747
2673 static void 2748 static void
2674 push_uleb128 (VEC (uchar, gc) **data_area, unsigned int value) 2749 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2675 { 2750 {
2676 do 2751 do
2677 { 2752 {
2678 unsigned char byte = value & 0x7f; 2753 unsigned char byte = value & 0x7f;
2679 value >>= 7; 2754 value >>= 7;
2680 if (value) 2755 if (value)
2681 byte |= 0x80; 2756 byte |= 0x80;
2682 VEC_safe_push (uchar, gc, *data_area, byte); 2757 vec_safe_push (*data_area, byte);
2683 } 2758 }
2684 while (value); 2759 while (value);
2685 } 2760 }
2686 2761
2687 static void 2762 static void
2688 push_sleb128 (VEC (uchar, gc) **data_area, int value) 2763 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2689 { 2764 {
2690 unsigned char byte; 2765 unsigned char byte;
2691 int more; 2766 int more;
2692 2767
2693 do 2768 do
2696 value >>= 7; 2771 value >>= 7;
2697 more = ! ((value == 0 && (byte & 0x40) == 0) 2772 more = ! ((value == 0 && (byte & 0x40) == 0)
2698 || (value == -1 && (byte & 0x40) != 0)); 2773 || (value == -1 && (byte & 0x40) != 0));
2699 if (more) 2774 if (more)
2700 byte |= 0x80; 2775 byte |= 0x80;
2701 VEC_safe_push (uchar, gc, *data_area, byte); 2776 vec_safe_push (*data_area, byte);
2702 } 2777 }
2703 while (more); 2778 while (more);
2704 } 2779 }
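For reference, a standalone sketch of a decoder for the signed LEB128 bytes that push_sleb128 emits; this is illustrative plain C, not code from the changeset:

  #include <stddef.h>

  /* Decode one signed LEB128 value from BUF into *RESULT and return the
     number of bytes consumed.  Mirrors the encoder above: 7 value bits
     per byte, 0x80 as the continuation flag, and 0x40 in the final byte
     as the sign bit.  */
  static size_t
  read_sleb128 (const unsigned char *buf, long *result)
  {
    unsigned long value = 0;
    unsigned int shift = 0;
    size_t i = 0;
    unsigned char byte;

    do
      {
        byte = buf[i++];
        value |= (unsigned long) (byte & 0x7f) << shift;
        shift += 7;
      }
    while (byte & 0x80);

    /* Sign-extend when the final byte carries the sign bit.  */
    if (shift < 8 * sizeof (long) && (byte & 0x40))
      value |= -((unsigned long) 1 << shift);

    *result = (long) value;
    return i;
  }

  /* Example: the single byte 0x7d decodes to -3, matching what
     push_sleb128 (&area, -3) would have appended.  */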
2705 2780
2706 2781
2707 #ifndef HAVE_AS_LEB128
2708 static int 2782 static int
2709 dw2_size_of_call_site_table (int section) 2783 dw2_size_of_call_site_table (int section)
2710 { 2784 {
2711 int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]); 2785 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2712 int size = n * (4 + 4 + 4); 2786 int size = n * (4 + 4 + 4);
2713 int i; 2787 int i;
2714 2788
2715 for (i = 0; i < n; ++i) 2789 for (i = 0; i < n; ++i)
2716 { 2790 {
2717 struct call_site_record_d *cs = 2791 struct call_site_record_d *cs =
2718 VEC_index (call_site_record, crtl->eh.call_site_record[section], i); 2792 (*crtl->eh.call_site_record_v[section])[i];
2719 size += size_of_uleb128 (cs->action); 2793 size += size_of_uleb128 (cs->action);
2720 } 2794 }
2721 2795
2722 return size; 2796 return size;
2723 } 2797 }
2724 2798
2725 static int 2799 static int
2726 sjlj_size_of_call_site_table (void) 2800 sjlj_size_of_call_site_table (void)
2727 { 2801 {
2728 int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]); 2802 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2729 int size = 0; 2803 int size = 0;
2730 int i; 2804 int i;
2731 2805
2732 for (i = 0; i < n; ++i) 2806 for (i = 0; i < n; ++i)
2733 { 2807 {
2734 struct call_site_record_d *cs = 2808 struct call_site_record_d *cs =
2735 VEC_index (call_site_record, crtl->eh.call_site_record[0], i); 2809 (*crtl->eh.call_site_record_v[0])[i];
2736 size += size_of_uleb128 (INTVAL (cs->landing_pad)); 2810 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2737 size += size_of_uleb128 (cs->action); 2811 size += size_of_uleb128 (cs->action);
2738 } 2812 }
2739 2813
2740 return size; 2814 return size;
2741 } 2815 }
2742 #endif
2743 2816
2744 static void 2817 static void
2745 dw2_output_call_site_table (int cs_format, int section) 2818 dw2_output_call_site_table (int cs_format, int section)
2746 { 2819 {
2747 int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]); 2820 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2748 int i; 2821 int i;
2749 const char *begin; 2822 const char *begin;
2750 2823
2751 if (section == 0) 2824 if (section == 0)
2752 begin = current_function_func_begin_label; 2825 begin = current_function_func_begin_label;
2755 else 2828 else
2756 begin = crtl->subsections.cold_section_label; 2829 begin = crtl->subsections.cold_section_label;
2757 2830
2758 for (i = 0; i < n; ++i) 2831 for (i = 0; i < n; ++i)
2759 { 2832 {
2760 struct call_site_record_d *cs = 2833 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2761 VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
2762 char reg_start_lab[32]; 2834 char reg_start_lab[32];
2763 char reg_end_lab[32]; 2835 char reg_end_lab[32];
2764 char landing_pad_lab[32]; 2836 char landing_pad_lab[32];
2765 2837
2766 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i); 2838 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2804 } 2876 }
2805 2877
2806 static void 2878 static void
2807 sjlj_output_call_site_table (void) 2879 sjlj_output_call_site_table (void)
2808 { 2880 {
2809 int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]); 2881 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2810 int i; 2882 int i;
2811 2883
2812 for (i = 0; i < n; ++i) 2884 for (i = 0; i < n; ++i)
2813 { 2885 {
2814 struct call_site_record_d *cs = 2886 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2815 VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
2816 2887
2817 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad), 2888 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2818 "region %d landing pad", i); 2889 "region %d landing pad", i);
2819 dw2_asm_output_data_uleb128 (cs->action, "action"); 2890 dw2_asm_output_data_uleb128 (cs->action, "action");
2820 } 2891 }
2831 2902
2832 if (exception_section) 2903 if (exception_section)
2833 s = exception_section; 2904 s = exception_section;
2834 else 2905 else
2835 { 2906 {
2907 int flags;
2908
2909 if (EH_TABLES_CAN_BE_READ_ONLY)
2910 {
2911 int tt_format =
2912 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2913 flags = ((! flag_pic
2914 || ((tt_format & 0x70) != DW_EH_PE_absptr
2915 && (tt_format & 0x70) != DW_EH_PE_aligned))
2916 ? 0 : SECTION_WRITE);
2917 }
2918 else
2919 flags = SECTION_WRITE;
2920
2836 /* Compute the section and cache it into exception_section, 2921 /* Compute the section and cache it into exception_section,
2837 unless it depends on the function name. */ 2922 unless it depends on the function name. */
2838 if (targetm.have_named_sections) 2923 if (targetm_common.have_named_sections)
2839 { 2924 {
2840 int flags;
2841
2842 if (EH_TABLES_CAN_BE_READ_ONLY)
2843 {
2844 int tt_format =
2845 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2846 flags = ((! flag_pic
2847 || ((tt_format & 0x70) != DW_EH_PE_absptr
2848 && (tt_format & 0x70) != DW_EH_PE_aligned))
2849 ? 0 : SECTION_WRITE);
2850 }
2851 else
2852 flags = SECTION_WRITE;
2853
2854 #ifdef HAVE_LD_EH_GC_SECTIONS 2925 #ifdef HAVE_LD_EH_GC_SECTIONS
2855 if (flag_function_sections) 2926 if (flag_function_sections
2927 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2856 { 2928 {
2857 char *section_name = XNEWVEC (char, strlen (fnname) + 32); 2929 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2930 /* The EH table must match the code section, so only mark
2931 it linkonce if we have COMDAT groups to tie them together. */
2932 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2933 flags |= SECTION_LINKONCE;
2858 sprintf (section_name, ".gcc_except_table.%s", fnname); 2934 sprintf (section_name, ".gcc_except_table.%s", fnname);
2859 s = get_section (section_name, flags, NULL); 2935 s = get_section (section_name, flags, current_function_decl);
2860 free (section_name); 2936 free (section_name);
2861 } 2937 }
2862 else 2938 else
2863 #endif 2939 #endif
2864 exception_section 2940 exception_section
2865 = s = get_section (".gcc_except_table", flags, NULL); 2941 = s = get_section (".gcc_except_table", flags, NULL);
2866 } 2942 }
2867 else 2943 else
2868 exception_section 2944 exception_section
2869 = s = flag_pic ? data_section : readonly_data_section; 2945 = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
2870 } 2946 }
2871 2947
2872 switch_to_section (s); 2948 switch_to_section (s);
2873 } 2949 }
2874 2950
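(Editor's note: the flags computation hoisted out of the have_named_sections branch above decides whether .gcc_except_table must be writable. Condensed into a predicate, under the assumption of the standard DW_EH_PE_* values from dwarf2.h, it amounts to the sketch below; the function and parameter names are illustrative.)

#include <stdbool.h>

/* Illustrative form of the writability test above.  TT_FORMAT is the
   DW_EH_PE_* encoding chosen for @TType entries, PIC says whether we
   generate position-independent code, and TABLES_RO stands for the
   target's EH_TABLES_CAN_BE_READ_ONLY.  Absolute (0x00) and aligned
   (0x50) pointer encodings would need load-time relocation under PIC,
   so they force the table into a writable section.  */
static bool
eh_table_can_be_read_only (int tt_format, bool pic, bool tables_ro)
{
  const int dw_eh_pe_absptr = 0x00;
  const int dw_eh_pe_aligned = 0x50;

  if (!tables_ro)
    return false;
  if (!pic)
    return true;
  return (tt_format & 0x70) != dw_eh_pe_absptr
	 && (tt_format & 0x70) != dw_eh_pe_aligned;
}

(When this yields false the code selects SECTION_WRITE, which is also why the fallback without named sections now keys the data_section/readonly_data_section choice on flags == SECTION_WRITE rather than on flag_pic alone.)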
2885 2961
2886 if (type == NULL_TREE) 2962 if (type == NULL_TREE)
2887 value = const0_rtx; 2963 value = const0_rtx;
2888 else 2964 else
2889 { 2965 {
2890 struct varpool_node *node;
2891
2892 /* FIXME lto. pass_ipa_free_lang_data changes all types to 2966 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2893 runtime types so TYPE should already be a runtime type 2967 runtime types so TYPE should already be a runtime type
2894 reference. When pass_ipa_free_lang data is made a default 2968 reference. When pass_ipa_free_lang data is made a default
2895 pass, we can then remove the call to lookup_type_for_runtime 2969 pass, we can then remove the call to lookup_type_for_runtime
2896 below. */ 2970 below. */
2904 care of this for us. */ 2978 care of this for us. */
2905 STRIP_NOPS (type); 2979 STRIP_NOPS (type);
2906 if (TREE_CODE (type) == ADDR_EXPR) 2980 if (TREE_CODE (type) == ADDR_EXPR)
2907 { 2981 {
2908 type = TREE_OPERAND (type, 0); 2982 type = TREE_OPERAND (type, 0);
2909 if (TREE_CODE (type) == VAR_DECL) 2983 if (VAR_P (type))
2910 { 2984 is_public = TREE_PUBLIC (type);
2911 node = varpool_node (type);
2912 if (node)
2913 varpool_mark_needed_node (node);
2914 is_public = TREE_PUBLIC (type);
2915 }
2916 } 2985 }
2917 else 2986 else
2918 gcc_assert (TREE_CODE (type) == INTEGER_CST); 2987 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2919 } 2988 }
2920 2989
2931 3000
2932 static void 3001 static void
2933 output_one_function_exception_table (int section) 3002 output_one_function_exception_table (int section)
2934 { 3003 {
2935 int tt_format, cs_format, lp_format, i; 3004 int tt_format, cs_format, lp_format, i;
2936 #ifdef HAVE_AS_LEB128
2937 char ttype_label[32]; 3005 char ttype_label[32];
2938 char cs_after_size_label[32]; 3006 char cs_after_size_label[32];
2939 char cs_end_label[32]; 3007 char cs_end_label[32];
2940 #else
2941 int call_site_len; 3008 int call_site_len;
2942 #endif
2943 int have_tt_data; 3009 int have_tt_data;
2944 int tt_format_size = 0; 3010 int tt_format_size = 0;
2945 3011
2946 have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) 3012 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2947 || (targetm.arm_eabi_unwinder 3013 || (targetm.arm_eabi_unwinder
2948 ? VEC_length (tree, cfun->eh->ehspec_data.arm_eabi) 3014 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2949 : VEC_length (uchar, cfun->eh->ehspec_data.other))); 3015 : vec_safe_length (cfun->eh->ehspec_data.other)));
2950 3016
2951 /* Indicate the format of the @TType entries. */ 3017 /* Indicate the format of the @TType entries. */
2952 if (! have_tt_data) 3018 if (! have_tt_data)
2953 tt_format = DW_EH_PE_omit; 3019 tt_format = DW_EH_PE_omit;
2954 else 3020 else
2955 { 3021 {
2956 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1); 3022 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2957 #ifdef HAVE_AS_LEB128 3023 if (HAVE_AS_LEB128)
2958 ASM_GENERATE_INTERNAL_LABEL (ttype_label, 3024 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2959 section ? "LLSDATTC" : "LLSDATT", 3025 section ? "LLSDATTC" : "LLSDATT",
2960 current_function_funcdef_no); 3026 current_function_funcdef_no);
2961 #endif 3027
2962 tt_format_size = size_of_encoded_value (tt_format); 3028 tt_format_size = size_of_encoded_value (tt_format);
2963 3029
2964 assemble_align (tt_format_size * BITS_PER_UNIT); 3030 assemble_align (tt_format_size * BITS_PER_UNIT);
2965 } 3031 }
2966 3032
2982 /* @LPStart pointer would go here. */ 3048 /* @LPStart pointer would go here. */
2983 3049
2984 dw2_asm_output_data (1, tt_format, "@TType format (%s)", 3050 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
2985 eh_data_format_name (tt_format)); 3051 eh_data_format_name (tt_format));
2986 3052
2987 #ifndef HAVE_AS_LEB128 3053 if (!HAVE_AS_LEB128)
2988 if (targetm.except_unwind_info (&global_options) == UI_SJLJ) 3054 {
2989 call_site_len = sjlj_size_of_call_site_table (); 3055 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2990 else 3056 call_site_len = sjlj_size_of_call_site_table ();
2991 call_site_len = dw2_size_of_call_site_table (section); 3057 else
2992 #endif 3058 call_site_len = dw2_size_of_call_site_table (section);
3059 }
2993 3060
2994 /* A pc-relative 4-byte displacement to the @TType data. */ 3061 /* A pc-relative 4-byte displacement to the @TType data. */
2995 if (have_tt_data) 3062 if (have_tt_data)
2996 { 3063 {
2997 #ifdef HAVE_AS_LEB128 3064 if (HAVE_AS_LEB128)
2998 char ttype_after_disp_label[32];
2999 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3000 section ? "LLSDATTDC" : "LLSDATTD",
3001 current_function_funcdef_no);
3002 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3003 "@TType base offset");
3004 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3005 #else
3006 /* Ug. Alignment queers things. */
3007 unsigned int before_disp, after_disp, last_disp, disp;
3008
3009 before_disp = 1 + 1;
3010 after_disp = (1 + size_of_uleb128 (call_site_len)
3011 + call_site_len
3012 + VEC_length (uchar, crtl->eh.action_record_data)
3013 + (VEC_length (tree, cfun->eh->ttype_data)
3014 * tt_format_size));
3015
3016 disp = after_disp;
3017 do
3018 { 3065 {
3019 unsigned int disp_size, pad; 3066 char ttype_after_disp_label[32];
3020 3067 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3021 last_disp = disp; 3068 section ? "LLSDATTDC" : "LLSDATTD",
3022 disp_size = size_of_uleb128 (disp); 3069 current_function_funcdef_no);
3023 pad = before_disp + disp_size + after_disp; 3070 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3024 if (pad % tt_format_size) 3071 "@TType base offset");
3025 pad = tt_format_size - (pad % tt_format_size); 3072 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3026 else
3027 pad = 0;
3028 disp = after_disp + pad;
3029 } 3073 }
3030 while (disp != last_disp); 3074 else
3031 3075 {
3032 dw2_asm_output_data_uleb128 (disp, "@TType base offset"); 3076 /* Ug. Alignment queers things. */
3033 #endif 3077 unsigned int before_disp, after_disp, last_disp, disp;
3034 } 3078
3079 before_disp = 1 + 1;
3080 after_disp = (1 + size_of_uleb128 (call_site_len)
3081 + call_site_len
3082 + vec_safe_length (crtl->eh.action_record_data)
3083 + (vec_safe_length (cfun->eh->ttype_data)
3084 * tt_format_size));
3085
3086 disp = after_disp;
3087 do
3088 {
3089 unsigned int disp_size, pad;
3090
3091 last_disp = disp;
3092 disp_size = size_of_uleb128 (disp);
3093 pad = before_disp + disp_size + after_disp;
3094 if (pad % tt_format_size)
3095 pad = tt_format_size - (pad % tt_format_size);
3096 else
3097 pad = 0;
3098 disp = after_disp + pad;
3099 }
3100 while (disp != last_disp);
3101
3102 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3103 }
3104 }
3035 3105
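(Editor's note: when the assembler cannot do label arithmetic on LEB128 operands, the @TType base offset in the hunk above has to be computed by hand, and the computation is circular: the offset is itself uleb128-encoded, its encoded length changes the padding needed to align the type table, and the padding changes the offset. The loop resolves that with a small fixed-point iteration; a self-contained sketch follows, relying on the uleb128_size stand-in shown earlier and using illustrative names.)

/* Illustrative version of the fixed-point loop above.  BEFORE is the
   number of header bytes emitted before the offset field, AFTER is the
   number of bytes between the offset field and the type table, and
   ALIGN is tt_format_size.  Iterate until the offset stops changing.  */
static unsigned int
ttype_base_offset (unsigned int before, unsigned int after,
		   unsigned int align)
{
  unsigned int disp = after;
  unsigned int last_disp;

  do
    {
      unsigned int total, pad;

      last_disp = disp;
      total = before + uleb128_size (disp) + after;
      pad = (total % align) ? align - (total % align) : 0;
      disp = after + pad;
    }
  while (disp != last_disp);

  return disp;
}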
3036 /* Indicate the format of the call-site offsets. */ 3106 /* Indicate the format of the call-site offsets. */
3037 #ifdef HAVE_AS_LEB128 3107 if (HAVE_AS_LEB128)
3038 cs_format = DW_EH_PE_uleb128; 3108 cs_format = DW_EH_PE_uleb128;
3039 #else 3109 else
3040 cs_format = DW_EH_PE_udata4; 3110 cs_format = DW_EH_PE_udata4;
3041 #endif 3111
3042 dw2_asm_output_data (1, cs_format, "call-site format (%s)", 3112 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3043 eh_data_format_name (cs_format)); 3113 eh_data_format_name (cs_format));
3044 3114
3045 #ifdef HAVE_AS_LEB128 3115 if (HAVE_AS_LEB128)
3046 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, 3116 {
3047 section ? "LLSDACSBC" : "LLSDACSB", 3117 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3048 current_function_funcdef_no); 3118 section ? "LLSDACSBC" : "LLSDACSB",
3049 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, 3119 current_function_funcdef_no);
3050 section ? "LLSDACSEC" : "LLSDACSE", 3120 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3051 current_function_funcdef_no); 3121 section ? "LLSDACSEC" : "LLSDACSE",
3052 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label, 3122 current_function_funcdef_no);
3053 "Call-site table length"); 3123 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3054 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label); 3124 "Call-site table length");
3055 if (targetm.except_unwind_info (&global_options) == UI_SJLJ) 3125 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3056 sjlj_output_call_site_table (); 3126 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3127 sjlj_output_call_site_table ();
3128 else
3129 dw2_output_call_site_table (cs_format, section);
3130 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3131 }
3057 else 3132 else
3058 dw2_output_call_site_table (cs_format, section); 3133 {
3059 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label); 3134 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3060 #else 3135 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3061 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length"); 3136 sjlj_output_call_site_table ();
3062 if (targetm.except_unwind_info (&global_options) == UI_SJLJ) 3137 else
3063 sjlj_output_call_site_table (); 3138 dw2_output_call_site_table (cs_format, section);
3064 else 3139 }
3065 dw2_output_call_site_table (cs_format, section);
3066 #endif
3067 3140
3068 /* ??? Decode and interpret the data for flag_debug_asm. */ 3141 /* ??? Decode and interpret the data for flag_debug_asm. */
3069 { 3142 {
3070 uchar uc; 3143 uchar uc;
3071 FOR_EACH_VEC_ELT (uchar, crtl->eh.action_record_data, i, uc) 3144 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3072 dw2_asm_output_data (1, uc, i ? NULL : "Action record table"); 3145 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3073 } 3146 }
3074 3147
3075 if (have_tt_data) 3148 if (have_tt_data)
3076 assemble_align (tt_format_size * BITS_PER_UNIT); 3149 assemble_align (tt_format_size * BITS_PER_UNIT);
3077 3150
3078 i = VEC_length (tree, cfun->eh->ttype_data); 3151 i = vec_safe_length (cfun->eh->ttype_data);
3079 while (i-- > 0) 3152 while (i-- > 0)
3080 { 3153 {
3081 tree type = VEC_index (tree, cfun->eh->ttype_data, i); 3154 tree type = (*cfun->eh->ttype_data)[i];
3082 output_ttype (type, tt_format, tt_format_size); 3155 output_ttype (type, tt_format, tt_format_size);
3083 } 3156 }
3084 3157
3085 #ifdef HAVE_AS_LEB128 3158 if (HAVE_AS_LEB128 && have_tt_data)
3086 if (have_tt_data) 3159 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3087 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3088 #endif
3089 3160
3090 /* ??? Decode and interpret the data for flag_debug_asm. */ 3161 /* ??? Decode and interpret the data for flag_debug_asm. */
3091 if (targetm.arm_eabi_unwinder) 3162 if (targetm.arm_eabi_unwinder)
3092 { 3163 {
3093 tree type; 3164 tree type;
3094 for (i = 0; 3165 for (i = 0;
3095 VEC_iterate (tree, cfun->eh->ehspec_data.arm_eabi, i, type); ++i) 3166 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3096 output_ttype (type, tt_format, tt_format_size); 3167 output_ttype (type, tt_format, tt_format_size);
3097 } 3168 }
3098 else 3169 else
3099 { 3170 {
3100 uchar uc; 3171 uchar uc;
3101 for (i = 0; 3172 for (i = 0;
3102 VEC_iterate (uchar, cfun->eh->ehspec_data.other, i, uc); ++i) 3173 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3103 dw2_asm_output_data (1, uc, 3174 dw2_asm_output_data (1, uc,
3104 i ? NULL : "Exception specification table"); 3175 i ? NULL : "Exception specification table");
3105 } 3176 }
3106 } 3177 }
3107 3178
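(Editor's note: pulling the pieces of output_one_function_exception_table together, the LSDA it emits follows the usual Itanium/GCC layout. The rough map below is reconstructed from the code above plus that layout; the landing-pad format byte itself sits in a hunk elided from this comparison.)

/* Rough map of the emitted LSDA record, in order:

     1 byte     landing-pad base encoding (@LPStart); the pointer that
		would follow is omitted, so the base defaults
     1 byte     @TType entry encoding (DW_EH_PE_omit if no type data)
     uleb128    @TType base offset (a label delta with LEB128 assembler
		support, otherwise the hand-computed value above)
     1 byte     call-site record encoding (uleb128 vs. udata4)
     uleb128    call-site table length
     records    call-site table (dw2 or sjlj flavour)
     bytes      action record table
     padding    alignment to tt_format_size
     entries    @TType table, written in reverse index order
     bytes      exception-specification table  */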
3126 3197
3127 /* If the target wants a label to begin the table, emit it here. */ 3198 /* If the target wants a label to begin the table, emit it here. */
3128 targetm.asm_out.emit_except_table_label (asm_out_file); 3199 targetm.asm_out.emit_except_table_label (asm_out_file);
3129 3200
3130 output_one_function_exception_table (0); 3201 output_one_function_exception_table (0);
3131 if (crtl->eh.call_site_record[1] != NULL) 3202 if (crtl->eh.call_site_record_v[1])
3132 output_one_function_exception_table (1); 3203 output_one_function_exception_table (1);
3133 3204
3134 switch_to_section (current_function_section ()); 3205 switch_to_section (current_function_section ());
3135 } 3206 }
3136 3207
3137 void 3208 void
3138 set_eh_throw_stmt_table (struct function *fun, struct htab *table) 3209 set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
3139 { 3210 {
3140 fun->eh->throw_stmt_table = table; 3211 fun->eh->throw_stmt_table = table;
3141 } 3212 }
3142 3213
3143 htab_t 3214 hash_map<gimple *, int> *
3144 get_eh_throw_stmt_table (struct function *fun) 3215 get_eh_throw_stmt_table (struct function *fun)
3145 { 3216 {
3146 return fun->eh->throw_stmt_table; 3217 return fun->eh->throw_stmt_table;
3147 } 3218 }
3148 3219
3208 if (current_ir_type () == IR_GIMPLE) 3279 if (current_ir_type () == IR_GIMPLE)
3209 { 3280 {
3210 for (lp = i->landing_pads; lp ; lp = lp->next_lp) 3281 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3211 { 3282 {
3212 fprintf (out, "{%i,", lp->index); 3283 fprintf (out, "{%i,", lp->index);
3213 print_generic_expr (out, lp->post_landing_pad, 0); 3284 print_generic_expr (out, lp->post_landing_pad);
3214 fputc ('}', out); 3285 fputc ('}', out);
3215 if (lp->next_lp) 3286 if (lp->next_lp)
3216 fputc (',', out); 3287 fputc (',', out);
3217 } 3288 }
3218 } 3289 }
3226 NOTE_P (lp->landing_pad) ? "(del)" : ""); 3297 NOTE_P (lp->landing_pad) ? "(del)" : "");
3227 else 3298 else
3228 fprintf (out, "(nil),"); 3299 fprintf (out, "(nil),");
3229 if (lp->post_landing_pad) 3300 if (lp->post_landing_pad)
3230 { 3301 {
3231 rtx lab = label_rtx (lp->post_landing_pad); 3302 rtx_insn *lab = label_rtx (lp->post_landing_pad);
3232 fprintf (out, "%i%s}", INSN_UID (lab), 3303 fprintf (out, "%i%s}", INSN_UID (lab),
3233 NOTE_P (lab) ? "(del)" : ""); 3304 NOTE_P (lab) ? "(del)" : "");
3234 } 3305 }
3235 else 3306 else
3236 fprintf (out, "(nil)}"); 3307 fprintf (out, "(nil)}");
3254 { 3325 {
3255 fputc ('{', out); 3326 fputc ('{', out);
3256 if (c->label) 3327 if (c->label)
3257 { 3328 {
3258 fprintf (out, "lab:"); 3329 fprintf (out, "lab:");
3259 print_generic_expr (out, c->label, 0); 3330 print_generic_expr (out, c->label);
3260 fputc (';', out); 3331 fputc (';', out);
3261 } 3332 }
3262 print_generic_expr (out, c->type_list, 0); 3333 print_generic_expr (out, c->type_list);
3263 fputc ('}', out); 3334 fputc ('}', out);
3264 if (c->next_catch) 3335 if (c->next_catch)
3265 fputc (',', out); 3336 fputc (',', out);
3266 } 3337 }
3267 } 3338 }
3268 break; 3339 break;
3269 3340
3270 case ERT_ALLOWED_EXCEPTIONS: 3341 case ERT_ALLOWED_EXCEPTIONS:
3271 fprintf (out, " filter :%i types:", i->u.allowed.filter); 3342 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3272 print_generic_expr (out, i->u.allowed.type_list, 0); 3343 print_generic_expr (out, i->u.allowed.type_list);
3273 break; 3344 break;
3274 } 3345 }
3275 fputc ('\n', out); 3346 fputc ('\n', out);
3276 3347
3277 /* If there are sub-regions, process them. */ 3348 /* If there are sub-regions, process them. */
3317 3388
3318 if (!fun->eh->region_tree) 3389 if (!fun->eh->region_tree)
3319 return; 3390 return;
3320 3391
3321 count_r = 0; 3392 count_r = 0;
3322 for (i = 1; VEC_iterate (eh_region, fun->eh->region_array, i, r); ++i) 3393 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3323 if (r) 3394 if (r)
3324 { 3395 {
3325 if (r->index == i) 3396 if (r->index == i)
3326 count_r++; 3397 count_r++;
3327 else 3398 else
3330 err = true; 3401 err = true;
3331 } 3402 }
3332 } 3403 }
3333 3404
3334 count_lp = 0; 3405 count_lp = 0;
3335 for (i = 1; VEC_iterate (eh_landing_pad, fun->eh->lp_array, i, lp); ++i) 3406 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3336 if (lp) 3407 if (lp)
3337 { 3408 {
3338 if (lp->index == i) 3409 if (lp->index == i)
3339 count_lp++; 3410 count_lp++;
3340 else 3411 else
3347 depth = nvisited_lp = nvisited_r = 0; 3418 depth = nvisited_lp = nvisited_r = 0;
3348 outer = NULL; 3419 outer = NULL;
3349 r = fun->eh->region_tree; 3420 r = fun->eh->region_tree;
3350 while (1) 3421 while (1)
3351 { 3422 {
3352 if (VEC_index (eh_region, fun->eh->region_array, r->index) != r) 3423 if ((*fun->eh->region_array)[r->index] != r)
3353 { 3424 {
3354 error ("region_array is corrupted for region %i", r->index); 3425 error ("region_array is corrupted for region %i", r->index);
3355 err = true; 3426 err = true;
3356 } 3427 }
3357 if (r->outer != outer) 3428 if (r->outer != outer)
3366 } 3437 }
3367 nvisited_r++; 3438 nvisited_r++;
3368 3439
3369 for (lp = r->landing_pads; lp ; lp = lp->next_lp) 3440 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3370 { 3441 {
3371 if (VEC_index (eh_landing_pad, fun->eh->lp_array, lp->index) != lp) 3442 if ((*fun->eh->lp_array)[lp->index] != lp)
3372 { 3443 {
3373 error ("lp_array is corrupted for lp %i", lp->index); 3444 error ("lp_array is corrupted for lp %i", lp->index);
3374 err = true; 3445 err = true;
3375 } 3446 }
3376 if (lp->region != r) 3447 if (lp->region != r)