/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *eh_catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) eh_try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) eh_catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) eh_throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};

typedef struct eh_region *eh_region;

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

DEF_VEC_P(eh_region);
DEF_VEC_ALLOC_P(eh_region, gc);

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  VEC(eh_region,gc) *region_array;
  int last_region_number;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};
\f
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);

\f
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}

\f
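/* Initialize the global EH machinery: create the type-to-runtime-type
   map and, when using SJLJ exceptions, lay out the
   SjLj_Function_Context record type used by the runtime.  */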
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a port
         that cares should define JMP_BUF_SIZE to the optimal value.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

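/* Allocate the per-function EH status record.  Called when a new
   function body is about to be expanded.  */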
void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}

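/* Create a cleanup region within OUTER, remembering PREV_TRY, the
   innermost enclosing try region at the point the cleanup is
   created.  */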
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

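/* Create a try region within OUTER.  */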
struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

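/* Create a catch handler for try region T that catches the types in
   TYPE_OR_LIST (everything if it is NULL), and link it onto T's list
   of catch handlers.  */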
struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list, to normalize further
     processing; then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

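/* Create a region within OUTER that permits only the exception types
   listed in ALLOWED to propagate (an exception specification).  */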
struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

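/* Create a region within OUTER from which no exception may escape.  */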
struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

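/* Simple accessors and mutators for region fields used from outside
   this file.  */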
int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
\f
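/* Expand a RESX_EXPR: emit a pending RESX insn for the region given by
   EXP's operand, which will later hand control off to the next outer
   handler.  */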
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
                                     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
\f
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = XCNEWVEC (int, get_max_uid ());
  reachable = XCNEWVEC (bool, cfun->eh->last_region_number + 1);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;

      if (r->resume)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
          uid_region_num[INSN_UID (r->resume)] = i;
        }
      if (r->label)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->label)]);
          uid_region_num[INSN_UID (r->label)] = i;
        }
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i && !reachable[i])
        {
          bool kill_it = true;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && reachable[r->outer->region_number])
                kill_it = false;
              break;

            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but their existence continues to affect calls
                 within that region.  Never delete them here.  */
              kill_it = false;
              break;

            case ERT_TRY:
              {
                /* A TRY region is reachable if any of its CATCH regions
                   is reachable.  */
                struct eh_region *c;
                for (c = r->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
                  if (reachable[c->region_number])
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            remove_eh_handler (r);
        }
    }

  free (reachable);
  free (uid_region_num);
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}

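/* Record LABEL as the handler entry point for REGION in the
   exception_handler_label_map, and mark the label as preserved.  */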
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = GGC_NEW (struct ehl_map_entry);
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (crtl->eh.exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || crtl->eh.built_landing_pads);

  *slot = entry;
}

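/* Rebuild the map from handler labels (or landing pads, once those
   have been built) to their EH regions.  */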
void
find_exception_handler_labels (void)
{
  int i;

  if (crtl->eh.exception_handler_label_map)
    htab_empty (crtl->eh.exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
         occupancy factor (4/3) to avoid unnecessary resizing.  */
      crtl->eh.exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
                           ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (crtl->eh.built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;

      if (lab)
        add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, the return label needs to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! crtl->eh.built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}
\f
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}

/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  gcc_assert (!old->aka);

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}

/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function, and root the tree below OUTER_REGION.  Remap labels using the
   MAP callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh->region_tree)
    return 0;

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset from old region numbers to
     new, which means we must look at the numbers actually present,
     instead of the count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    min_region = 1, max_region = ifun->eh->last_region_number;
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun_last_region_number + 1 + num_regions);
  cfun->eh->last_region_number = max_region + eh_offset;

  /* We may have just allocated the array for the first time.
     Make sure that element zero is null.  */
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  /* Zero all entries in the range allocated.  */
  memset (VEC_address (eh_region, cfun->eh->region_array)
          + cfun_last_region_number + 1, 0, num_regions * sizeof (eh_region));

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      splice = &outer->inner;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try = VEC_index (eh_region, cfun->eh->region_array, outer_region);
         prev_try && prev_try->type != ERT_TRY;
         prev_try = prev_try->outer)
      if (prev_try->type == ERT_MUST_NOT_THROW
          || (prev_try->type == ERT_ALLOWED_EXCEPTIONS
              && !prev_try->u.allowed.type_list))
        {
          prev_try = NULL;
          break;
        }

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      if (cur == NULL)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.eh_try.eh_catch)
            REMAP (cur->u.eh_try.eh_catch);
          if (cur->u.eh_try.last_catch)
            REMAP (cur->u.eh_try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.eh_catch.next_catch)
            REMAP (cur->u.eh_catch.next_catch);
          if (cur->u.eh_catch.prev_catch)
            REMAP (cur->u.eh_catch.prev_catch);
          break;

        case ERT_CLEANUP:
          if (cur->u.cleanup.prev_try)
            REMAP (cur->u.cleanup.prev_try);
          else
            cur->u.cleanup.prev_try = prev_try;
          break;

        default:
          break;
        }

#undef REMAP
    }

  return eh_offset;
}

/* Return true if REGION_A is outer to REGION_B in IFUN.  */

bool
eh_region_outer_p (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  do
    {
      if (rp_a == rp_b)
        return true;
      rp_b = rp_b->outer;
    }
  while (rp_b);

  return false;
}

/* Return the number of the region that is outer to both REGION_A and
   REGION_B in IFUN, or -1 if there is none.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
\f
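/* Hash table routines for type_to_runtime_map: each entry is a TREE_LIST
   whose TREE_PURPOSE is the front-end type and whose TREE_VALUE is the
   corresponding runtime matching object.  */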
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

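/* Register TYPE in type_to_runtime_map, creating its runtime matching
   object via lang_eh_runtime_type if it is not already present.  */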
static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

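/* Return the runtime matching object previously registered for TYPE.  */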
static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

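/* Hash a ttypes_filter entry by its type node.  */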
static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.eh_catch.filter_list = NULL_TREE;

          if (r->u.eh_catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.eh_catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.eh_catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.eh_catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

/* Emit SEQ into the basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* ??? Collect the set of all non-overlapping catch handlers
               all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region *c;
            for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
              {
                if (c->u.eh_catch.type_list == NULL)
                  emit_jump (c->label);
                else
                  {
                    /* We need one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.eh_catch.type_list;
                    tree flt_node = c->u.eh_catch.filter_list;

                    for (; tp_node; )
                      {
                        emit_cmp_and_jump_insns
                          (crtl->eh.filter,
                           GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
                           EQ, NULL_RTX,
                           targetm.eh_return_filter_mode (), 0, c->label);

                        tp_node = TREE_CHAIN (tp_node);
                        flt_node = TREE_CHAIN (flt_node);
                      }
                  }
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (crtl->eh.filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX,
                                   targetm.eh_return_filter_mode (), 0, region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          gcc_unreachable ();
        }
    }
}

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        {
          edge e;
          basic_block src, dest;

          emit_jump (outer->post_landing_pad);
          src = BLOCK_FOR_INSN (region->resume);
          dest = BLOCK_FOR_INSN (outer->post_landing_pad);
          while (EDGE_COUNT (src->succs) > 0)
            remove_edge (EDGE_SUCC (src, 0));
          e = make_edge (src, dest, 0);
          e->probability = REG_BR_PROB_BASE;
          e->count = src->count;
        }
      else
        {
          emit_library_call (unwind_resume_libfunc, LCT_THROW,
                             VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);

          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
        }

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);
    }
}

\f
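/* Build the dwarf2 landing pads: for each reachable region, emit a label
   at which the unwinder will land, copy the exception pointer and filter
   values out of the EH return data registers, and fall through to the
   region's post-landing pad.  */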
static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)
        continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
        if (HAVE_nonlocal_goto_receiver)
          emit_insn (gen_nonlocal_goto_receiver ());
        else
#endif
          { /* Nothing */ }

      emit_move_insn (crtl->eh.exc_ptr,
                      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (crtl->eh.filter,
                      gen_rtx_REG (targetm.eh_return_filter_mode (),
                                   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}

\f
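/* Per-region bookkeeping used while building SJLJ landing pads.  */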
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

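/* Scan the function's insns for REG_EH_REGION notes and mark in LP_INFO
   each region whose landing pad is directly reachable from a throwing
   insn.  Return true if any region is.  */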
static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        continue;

      region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
        {
          type_thrown = region->u.eh_throw.type;
          region = region->outer;
        }

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
        {
          rc = reachable_next_level (region, type_thrown, NULL);
          if (rc != RNL_NOT_CAUGHT)
            break;
        }
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
        {
          lp_info[region->region_number].directly_reachable = 1;
          found_one = true;
        }
    }

  return found_one;
}

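/* Build the action table and assign dispatch and call-site indices to
   every directly reachable region.  All such regions share the single
   landing pad DISPATCH_LABEL.  */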
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

        r->landing_pad = dispatch_label;
        lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
        if (lp_info[i].action_index != -1)
          crtl->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        int action = lp_info[i].action_index;

        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          index = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          index = -1;
        /* Otherwise, look it up in the table.  */
        else
          index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

        lp_info[i].call_site_index = index;
      }
}

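/* Before each insn that can throw, store its call-site index into the
   function context so the unwinder knows which handler to dispatch to.  */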
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          if (CALL_P (insn)
              || (flag_non_call_exceptions
                  && may_trap_p (PATTERN (insn))))
            this_call_site = -1;
          else
            continue;
        }
      else
        {
          /* Calls that are known to not throw need not be marked.  */
          if (INTVAL (XEXP (note, 0)) <= 0)
            continue;

          region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
          this_call_site = lp_info[region->region_number].call_site_index;
        }

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, last;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                 TYPE_MODE (integer_type_node), 1,
                                 plus_constant (XEXP (fc, 0),
                                                sjlj_fc_jbuf_ofs), Pmode);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                             TYPE_MODE (integer_type_node), 0, dispatch_label);
    last = get_last_insn ();
    if (JUMP_P (last) && any_condjump_p (last))
      {
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        add_reg_note (last, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE / 100));
      }
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
                               dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  crtl->eh.sjlj_exit_after = after;
}

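/* Emit the call that unregisters this function's SjLj context on the
   normal exit path.  */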
static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
         libcall is inside the last basic block or after it; in the
         latter case we need to emit the sequence on the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
        {
          if (insn == crtl->eh.sjlj_exit_after)
            {
              if (LABEL_P (insn))
                insn = NEXT_INSN (insn);
              emit_insn_after (seq, insn);
              return;
            }
          if (insn == BB_END (e->src))
            break;
        }
      insert_insn_on_edge (seq, e);
    }
}

1938static void
1939sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1940{
1941 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
1942 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
1943 int i, first_reachable;
1944 rtx mem, dispatch, seq, fc;
1945 rtx before;
1946 basic_block bb;
1947 edge e;
1948
1949 fc = crtl->eh.sjlj_fc;
1950
1951 start_sequence ();
1952
1953 emit_label (dispatch_label);
1954
1955#ifndef DONT_USE_BUILTIN_SETJMP
1956 expand_builtin_setjmp_receiver (dispatch_label);
1957#endif
1958
1959 /* Load up dispatch index, exc_ptr and filter values from the
1960 function context. */
1961 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
1962 sjlj_fc_call_site_ofs);
1963 dispatch = copy_to_reg (mem);
1964
1965 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
1966 if (unwind_word_mode != ptr_mode)
1967 {
1968#ifdef POINTERS_EXTEND_UNSIGNED
1969 mem = convert_memory_address (ptr_mode, mem);
1970#else
1971 mem = convert_to_mode (ptr_mode, mem, 0);
1972#endif
1973 }
1974 emit_move_insn (crtl->eh.exc_ptr, mem);
1975
1976 mem = adjust_address (fc, unwind_word_mode,
1977 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
1978 if (unwind_word_mode != filter_mode)
1979 mem = convert_to_mode (filter_mode, mem, 0);
1980 emit_move_insn (crtl->eh.filter, mem);
1981
1982 /* Jump to one of the directly reachable regions. */
1983 /* ??? This really ought to be using a switch statement. */
1984
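  /* Illustration (hypothetical dispatch indices): with directly
     reachable regions A, B and C, where A is the first one found by
     the loop below, the emitted code is roughly

	if (dispatch == <index of B>) goto <post landing pad of B>;
	if (dispatch == <index of C>) goto <post landing pad of C>;
	<fall through to the post landing pad of A>

     A itself needs no compare; it is reached through the fallthru
     edge created at the bottom of this function.  */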
1985 first_reachable = 0;
1986 for (i = cfun->eh->last_region_number; i > 0; --i)
1987 {
1988 if (! lp_info[i].directly_reachable)
1989 continue;
1990
1991 if (! first_reachable)
1992 {
1993 first_reachable = i;
1994 continue;
1995 }
1996
1997 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
1998 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
1999 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2000 ->post_landing_pad);
2001 }
2002
2003 seq = get_insns ();
2004 end_sequence ();
2005
2006 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2007 ->post_landing_pad);
2008
2009 bb = emit_to_new_bb_before (seq, before);
2010 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2011 e->count = bb->count;
2012 e->probability = REG_BR_PROB_BASE;
2013}
2014
2015static void
2016sjlj_build_landing_pads (void)
2017{
2018 struct sjlj_lp_info *lp_info;
2019
2020 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2021
2022 if (sjlj_find_directly_reachable_regions (lp_info))
2023 {
2024 rtx dispatch_label = gen_label_rtx ();
2025 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2026 TYPE_MODE (sjlj_fc_type_node),
2027 TYPE_ALIGN (sjlj_fc_type_node));
2028 crtl->eh.sjlj_fc
2029 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2030 int_size_in_bytes (sjlj_fc_type_node),
2031 align);
2032
2033 sjlj_assign_call_site_values (dispatch_label, lp_info);
2034 sjlj_mark_call_sites (lp_info);
2035
2036 sjlj_emit_function_enter (dispatch_label);
2037 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2038 sjlj_emit_function_exit ();
2039 }
2040
2041 free (lp_info);
2042}
2043
2044void
2045finish_eh_generation (void)
2046{
2047 basic_block bb;
2048
2049 /* Nothing to do if no regions created. */
2050 if (cfun->eh->region_tree == NULL)
2051 return;
2052
2053 /* The object here is to provide find_basic_blocks with detailed
2054 information (via reachable_handlers) on how exception control
2055 flows within the function. In this first pass, we can include
2056 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2057 regions, and hope that it will be useful in deleting unreachable
2058 handlers. Subsequently, we will generate landing pads which will
2059 connect many of the handlers, and then type information will not
2060 be effective. Still, this is a win over previous implementations. */
2061
2062 /* These registers are used by the landing pads. Make sure they
2063 have been generated. */
2064 get_exception_pointer ();
2065 get_exception_filter ();
2066
2067 /* Construct the landing pads. */
2068
2069 assign_filter_values ();
2070 build_post_landing_pads ();
2071 connect_post_landing_pads ();
2072 if (USING_SJLJ_EXCEPTIONS)
2073 sjlj_build_landing_pads ();
2074 else
2075 dw2_build_landing_pads ();
2076
2077 crtl->eh.built_landing_pads = 1;
2078
2079 /* We've totally changed the CFG. Start over. */
2080 find_exception_handler_labels ();
2081 break_superblocks ();
2082 if (USING_SJLJ_EXCEPTIONS
2083 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2084 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2085 commit_edge_insertions ();
2086 FOR_EACH_BB (bb)
2087 {
2088 edge e;
2089 edge_iterator ei;
2090 bool eh = false;
2091 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2092 {
2093 if (e->flags & EDGE_EH)
2094 {
2095 remove_edge (e);
2096 eh = true;
2097 }
2098 else
2099 ei_next (&ei);
2100 }
2101 if (eh)
2102 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2103 }
2104}
2105\f
2106static hashval_t
2107ehl_hash (const void *pentry)
2108{
2109 const struct ehl_map_entry *const entry
2110 = (const struct ehl_map_entry *) pentry;
2111
2112 /* 2^32 * ((sqrt(5) - 1) / 2) */
2113 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2114 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2115}
2116
2117static int
2118ehl_eq (const void *pentry, const void *pdata)
2119{
2120 const struct ehl_map_entry *const entry
2121 = (const struct ehl_map_entry *) pentry;
2122 const struct ehl_map_entry *const data
2123 = (const struct ehl_map_entry *) pdata;
2124
2125 return entry->label == data->label;
2126}
2127
2128/* This section handles removing dead code for flow. */
2129
2130/* Remove LABEL from exception_handler_label_map. */
2131
2132static void
2133remove_exception_handler_label (rtx label)
2134{
2135 struct ehl_map_entry **slot, tmp;
2136
2137 /* If exception_handler_label_map was not built yet,
2138 there is nothing to do. */
2139 if (crtl->eh.exception_handler_label_map == NULL)
2140 return;
2141
2142 tmp.label = label;
2143 slot = (struct ehl_map_entry **)
2144 htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
2145 gcc_assert (slot);
2146
2147 htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
2148}
2149
2150/* Splice REGION from the region tree etc. */
2151
2152static void
2153remove_eh_handler (struct eh_region *region)
2154{
2155 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2156 rtx lab;
2157
2158 /* For the benefit of efficiently handling REG_EH_REGION notes,
2159 replace this region in the region array with its containing
2160 region. Note that previous region deletions may result in
2161 multiple copies of this region in the array, so we have a
2162 list of alternate numbers by which we are known. */
2163
2164 outer = region->outer;
2165 VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
2166 if (region->aka)
2167 {
2168 unsigned i;
2169 bitmap_iterator bi;
2170
2171 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2172 {
2173 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
2174 }
2175 }
2176
2177 if (outer)
2178 {
2179 if (!outer->aka)
2180 outer->aka = BITMAP_GGC_ALLOC ();
2181 if (region->aka)
2182 bitmap_ior_into (outer->aka, region->aka);
2183 bitmap_set_bit (outer->aka, region->region_number);
2184 }
2185
2186 if (crtl->eh.built_landing_pads)
2187 lab = region->landing_pad;
2188 else
2189 lab = region->label;
2190 if (lab)
2191 remove_exception_handler_label (lab);
2192
2193 if (outer)
2194 pp_start = &outer->inner;
2195 else
2196 pp_start = &cfun->eh->region_tree;
2197 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2198 continue;
2199 *pp = region->next_peer;
2200
2201 inner = region->inner;
2202 if (inner)
2203 {
2204 for (p = inner; p->next_peer ; p = p->next_peer)
2205 p->outer = outer;
2206 p->outer = outer;
2207
2208 p->next_peer = *pp_start;
2209 *pp_start = inner;
2210 }
2211
2212 if (region->type == ERT_CATCH)
2213 {
2214 struct eh_region *eh_try, *next, *prev;
2215
2216 for (eh_try = region->next_peer;
2217 eh_try->type == ERT_CATCH;
2218 eh_try = eh_try->next_peer)
2219 continue;
2220 gcc_assert (eh_try->type == ERT_TRY);
2221
2222 next = region->u.eh_catch.next_catch;
2223 prev = region->u.eh_catch.prev_catch;
2224
2225 if (next)
2226 next->u.eh_catch.prev_catch = prev;
2227 else
2228 eh_try->u.eh_try.last_catch = prev;
2229 if (prev)
2230 prev->u.eh_catch.next_catch = next;
2231 else
2232 {
2233 eh_try->u.eh_try.eh_catch = next;
2234 if (! next)
2235 remove_eh_handler (eh_try);
2236 }
2237 }
2238}
2239
2240/* LABEL heads a basic block that is about to be deleted. If this
2241 label corresponds to an exception region, we may be able to
2242 delete the region. */
2243
2244void
2245maybe_remove_eh_handler (rtx label)
2246{
2247 struct ehl_map_entry **slot, tmp;
2248 struct eh_region *region;
2249
2250 /* ??? After generating landing pads, it's not so simple to determine
2251 if the region data is completely unused. One must examine the
2252 landing pad and the post landing pad, and whether an inner try block
2253 is referencing the catch handlers directly. */
2254 if (crtl->eh.built_landing_pads)
2255 return;
2256
2257 tmp.label = label;
2258 slot = (struct ehl_map_entry **)
2259 htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
2260 if (! slot)
2261 return;
2262 region = (*slot)->region;
2263 if (! region)
2264 return;
2265
2266 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2267 because there is no path to the fallback call to terminate.
2268 But the region continues to affect call-site data until there
2269 are no more contained calls, which we don't see here. */
2270 if (region->type == ERT_MUST_NOT_THROW)
2271 {
2272 htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
2273 region->label = NULL_RTX;
2274 }
2275 else
2276 remove_eh_handler (region);
2277}
2278
2279/* Invokes CALLBACK for every exception handler label. Only used by old
2280 loop hackery; should not be used by new code. */
2281
2282void
2283for_each_eh_label (void (*callback) (rtx))
2284{
2285 htab_traverse (crtl->eh.exception_handler_label_map, for_each_eh_label_1,
2286 (void *) &callback);
2287}
2288
2289static int
2290for_each_eh_label_1 (void **pentry, void *data)
2291{
2292 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2293 void (*callback) (rtx) = *(void (**) (rtx)) data;
2294
2295 (*callback) (entry->label);
2296 return 1;
2297}
2298
2299/* Invoke CALLBACK for every exception region in the current function. */
2300
2301void
2302for_each_eh_region (void (*callback) (struct eh_region *))
2303{
2304 int i, n = cfun->eh->last_region_number;
2305 for (i = 1; i <= n; ++i)
2306 {
2307 struct eh_region *region;
2308
2309 region = VEC_index (eh_region, cfun->eh->region_array, i);
2310 if (region)
2311 (*callback) (region);
2312 }
2313}
2314\f
2315/* This section describes CFG exception edges for flow. */
2316
2317/* For communicating between calls to reachable_next_level. */
2318struct reachable_info
2319{
2320 tree types_caught;
2321 tree types_allowed;
2322 void (*callback) (struct eh_region *, void *);
2323 void *callback_data;
2324 bool saw_any_handlers;
2325};
2326
2327/* A subroutine of reachable_next_level. Return true if TYPE, or a
2328 base class of TYPE, is in HANDLED. */
2329
2330static int
2331check_handled (tree handled, tree type)
2332{
2333 tree t;
2334
2335 /* We can check for exact matches without front-end help. */
2336 if (! lang_eh_type_covers)
2337 {
2338 for (t = handled; t ; t = TREE_CHAIN (t))
2339 if (TREE_VALUE (t) == type)
2340 return 1;
2341 }
2342 else
2343 {
2344 for (t = handled; t ; t = TREE_CHAIN (t))
2345 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2346 return 1;
2347 }
2348
2349 return 0;
2350}
2351
2352/* A subroutine of reachable_next_level. If we are collecting a list
2353 of handlers, add one. After landing pad generation, reference
2354 it instead of the handlers themselves. Further, the handlers are
2355 all wired together, so by referencing one, we've got them all.
2356 Before landing pad generation we reference each handler individually.
2357
2358 LP_REGION contains the landing pad; REGION is the handler. */
2359
2360static void
2361add_reachable_handler (struct reachable_info *info,
2362 struct eh_region *lp_region, struct eh_region *region)
2363{
2364 if (! info)
2365 return;
2366
2367 info->saw_any_handlers = true;
2368
2369 if (crtl->eh.built_landing_pads)
2370 info->callback (lp_region, info->callback_data);
2371 else
2372 info->callback (region, info->callback_data);
2373}
2374
2375/* Process one level of exception regions for reachability.
2376 If TYPE_THROWN is non-null, then it is the *exact* type being
2377 propagated. If INFO is non-null, then collect handler labels
2378 and caught/allowed type information between invocations. */
2379
2380static enum reachable_code
2381reachable_next_level (struct eh_region *region, tree type_thrown,
2382 struct reachable_info *info)
2383{
2384 switch (region->type)
2385 {
2386 case ERT_CLEANUP:
2387 /* Before landing-pad generation, we model control flow
2388 directly to the individual handlers. In this way we can
2389 see that catch handler types may shadow one another. */
2390 add_reachable_handler (info, region, region);
2391 return RNL_MAYBE_CAUGHT;
2392
2393 case ERT_TRY:
2394 {
2395 struct eh_region *c;
2396 enum reachable_code ret = RNL_NOT_CAUGHT;
2397
2398 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2399 {
2400 /* A catch-all handler ends the search. */
2401 if (c->u.eh_catch.type_list == NULL)
2402 {
2403 add_reachable_handler (info, region, c);
2404 return RNL_CAUGHT;
2405 }
2406
2407 if (type_thrown)
2408 {
2409 /* If we have at least one type match, end the search. */
2410 tree tp_node = c->u.eh_catch.type_list;
2411
2412 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2413 {
2414 tree type = TREE_VALUE (tp_node);
2415
2416 if (type == type_thrown
2417 || (lang_eh_type_covers
2418 && (*lang_eh_type_covers) (type, type_thrown)))
2419 {
2420 add_reachable_handler (info, region, c);
2421 return RNL_CAUGHT;
2422 }
2423 }
2424
2425 /* If we have definitive information of a match failure,
2426 the catch won't trigger. */
2427 if (lang_eh_type_covers)
2428 return RNL_NOT_CAUGHT;
2429 }
2430
2431 /* At this point, we either don't know what type is thrown or
2432 don't have front-end assistance to help decide whether it is
2433 covered by one of the types in the list for this region.
2434
2435 We'd then like to add this region to the list of reachable
2436 handlers since it is indeed potentially reachable based on the
2437 information we have.
2438
2439 Actually, this handler is for sure not reachable if all the
2440 types it matches have already been caught. That is, it is only
2441 potentially reachable if at least one of the types it catches
2442 has not been previously caught. */
2443
2444 if (! info)
2445 ret = RNL_MAYBE_CAUGHT;
2446 else
2447 {
2448 tree tp_node = c->u.eh_catch.type_list;
2449 bool maybe_reachable = false;
2450
2451 /* Compute the potential reachability of this handler and
2452 update the list of types caught at the same time. */
2453 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2454 {
2455 tree type = TREE_VALUE (tp_node);
2456
2457 if (! check_handled (info->types_caught, type))
2458 {
2459 info->types_caught
2460 = tree_cons (NULL, type, info->types_caught);
2461
2462 maybe_reachable = true;
2463 }
2464 }
2465
2466 if (maybe_reachable)
2467 {
2468 add_reachable_handler (info, region, c);
2469
2470 /* ??? If the catch type is a base class of every allowed
2471 type, then we know we can stop the search. */
2472 ret = RNL_MAYBE_CAUGHT;
2473 }
2474 }
2475 }
2476
2477 return ret;
2478 }
2479
2480 case ERT_ALLOWED_EXCEPTIONS:
2481 /* An empty list of types definitely ends the search. */
2482 if (region->u.allowed.type_list == NULL_TREE)
2483 {
2484 add_reachable_handler (info, region, region);
2485 return RNL_CAUGHT;
2486 }
2487
2488 /* Collect a list of lists of allowed types for use in detecting
2489 when a catch may be transformed into a catch-all. */
2490 if (info)
2491 info->types_allowed = tree_cons (NULL_TREE,
2492 region->u.allowed.type_list,
2493 info->types_allowed);
2494
2495 /* If we have definitive information about the type hierarchy,
2496 then we can tell if the thrown type will pass through the
2497 filter. */
2498 if (type_thrown && lang_eh_type_covers)
2499 {
2500 if (check_handled (region->u.allowed.type_list, type_thrown))
2501 return RNL_NOT_CAUGHT;
2502 else
2503 {
2504 add_reachable_handler (info, region, region);
2505 return RNL_CAUGHT;
2506 }
2507 }
2508
2509 add_reachable_handler (info, region, region);
2510 return RNL_MAYBE_CAUGHT;
2511
2512 case ERT_CATCH:
2513 /* Catch regions are handled by their controlling try region. */
2514 return RNL_NOT_CAUGHT;
2515
2516 case ERT_MUST_NOT_THROW:
2517 /* Here we end our search, since no exceptions may propagate.
2518 If we've touched down at some landing pad previously, then the
2519 explicit function call we generated may be used. Otherwise
2520 the call is made by the runtime.
2521
2522 Before inlining, do not perform this optimization. We may
2523 inline a subroutine that contains handlers, and that will
2524 change the value of saw_any_handlers. */
2525
2526 if ((info && info->saw_any_handlers) || !cfun->after_inlining)
2527 {
2528 add_reachable_handler (info, region, region);
2529 return RNL_CAUGHT;
2530 }
2531 else
2532 return RNL_BLOCKED;
2533
2534 case ERT_THROW:
2535 case ERT_UNKNOWN:
2536 /* Shouldn't see these here. */
2537 gcc_unreachable ();
2538 break;
2539 default:
2540 gcc_unreachable ();
2541 }
2542}
2543
2544/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2545
2546void
2547foreach_reachable_handler (int region_number, bool is_resx,
2548 void (*callback) (struct eh_region *, void *),
2549 void *callback_data)
2550{
2551 struct reachable_info info;
2552 struct eh_region *region;
2553 tree type_thrown;
2554
2555 memset (&info, 0, sizeof (info));
2556 info.callback = callback;
2557 info.callback_data = callback_data;
2558
2559 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2560
2561 type_thrown = NULL_TREE;
2562 if (is_resx)
2563 {
2564 /* A RESX leaves a region instead of entering it. Thus the
2565 region itself may have been deleted out from under us. */
2566 if (region == NULL)
2567 return;
2568 region = region->outer;
2569 }
2570 else if (region->type == ERT_THROW)
2571 {
2572 type_thrown = region->u.eh_throw.type;
2573 region = region->outer;
2574 }
2575
2576 while (region)
2577 {
2578 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2579 break;
2580 /* If we have processed one cleanup, there is no point in
2581 processing any more of them. Each cleanup will have an edge
2582 to the next outer cleanup region, so the flow graph will be
2583 accurate. */
2584 if (region->type == ERT_CLEANUP)
2585 region = region->u.cleanup.prev_try;
2586 else
2587 region = region->outer;
2588 }
2589}
2590
2591/* Retrieve a list of labels of exception handlers which can be
2592 reached by a given insn. */
2593
2594static void
2595arh_to_landing_pad (struct eh_region *region, void *data)
2596{
2597 rtx *p_handlers = (rtx *) data;
2598 if (! *p_handlers)
2599 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2600}
2601
2602static void
2603arh_to_label (struct eh_region *region, void *data)
2604{
2605 rtx *p_handlers = (rtx *) data;
2606 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2607}
2608
2609rtx
2610reachable_handlers (rtx insn)
2611{
2612 bool is_resx = false;
2613 rtx handlers = NULL;
2614 int region_number;
2615
2616 if (JUMP_P (insn)
2617 && GET_CODE (PATTERN (insn)) == RESX)
2618 {
2619 region_number = XINT (PATTERN (insn), 0);
2620 is_resx = true;
2621 }
2622 else
2623 {
2624 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2625 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2626 return NULL;
2627 region_number = INTVAL (XEXP (note, 0));
2628 }
2629
2630 foreach_reachable_handler (region_number, is_resx,
2631 (crtl->eh.built_landing_pads
2632 ? arh_to_landing_pad
2633 : arh_to_label),
2634 &handlers);
2635
2636 return handlers;
2637}
2638
2639/* Determine if the given INSN can throw an exception that is caught
2640 within the function. */
2641
2642bool
2643can_throw_internal_1 (int region_number, bool is_resx)
2644{
2645 struct eh_region *region;
2646 tree type_thrown;
2647
2648 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2649
2650 type_thrown = NULL_TREE;
2651 if (is_resx)
2652 region = region->outer;
2653 else if (region->type == ERT_THROW)
2654 {
2655 type_thrown = region->u.eh_throw.type;
2656 region = region->outer;
2657 }
2658
2659 /* If this exception is ignored by each and every containing region,
2660 then control passes straight out. The runtime may handle some
2661 regions, which also do not require processing internally. */
2662 for (; region; region = region->outer)
2663 {
2664 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2665 if (how == RNL_BLOCKED)
2666 return false;
2667 if (how != RNL_NOT_CAUGHT)
2668 return true;
2669 }
2670
2671 return false;
2672}
2673
2674bool
2675can_throw_internal (const_rtx insn)
2676{
2677 rtx note;
2678
2679 if (! INSN_P (insn))
2680 return false;
2681
2682 if (JUMP_P (insn)
2683 && GET_CODE (PATTERN (insn)) == RESX
2684 && XINT (PATTERN (insn), 0) > 0)
2685 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);
2686
2687 if (NONJUMP_INSN_P (insn)
2688 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2689 insn = XVECEXP (PATTERN (insn), 0, 0);
2690
2691 /* Every insn that might throw has an EH_REGION note. */
2692 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2693 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2694 return false;
2695
2696 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
2697}
2698
2699/* Determine if the given INSN can throw an exception that is
2700 visible outside the function. */
2701
2702bool
2703can_throw_external_1 (int region_number, bool is_resx)
2704{
2705 struct eh_region *region;
2706 tree type_thrown;
2707
2708 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2709
2710 type_thrown = NULL_TREE;
2711 if (is_resx)
2712 region = region->outer;
2713 else if (region->type == ERT_THROW)
2714 {
2715 type_thrown = region->u.eh_throw.type;
2716 region = region->outer;
2717 }
2718
2719 /* If the exception is caught or blocked by any containing region,
2720 then it is not seen by any calling function. */
2721 for (; region ; region = region->outer)
2722 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2723 return false;
2724
2725 return true;
2726}
2727
2728bool
2729can_throw_external (const_rtx insn)
2730{
2731 rtx note;
2732
2733 if (! INSN_P (insn))
2734 return false;
2735
2736 if (JUMP_P (insn)
2737 && GET_CODE (PATTERN (insn)) == RESX
2738 && XINT (PATTERN (insn), 0) > 0)
2739 return can_throw_external_1 (XINT (PATTERN (insn), 0), true);
2740
2741 if (NONJUMP_INSN_P (insn)
2742 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2743 insn = XVECEXP (PATTERN (insn), 0, 0);
2744
2745 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2746 if (!note)
2747 {
2748 /* Calls (and trapping insns) without notes are outside any
2749 exception handling region in this function. We have to
2750 assume it might throw. Given that the front end and middle
2751 ends mark known NOTHROW functions, this isn't so wildly
2752 inaccurate. */
2753 return (CALL_P (insn)
2754 || (flag_non_call_exceptions
2755 && may_trap_p (PATTERN (insn))));
2756 }
2757 if (INTVAL (XEXP (note, 0)) <= 0)
2758 return false;
2759
2760 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
2761}
2762
2763/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
2764
2765unsigned int
2766set_nothrow_function_flags (void)
2767{
2768 rtx insn;
2769
2770 /* If we don't know that this implementation of the function will
2771 actually be used, then we must not set TREE_NOTHROW, since
2772 callers must not assume that this function does not throw. */
2773 if (DECL_REPLACEABLE_P (current_function_decl))
2774 return 0;
2775
2776 TREE_NOTHROW (current_function_decl) = 1;
2777
2778 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2779 something that can throw an exception. We specifically exempt
2780 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2781 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2782 is optimistic. */
2783
2784 crtl->all_throwers_are_sibcalls = 1;
2785
2786 if (! flag_exceptions)
2787 return 0;
2788
2789 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2790 if (can_throw_external (insn))
2791 {
2792 TREE_NOTHROW (current_function_decl) = 0;
2793
2794 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2795 {
2796 crtl->all_throwers_are_sibcalls = 0;
2797 return 0;
2798 }
2799 }
2800
2801 for (insn = crtl->epilogue_delay_list; insn;
2802 insn = XEXP (insn, 1))
2803 if (can_throw_external (insn))
2804 {
2805 TREE_NOTHROW (current_function_decl) = 0;
2806
2807 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2808 {
2809 crtl->all_throwers_are_sibcalls = 0;
2810 return 0;
2811 }
2812 }
2813 return 0;
2814}
2815
2816struct rtl_opt_pass pass_set_nothrow_function_flags =
2817{
2818 {
2819 RTL_PASS,
2820 NULL, /* name */
2821 NULL, /* gate */
2822 set_nothrow_function_flags, /* execute */
2823 NULL, /* sub */
2824 NULL, /* next */
2825 0, /* static_pass_number */
2826 0, /* tv_id */
2827 0, /* properties_required */
2828 0, /* properties_provided */
2829 0, /* properties_destroyed */
2830 0, /* todo_flags_start */
2831 0, /* todo_flags_finish */
2832 }
2833};
2834
2835\f
2836 /* Various hooks for the unwind library. */
2837
2838/* Do any necessary initialization to access arbitrary stack frames.
2839 On the SPARC, this means flushing the register windows. */
2840
2841void
2842expand_builtin_unwind_init (void)
2843{
2844 /* Set this so all the registers get saved in our frame; we need to be
2845 able to copy the saved values for any registers from frames we unwind. */
2846 crtl->saves_all_registers = 1;
2847
2848#ifdef SETUP_FRAME_ADDRESSES
2849 SETUP_FRAME_ADDRESSES ();
2850#endif
2851}
2852
2853rtx
2854expand_builtin_eh_return_data_regno (tree exp)
2855{
2856 tree which = CALL_EXPR_ARG (exp, 0);
2857 unsigned HOST_WIDE_INT iwhich;
2858
2859 if (TREE_CODE (which) != INTEGER_CST)
2860 {
2861 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2862 return constm1_rtx;
2863 }
2864
2865 iwhich = tree_low_cst (which, 1);
2866 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2867 if (iwhich == INVALID_REGNUM)
2868 return constm1_rtx;
2869
2870#ifdef DWARF_FRAME_REGNUM
2871 iwhich = DWARF_FRAME_REGNUM (iwhich);
2872#else
2873 iwhich = DBX_REGISTER_NUMBER (iwhich);
2874#endif
2875
2876 return GEN_INT (iwhich);
2877}
2878
2879/* Given a value extracted from the return address register or stack slot,
2880 return the actual address encoded in that value. */
2881
2882rtx
2883expand_builtin_extract_return_addr (tree addr_tree)
2884{
2885 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2886
2887 if (GET_MODE (addr) != Pmode
2888 && GET_MODE (addr) != VOIDmode)
2889 {
2890#ifdef POINTERS_EXTEND_UNSIGNED
2891 addr = convert_memory_address (Pmode, addr);
2892#else
2893 addr = convert_to_mode (Pmode, addr, 0);
2894#endif
2895 }
2896
2897 /* First mask out any unwanted bits. */
2898#ifdef MASK_RETURN_ADDR
2899 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2900#endif
2901
2902 /* Then adjust to find the real return address. */
2903#if defined (RETURN_ADDR_OFFSET)
2904 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2905#endif
2906
2907 return addr;
2908}
2909
2910/* Given an actual address in addr_tree, do any necessary encoding
2911 and return the value to be stored in the return address register or
2912 stack slot so the epilogue will return to that address. */
2913
2914rtx
2915expand_builtin_frob_return_addr (tree addr_tree)
2916{
2917 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2918
2919 addr = convert_memory_address (Pmode, addr);
2920
2921#ifdef RETURN_ADDR_OFFSET
2922 addr = force_reg (Pmode, addr);
2923 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2924#endif
2925
2926 return addr;
2927}
2928
2929/* Set up the epilogue with the magic bits we'll need to return to the
2930 exception handler. */
2931
2932void
2933expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2934 tree handler_tree)
2935{
2936 rtx tmp;
2937
2938#ifdef EH_RETURN_STACKADJ_RTX
2939 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2940 VOIDmode, EXPAND_NORMAL);
2941 tmp = convert_memory_address (Pmode, tmp);
2942 if (!crtl->eh.ehr_stackadj)
2943 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2944 else if (tmp != crtl->eh.ehr_stackadj)
2945 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2946#endif
2947
2948 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2949 VOIDmode, EXPAND_NORMAL);
2950 tmp = convert_memory_address (Pmode, tmp);
2951 if (!crtl->eh.ehr_handler)
2952 crtl->eh.ehr_handler = copy_to_reg (tmp);
2953 else if (tmp != crtl->eh.ehr_handler)
2954 emit_move_insn (crtl->eh.ehr_handler, tmp);
2955
2956 if (!crtl->eh.ehr_label)
2957 crtl->eh.ehr_label = gen_label_rtx ();
2958 emit_jump (crtl->eh.ehr_label);
2959}
2960
2961void
2962expand_eh_return (void)
2963{
2964 rtx around_label;
2965
2966 if (! crtl->eh.ehr_label)
2967 return;
2968
2969 crtl->calls_eh_return = 1;
2970
2971#ifdef EH_RETURN_STACKADJ_RTX
2972 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2973#endif
2974
2975 around_label = gen_label_rtx ();
2976 emit_jump (around_label);
2977
2978 emit_label (crtl->eh.ehr_label);
2979 clobber_return_register ();
2980
2981#ifdef EH_RETURN_STACKADJ_RTX
2982 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2983#endif
2984
2985#ifdef HAVE_eh_return
2986 if (HAVE_eh_return)
2987 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
2988 else
2989#endif
2990 {
2991#ifdef EH_RETURN_HANDLER_RTX
2992 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2993#else
2994 error ("__builtin_eh_return not supported on this target");
2995#endif
2996 }
2997
2998 emit_label (around_label);
2999}
3000
3001/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3002 POINTERS_EXTEND_UNSIGNED and return it. */
3003
3004rtx
3005expand_builtin_extend_pointer (tree addr_tree)
3006{
3007 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3008 int extend;
3009
3010#ifdef POINTERS_EXTEND_UNSIGNED
3011 extend = POINTERS_EXTEND_UNSIGNED;
3012#else
3013 /* The previous EH code did an unsigned extend by default, so we do the
3014 same here for consistency. */
3015 extend = 1;
3016#endif
3017
3018 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3019}
3020\f
3021/* In the following functions, we represent entries in the action table
3022 as 1-based indices. Special cases are:
3023
3024 0: null action record, non-null landing pad; implies cleanups
3025 -1: null action record, null landing pad; implies no action
3026 -2: no call-site entry; implies must_not_throw
3027 -3: we have yet to process outer regions
3028
3029 Further, no special cases apply to the "next" field of the record.
3030 For next, 0 means end of list. */
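
/* For example (hypothetical nesting): a call protected only by cleanup
   regions collapses to action 0 with a landing pad; a call outside any
   region gets -1 and, unless an lsda turns out to be needed anyway, no
   call-site note at all; a call inside a must-not-throw region gets -2
   and is deliberately omitted from the call-site table.  */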
3031
3032struct action_record
3033{
3034 int offset;
3035 int filter;
3036 int next;
3037};
3038
3039static int
3040action_record_eq (const void *pentry, const void *pdata)
3041{
3042 const struct action_record *entry = (const struct action_record *) pentry;
3043 const struct action_record *data = (const struct action_record *) pdata;
3044 return entry->filter == data->filter && entry->next == data->next;
3045}
3046
3047static hashval_t
3048action_record_hash (const void *pentry)
3049{
3050 const struct action_record *entry = (const struct action_record *) pentry;
3051 return entry->next * 1009 + entry->filter;
3052}
3053
3054static int
3055add_action_record (htab_t ar_hash, int filter, int next)
3056{
3057 struct action_record **slot, *new_ar, tmp;
3058
3059 tmp.filter = filter;
3060 tmp.next = next;
3061 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3062
3063 if ((new_ar = *slot) == NULL)
3064 {
3065 new_ar = XNEW (struct action_record);
3066 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3067 new_ar->filter = filter;
3068 new_ar->next = next;
3069 *slot = new_ar;
3070
3071 /* The filter value goes in untouched. The link to the next
3072 record is a "self-relative" byte offset, or zero to indicate
3073 that there is no next record. So convert the absolute 1 based
3074 indices we've been carrying around into a displacement. */
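
      /* Worked example (assuming a one-byte filter): if the varray
	 already holds 4 bytes and this record's next refers to the
	 record at 1-based offset 2, then after pushing the filter the
	 active size is 5 and next becomes 2 - (5 + 1) = -4, i.e. the
	 target record starts four bytes before the byte about to be
	 written.  */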
3075
3076 push_sleb128 (&crtl->eh.action_record_data, filter);
3077 if (next)
3078 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3079 push_sleb128 (&crtl->eh.action_record_data, next);
3080 }
3081
3082 return new_ar->offset;
3083}
3084
3085static int
3086collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3087{
3088 struct eh_region *c;
3089 int next;
3090
3091 /* If we've reached the top of the region chain, then we have
3092 no actions, and require no landing pad. */
3093 if (region == NULL)
3094 return -1;
3095
3096 switch (region->type)
3097 {
3098 case ERT_CLEANUP:
3099 /* A cleanup adds a zero filter to the beginning of the chain, but
3100 there are special cases to look out for. If there are *only*
3101 cleanups along a path, then it compresses to a zero action.
3102 Further, if there are multiple cleanups along a path, we only
3103 need to represent one of them, as that is enough to trigger
3104 entry to the landing pad at runtime. */
3105 next = collect_one_action_chain (ar_hash, region->outer);
3106 if (next <= 0)
3107 return 0;
3108 for (c = region->outer; c ; c = c->outer)
3109 if (c->type == ERT_CLEANUP)
3110 return next;
3111 return add_action_record (ar_hash, 0, next);
3112
3113 case ERT_TRY:
3114 /* Process the associated catch regions in reverse order.
3115 If there's a catch-all handler, then we don't need to
3116 search outer regions. Use a magic -3 value to record
3117 that we haven't done the outer search. */
3118 next = -3;
3119 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3120 {
3121 if (c->u.eh_catch.type_list == NULL)
3122 {
3123 /* Retrieve the filter from the head of the filter list
3124 where we have stored it (see assign_filter_values). */
3125 int filter
3126 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3127
3128 next = add_action_record (ar_hash, filter, 0);
3129 }
3130 else
3131 {
3132 /* Once the outer search is done, trigger an action record for
3133 each filter we have. */
3134 tree flt_node;
3135
3136 if (next == -3)
3137 {
3138 next = collect_one_action_chain (ar_hash, region->outer);
3139
3140 /* If there is no next action, terminate the chain. */
3141 if (next == -1)
3142 next = 0;
3143 /* If all outer actions are cleanups or must_not_throw,
3144 we'll have no action record for it, since we had wanted
3145 to encode these states in the call-site record directly.
3146 Add a cleanup action to the chain to catch these. */
3147 else if (next <= 0)
3148 next = add_action_record (ar_hash, 0, 0);
3149 }
3150
3151 flt_node = c->u.eh_catch.filter_list;
3152 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3153 {
3154 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3155 next = add_action_record (ar_hash, filter, next);
3156 }
3157 }
3158 }
3159 return next;
3160
3161 case ERT_ALLOWED_EXCEPTIONS:
3162 /* An exception specification adds its filter to the
3163 beginning of the chain. */
3164 next = collect_one_action_chain (ar_hash, region->outer);
3165
3166 /* If there is no next action, terminate the chain. */
3167 if (next == -1)
3168 next = 0;
3169 /* If all outer actions are cleanups or must_not_throw,
3170 we'll have no action record for it, since we had wanted
3171 to encode these states in the call-site record directly.
3172 Add a cleanup action to the chain to catch these. */
3173 else if (next <= 0)
3174 next = add_action_record (ar_hash, 0, 0);
3175
3176 return add_action_record (ar_hash, region->u.allowed.filter, next);
3177
3178 case ERT_MUST_NOT_THROW:
3179 /* A must-not-throw region with no inner handlers or cleanups
3180 requires no call-site entry. Note that this differs from
3181 the no handler or cleanup case in that we do require an lsda
3182 to be generated. Return a magic -2 value to record this. */
3183 return -2;
3184
3185 case ERT_CATCH:
3186 case ERT_THROW:
3187 /* CATCH regions are handled in TRY above. THROW regions are
3188 for optimization information only and produce no output. */
3189 return collect_one_action_chain (ar_hash, region->outer);
3190
3191 default:
3192 gcc_unreachable ();
3193 }
3194}
3195
3196static int
3197add_call_site (rtx landing_pad, int action)
3198{
3199 call_site_record record;
3200
3201 record = GGC_NEW (struct call_site_record);
3202 record->landing_pad = landing_pad;
3203 record->action = action;
3204
3205 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3206
3207 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3208}
3209
3210/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3211 The new note numbers will not refer to region numbers, but
3212 instead to call site entries. */
3213
3214unsigned int
3215convert_to_eh_region_ranges (void)
3216{
3217 rtx insn, iter, note;
3218 htab_t ar_hash;
3219 int last_action = -3;
3220 rtx last_action_insn = NULL_RTX;
3221 rtx last_landing_pad = NULL_RTX;
3222 rtx first_no_action_insn = NULL_RTX;
3223 int call_site = 0;
3224
3225 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3226 return 0;
3227
3228 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3229
3230 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3231
3232 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3233 if (INSN_P (iter))
3234 {
3235 struct eh_region *region;
3236 int this_action;
3237 rtx this_landing_pad;
3238
3239 insn = iter;
3240 if (NONJUMP_INSN_P (insn)
3241 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3242 insn = XVECEXP (PATTERN (insn), 0, 0);
3243
3244 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3245 if (!note)
3246 {
3247 if (! (CALL_P (insn)
3248 || (flag_non_call_exceptions
3249 && may_trap_p (PATTERN (insn)))))
3250 continue;
3251 this_action = -1;
3252 region = NULL;
3253 }
3254 else
3255 {
3256 if (INTVAL (XEXP (note, 0)) <= 0)
3257 continue;
3258 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3259 this_action = collect_one_action_chain (ar_hash, region);
3260 }
3261
3262 /* Existence of catch handlers, or must-not-throw regions
3263 implies that an lsda is needed (even if empty). */
3264 if (this_action != -1)
3265 crtl->uses_eh_lsda = 1;
3266
3267 /* Delay creation of region notes for no-action regions
3268 until we're sure that an lsda will be required. */
3269 else if (last_action == -3)
3270 {
3271 first_no_action_insn = iter;
3272 last_action = -1;
3273 }
3274
3275 /* Cleanups and handlers may share action chains but not
3276 landing pads. Collect the landing pad for this region. */
3277 if (this_action >= 0)
3278 {
3279 struct eh_region *o;
3280 for (o = region; ! o->landing_pad ; o = o->outer)
3281 continue;
3282 this_landing_pad = o->landing_pad;
3283 }
3284 else
3285 this_landing_pad = NULL_RTX;
3286
3287 /* Differing actions or landing pads implies a change in call-site
3288 info, which implies some EH_REGION note should be emitted. */
3289 if (last_action != this_action
3290 || last_landing_pad != this_landing_pad)
3291 {
3292 /* If we'd not seen a previous action (-3) or the previous
3293 action was must-not-throw (-2), then we do not need an
3294 end note. */
3295 if (last_action >= -1)
3296 {
3297 /* If we delayed the creation of the begin, do it now. */
3298 if (first_no_action_insn)
3299 {
3300 call_site = add_call_site (NULL_RTX, 0);
3301 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3302 first_no_action_insn);
3303 NOTE_EH_HANDLER (note) = call_site;
3304 first_no_action_insn = NULL_RTX;
3305 }
3306
3307 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3308 last_action_insn);
3309 NOTE_EH_HANDLER (note) = call_site;
3310 }
3311
3312 /* If the new action is must-not-throw, then no region notes
3313 are created. */
3314 if (this_action >= -1)
3315 {
3316 call_site = add_call_site (this_landing_pad,
3317 this_action < 0 ? 0 : this_action);
3318 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3319 NOTE_EH_HANDLER (note) = call_site;
3320 }
3321
3322 last_action = this_action;
3323 last_landing_pad = this_landing_pad;
3324 }
3325 last_action_insn = iter;
3326 }
3327
3328 if (last_action >= -1 && ! first_no_action_insn)
3329 {
3330 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3331 NOTE_EH_HANDLER (note) = call_site;
3332 }
3333
3334 htab_delete (ar_hash);
3335 return 0;
3336}
3337
3338struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3339{
3340 {
3341 RTL_PASS,
3342 "eh_ranges", /* name */
3343 NULL, /* gate */
3344 convert_to_eh_region_ranges, /* execute */
3345 NULL, /* sub */
3346 NULL, /* next */
3347 0, /* static_pass_number */
3348 0, /* tv_id */
3349 0, /* properties_required */
3350 0, /* properties_provided */
3351 0, /* properties_destroyed */
3352 0, /* todo_flags_start */
3353 TODO_dump_func, /* todo_flags_finish */
3354 }
3355};
3356
3357\f
3358static void
3359push_uleb128 (varray_type *data_area, unsigned int value)
3360{
3361 do
3362 {
3363 unsigned char byte = value & 0x7f;
3364 value >>= 7;
3365 if (value)
3366 byte |= 0x80;
3367 VARRAY_PUSH_UCHAR (*data_area, byte);
3368 }
3369 while (value);
3370}
3371
3372static void
3373push_sleb128 (varray_type *data_area, int value)
3374{
3375 unsigned char byte;
3376 int more;
3377
3378 do
3379 {
3380 byte = value & 0x7f;
3381 value >>= 7;
3382 more = ! ((value == 0 && (byte & 0x40) == 0)
3383 || (value == -1 && (byte & 0x40) != 0));
3384 if (more)
3385 byte |= 0x80;
3386 VARRAY_PUSH_UCHAR (*data_area, byte);
3387 }
3388 while (more);
3389}
3390
3391\f
3392#ifndef HAVE_AS_LEB128
3393static int
3394dw2_size_of_call_site_table (void)
3395{
3396 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3397 int size = n * (4 + 4 + 4);
3398 int i;
3399
3400 for (i = 0; i < n; ++i)
3401 {
3402 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3403 size += size_of_uleb128 (cs->action);
3404 }
3405
3406 return size;
3407}
3408
3409static int
3410sjlj_size_of_call_site_table (void)
3411{
3412 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3413 int size = 0;
3414 int i;
3415
3416 for (i = 0; i < n; ++i)
3417 {
3418 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3419 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3420 size += size_of_uleb128 (cs->action);
3421 }
3422
3423 return size;
3424}
3425#endif
3426
3427static void
3428dw2_output_call_site_table (void)
3429{
3430 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3431 int i;
3432
3433 for (i = 0; i < n; ++i)
3434 {
3435 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3436 char reg_start_lab[32];
3437 char reg_end_lab[32];
3438 char landing_pad_lab[32];
3439
3440 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3441 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3442
3443 if (cs->landing_pad)
3444 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3445 CODE_LABEL_NUMBER (cs->landing_pad));
3446
3447 /* ??? Perhaps use insn length scaling if the assembler supports
3448 generic arithmetic. */
3449 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3450 data4 if the function is small enough. */
3451#ifdef HAVE_AS_LEB128
3452 dw2_asm_output_delta_uleb128 (reg_start_lab,
3453 current_function_func_begin_label,
3454 "region %d start", i);
3455 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3456 "length");
3457 if (cs->landing_pad)
3458 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3459 current_function_func_begin_label,
3460 "landing pad");
3461 else
3462 dw2_asm_output_data_uleb128 (0, "landing pad");
3463#else
3464 dw2_asm_output_delta (4, reg_start_lab,
3465 current_function_func_begin_label,
3466 "region %d start", i);
3467 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3468 if (cs->landing_pad)
3469 dw2_asm_output_delta (4, landing_pad_lab,
3470 current_function_func_begin_label,
3471 "landing pad");
3472 else
3473 dw2_asm_output_data (4, 0, "landing pad");
3474#endif
3475 dw2_asm_output_data_uleb128 (cs->action, "action");
3476 }
3477
3478 call_site_base += n;
3479}
3480
3481static void
3482sjlj_output_call_site_table (void)
3483{
3484 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3485 int i;
3486
3487 for (i = 0; i < n; ++i)
3488 {
3489 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3490
3491 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3492 "region %d landing pad", i);
3493 dw2_asm_output_data_uleb128 (cs->action, "action");
3494 }
3495
3496 call_site_base += n;
3497}
3498
3499#ifndef TARGET_UNWIND_INFO
3500/* Switch to the section that should be used for exception tables. */
3501
3502static void
3503switch_to_exception_section (const char * ARG_UNUSED (fnname))
3504{
3505 section *s;
3506
3507 if (exception_section)
3508 s = exception_section;
3509 else
3510 {
3511 /* Compute the section and cache it into exception_section,
3512 unless it depends on the function name. */
3513 if (targetm.have_named_sections)
3514 {
3515 int flags;
3516
3517 if (EH_TABLES_CAN_BE_READ_ONLY)
3518 {
3519 int tt_format =
3520 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3521 flags = ((! flag_pic
3522 || ((tt_format & 0x70) != DW_EH_PE_absptr
3523 && (tt_format & 0x70) != DW_EH_PE_aligned))
3524 ? 0 : SECTION_WRITE);
3525 }
3526 else
3527 flags = SECTION_WRITE;
3528
3529#ifdef HAVE_LD_EH_GC_SECTIONS
3530 if (flag_function_sections)
3531 {
3532 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3533 sprintf (section_name, ".gcc_except_table.%s", fnname);
3534 s = get_section (section_name, flags, NULL);
3535 free (section_name);
3536 }
3537 else
3538#endif
3539 exception_section
3540 = s = get_section (".gcc_except_table", flags, NULL);
3541 }
3542 else
3543 exception_section
3544 = s = flag_pic ? data_section : readonly_data_section;
3545 }
3546
3547 switch_to_section (s);
3548}
3549#endif
3550
3551
3552/* Output a reference from an exception table to the type_info object TYPE.
3553 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3554 the value. */
3555
3556static void
3557output_ttype (tree type, int tt_format, int tt_format_size)
3558{
3559 rtx value;
3560 bool is_public = true;
3561
3562 if (type == NULL_TREE)
3563 value = const0_rtx;
3564 else
3565 {
3566 struct varpool_node *node;
3567
3568 type = lookup_type_for_runtime (type);
3569 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3570
3571 /* Let cgraph know that the rtti decl is used. Not all of the
3572 paths below go through assemble_integer, which would take
3573 care of this for us. */
3574 STRIP_NOPS (type);
3575 if (TREE_CODE (type) == ADDR_EXPR)
3576 {
3577 type = TREE_OPERAND (type, 0);
3578 if (TREE_CODE (type) == VAR_DECL)
3579 {
3580 node = varpool_node (type);
3581 if (node)
3582 varpool_mark_needed_node (node);
3583 is_public = TREE_PUBLIC (type);
3584 }
3585 }
3586 else
3587 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3588 }
3589
3590 /* Allow the target to override the type table entry format. */
3591 if (targetm.asm_out.ttype (value))
3592 return;
3593
3594 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3595 assemble_integer (value, tt_format_size,
3596 tt_format_size * BITS_PER_UNIT, 1);
3597 else
3598 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
3599}
3600
3601void
3602output_function_exception_table (const char * ARG_UNUSED (fnname))
3603{
3604 int tt_format, cs_format, lp_format, i, n;
3605#ifdef HAVE_AS_LEB128
3606 char ttype_label[32];
3607 char cs_after_size_label[32];
3608 char cs_end_label[32];
3609#else
3610 int call_site_len;
3611#endif
3612 int have_tt_data;
3613 int tt_format_size = 0;
3614
3615 /* Not all functions need anything. */
3616 if (! crtl->uses_eh_lsda)
3617 return;
3618
3619 if (eh_personality_libfunc)
3620 assemble_external_libcall (eh_personality_libfunc);
3621
3622#ifdef TARGET_UNWIND_INFO
3623 /* TODO: Move this into target file. */
3624 fputs ("\t.personality\t", asm_out_file);
3625 output_addr_const (asm_out_file, eh_personality_libfunc);
3626 fputs ("\n\t.handlerdata\n", asm_out_file);
3627 /* Note that varasm still thinks we're in the function's code section.
3628 The ".endp" directive that will immediately follow will take us back. */
3629#else
3630 switch_to_exception_section (fnname);
3631#endif
3632
3633 /* If the target wants a label to begin the table, emit it here. */
3634 targetm.asm_out.except_table_label (asm_out_file);
3635
3636 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3637 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
3638
3639 /* Indicate the format of the @TType entries. */
3640 if (! have_tt_data)
3641 tt_format = DW_EH_PE_omit;
3642 else
3643 {
3644 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3645#ifdef HAVE_AS_LEB128
3646 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3647 current_function_funcdef_no);
3648#endif
3649 tt_format_size = size_of_encoded_value (tt_format);
3650
3651 assemble_align (tt_format_size * BITS_PER_UNIT);
3652 }
3653
3654 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3655 current_function_funcdef_no);
3656
3657 /* The LSDA header. */
3658
3659 /* Indicate the format of the landing pad start pointer. An omitted
3660 field implies @LPStart == @Start. */
3661 /* Currently we always put @LPStart == @Start. This field would
3662 be most useful in moving the landing pads completely out of
3663 line to another section, but it could also be used to minimize
3664 the size of uleb128 landing pad offsets. */
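  /* In outline, the header emitted from here on has the shape (sketch):

	1 byte    @LPStart format (DW_EH_PE_omit below)
	1 byte    @TType format
	uleb128   @TType base offset (only when @TType data is present)
	1 byte    call-site format
	uleb128   call-site table length
	... call-site table, action records, @TType table ...  */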
3665 lp_format = DW_EH_PE_omit;
3666 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3667 eh_data_format_name (lp_format));
3668
3669 /* @LPStart pointer would go here. */
3670
3671 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3672 eh_data_format_name (tt_format));
3673
3674#ifndef HAVE_AS_LEB128
3675 if (USING_SJLJ_EXCEPTIONS)
3676 call_site_len = sjlj_size_of_call_site_table ();
3677 else
3678 call_site_len = dw2_size_of_call_site_table ();
3679#endif
3680
3681 /* A pc-relative 4-byte displacement to the @TType data. */
3682 if (have_tt_data)
3683 {
3684#ifdef HAVE_AS_LEB128
3685 char ttype_after_disp_label[32];
3686 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3687 current_function_funcdef_no);
3688 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3689 "@TType base offset");
3690 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3691#else
3692 /* Ug. Alignment queers things. */
3693 unsigned int before_disp, after_disp, last_disp, disp;
3694
3695 before_disp = 1 + 1;
3696 after_disp = (1 + size_of_uleb128 (call_site_len)
3697 + call_site_len
3698 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
3699 + (VEC_length (tree, crtl->eh.ttype_data)
3700 * tt_format_size));
3701
3702 disp = after_disp;
3703 do
3704 {
3705 unsigned int disp_size, pad;
3706
3707 last_disp = disp;
3708 disp_size = size_of_uleb128 (disp);
3709 pad = before_disp + disp_size + after_disp;
3710 if (pad % tt_format_size)
3711 pad = tt_format_size - (pad % tt_format_size);
3712 else
3713 pad = 0;
3714 disp = after_disp + pad;
3715 }
3716 while (disp != last_disp);
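
      /* Example (hypothetical sizes): with after_disp = 10 and
	 tt_format_size = 4, the first pass computes 2 + 1 + 10 = 13
	 bytes, which needs 3 bytes of pad, so disp grows to 13; since
	 13 still encodes in one uleb128 byte, the second pass gets the
	 same answer and the loop stops.  Iteration is needed because a
	 larger disp can lengthen its own encoding and thus change the
	 required pad.  */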
3717
3718 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3719#endif
3720 }
3721
3722 /* Indicate the format of the call-site offsets. */
3723#ifdef HAVE_AS_LEB128
3724 cs_format = DW_EH_PE_uleb128;
3725#else
3726 cs_format = DW_EH_PE_udata4;
3727#endif
3728 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3729 eh_data_format_name (cs_format));
3730
3731#ifdef HAVE_AS_LEB128
3732 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3733 current_function_funcdef_no);
3734 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3735 current_function_funcdef_no);
3736 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3737 "Call-site table length");
3738 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3739 if (USING_SJLJ_EXCEPTIONS)
3740 sjlj_output_call_site_table ();
3741 else
3742 dw2_output_call_site_table ();
3743 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3744#else
3745 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3746 if (USING_SJLJ_EXCEPTIONS)
3747 sjlj_output_call_site_table ();
3748 else
3749 dw2_output_call_site_table ();
3750#endif
3751
3752 /* ??? Decode and interpret the data for flag_debug_asm. */
3753 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
3754 for (i = 0; i < n; ++i)
3755 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
3756 (i ? NULL : "Action record table"));
3757
3758 if (have_tt_data)
3759 assemble_align (tt_format_size * BITS_PER_UNIT);
3760
3761 i = VEC_length (tree, crtl->eh.ttype_data);
3762 while (i-- > 0)
3763 {
3764 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
3765 output_ttype (type, tt_format, tt_format_size);
3766 }
3767
3768#ifdef HAVE_AS_LEB128
3769 if (have_tt_data)
3770 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3771#endif
3772
3773 /* ??? Decode and interpret the data for flag_debug_asm. */
3774 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
3775 for (i = 0; i < n; ++i)
3776 {
3777 if (targetm.arm_eabi_unwinder)
3778 {
3779 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
3780 output_ttype (type, tt_format, tt_format_size);
3781 }
3782 else
3783 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
3784 (i ? NULL : "Exception specification table"));
3785 }
3786
3787 switch_to_section (current_function_section ());
3788}
3789
3790void
3791set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3792{
3793 fun->eh->throw_stmt_table = table;
3794}
3795
3796htab_t
3797get_eh_throw_stmt_table (struct function *fun)
3798{
3799 return fun->eh->throw_stmt_table;
3800}
3801
3802/* Dump EH information to OUT. */
3803void
3804dump_eh_tree (FILE *out, struct function *fun)
3805{
3806 struct eh_region *i;
3807 int depth = 0;
3808 static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
3809 "allowed_exceptions", "must_not_throw",
3810 "throw"};
3811
3812 i = fun->eh->region_tree;
3813 if (! i)
3814 return;
3815
3816 fprintf (out, "Eh tree:\n");
3817 while (1)
3818 {
3819 fprintf (out, " %*s %i %s", depth * 2, "",
3820 i->region_number, type_name [(int)i->type]);
3821 if (i->tree_label)
3822 {
3823 fprintf (out, " tree_label:");
3824 print_generic_expr (out, i->tree_label, 0);
3825 }
3826 fprintf (out, "\n");
3827 /* If there are sub-regions, process them. */
3828 if (i->inner)
3829 i = i->inner, depth++;
3830 /* If there are peers, process them. */
3831 else if (i->next_peer)
3832 i = i->next_peer;
3833 /* Otherwise, step back up the tree to the next peer. */
3834 else
3835 {
3836 do {
3837 i = i->outer;
3838 depth--;
3839 if (i == NULL)
3840 return;
3841 } while (i->next_peer == NULL);
3842 i = i->next_peer;
3843 }
3844 }
3845}
3846
3847/* Verify some basic invariants on EH datastructures. Could be extended to
3848 catch more. */
3849void
3850verify_eh_tree (struct function *fun)
3851{
3852 struct eh_region *i, *outer = NULL;
3853 bool err = false;
3854 int nvisited = 0;
3855 int count = 0;
3856 int j;
3857 int depth = 0;
3858
3859 i = fun->eh->region_tree;
3860 if (! i)
3861 return;
3862 for (j = fun->eh->last_region_number; j > 0; --j)
3863 if ((i = VEC_index (eh_region, cfun->eh->region_array, j)))
3864 {
3865 count++;
3866 if (i->region_number != j)
3867 {
3868 error ("region_array is corrupted for region %i", i->region_number);
3869 err = true;
3870 }
3871 }
3872
3873 while (1)
3874 {
3875 if (VEC_index (eh_region, cfun->eh->region_array, i->region_number) != i)
3876 {
3877 error ("region_array is corrupted for region %i", i->region_number);
3878 err = true;
3879 }
3880 if (i->outer != outer)
3881 {
3882 error ("outer block of region %i is wrong", i->region_number);
3883 err = true;
3884 }
3885 if (i->may_contain_throw && outer && !outer->may_contain_throw)
3886 {
3887 error ("region %i may contain throw and is contained in region that may not",
3888 i->region_number);
3889 err = true;
3890 }
3891 if (depth < 0)
3892 {
3893 error ("negative nesting depth of region %i", i->region_number);
3894 err = true;
3895 }
3896 nvisited++;
3897 /* If there are sub-regions, process them. */
3898 if (i->inner)
3899 outer = i, i = i->inner, depth++;
3900 /* If there are peers, process them. */
3901 else if (i->next_peer)
3902 i = i->next_peer;
3903 /* Otherwise, step back up the tree to the next peer. */
3904 else
3905 {
3906 do {
3907 i = i->outer;
3908 depth--;
3909 if (i == NULL)
3910 {
3911 if (depth != -1)
3912 {
3913 error ("tree list ends on depth %i", depth + 1);
3914 err = true;
3915 }
3916 if (count != nvisited)
3917 {
3918 error ("array does not match the region tree");
3919 err = true;
3920 }
3921 if (err)