comparison gcc/emit-rtl.c @ 67:f6334be47118

update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date Tue, 22 Mar 2011 17:18:12 +0900
parents b7f97abdc517
children 04ced10e8804
comparing 65:65488c3d617d with 67:f6334be47118
36 36
37 #include "config.h" 37 #include "config.h"
38 #include "system.h" 38 #include "system.h"
39 #include "coretypes.h" 39 #include "coretypes.h"
40 #include "tm.h" 40 #include "tm.h"
41 #include "toplev.h" 41 #include "diagnostic-core.h"
42 #include "rtl.h" 42 #include "rtl.h"
43 #include "tree.h" 43 #include "tree.h"
44 #include "tm_p.h" 44 #include "tm_p.h"
45 #include "flags.h" 45 #include "flags.h"
46 #include "function.h" 46 #include "function.h"
57 #include "langhooks.h" 57 #include "langhooks.h"
58 #include "tree-pass.h" 58 #include "tree-pass.h"
59 #include "df.h" 59 #include "df.h"
60 #include "params.h" 60 #include "params.h"
61 #include "target.h" 61 #include "target.h"
62 #include "tree-flow.h"
63
64 struct target_rtl default_target_rtl;
65 #if SWITCHABLE_TARGET
66 struct target_rtl *this_target_rtl = &default_target_rtl;
67 #endif
68
69 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
62 70
63 /* Commonly used modes. */ 71 /* Commonly used modes. */
64 72
65 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */ 73 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
66 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */ 74 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
80 88
81 /* This is *not* reset after each function. It gives each CODE_LABEL 89 /* This is *not* reset after each function. It gives each CODE_LABEL
82 in the entire compilation a unique label number. */ 90 in the entire compilation a unique label number. */
83 91
84 static GTY(()) int label_num = 1; 92 static GTY(()) int label_num = 1;
85
86 /* Commonly used rtx's, so that we only need space for one copy.
87 These are initialized once for the entire compilation.
88 All of these are unique; no other rtx-object will be equal to any
89 of these. */
90
91 rtx global_rtl[GR_MAX];
92
93 /* Commonly used RTL for hard registers. These objects are not necessarily
94 unique, so we allocate them separately from global_rtl. They are
95 initialized once per compilation unit, then copied into regno_reg_rtx
96 at the beginning of each function. */
97 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
98 93
99 /* We record floating-point CONST_DOUBLEs in each floating-point mode for 94 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
100 the values of 0, 1, and 2. For the integer entries and VOIDmode, we 95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
101 record a copy of const[012]_rtx. */ 96 record a copy of const[012]_rtx. */
102 97
112 107
113 /* Record fixed-point constant 0 and 1. */ 108 /* Record fixed-point constant 0 and 1. */
114 FIXED_VALUE_TYPE fconst0[MAX_FCONST0]; 109 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
115 FIXED_VALUE_TYPE fconst1[MAX_FCONST1]; 110 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
116 111
117 /* All references to the following fixed hard registers go through
118 these unique rtl objects. On machines where the frame-pointer and
119 arg-pointer are the same register, they use the same unique object.
120
121 After register allocation, other rtl objects which used to be pseudo-regs
122 may be clobbered to refer to the frame-pointer register.
123 But references that were originally to the frame-pointer can be
124 distinguished from the others because they contain frame_pointer_rtx.
125
126 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
127 tricky: until register elimination has taken place hard_frame_pointer_rtx
128 should be used if it is being set, and frame_pointer_rtx otherwise. After
129 register elimination hard_frame_pointer_rtx should always be used.
130 On machines where the two registers are same (most) then these are the
131 same.
132
133 In an inline procedure, the stack and frame pointer rtxs may not be
134 used for anything else. */
135 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
136
137 /* This is used to implement __builtin_return_address for some machines.
138 See for instance the MIPS port. */
139 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
140
141 /* We make one copy of (const_int C) where C is in 112 /* We make one copy of (const_int C) where C is in
142 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] 113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
143 to save space during the compilation and simplify comparisons of 114 to save space during the compilation and simplify comparisons of
144 integers. */ 115 integers. */
145 116
165 136
166 /* A hash table storing all CONST_FIXEDs. */ 137 /* A hash table storing all CONST_FIXEDs. */
167 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def))) 138 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
168 htab_t const_fixed_htab; 139 htab_t const_fixed_htab;
169 140
170 #define first_insn (crtl->emit.x_first_insn)
171 #define last_insn (crtl->emit.x_last_insn)
172 #define cur_insn_uid (crtl->emit.x_cur_insn_uid) 141 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
173 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid) 142 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
174 #define last_location (crtl->emit.x_last_location) 143 #define last_location (crtl->emit.x_last_location)
175 #define first_label_num (crtl->emit.x_first_label_num) 144 #define first_label_num (crtl->emit.x_first_label_num)
176 145
341 attrs.addrspace = addrspace; 310 attrs.addrspace = addrspace;
342 311
343 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT); 312 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
344 if (*slot == 0) 313 if (*slot == 0)
345 { 314 {
346 *slot = ggc_alloc (sizeof (mem_attrs)); 315 *slot = ggc_alloc_mem_attrs ();
347 memcpy (*slot, &attrs, sizeof (mem_attrs)); 316 memcpy (*slot, &attrs, sizeof (mem_attrs));
348 } 317 }
349 318
350 return (mem_attrs *) *slot; 319 return (mem_attrs *) *slot;
351 } 320 }
355 static hashval_t 324 static hashval_t
356 reg_attrs_htab_hash (const void *x) 325 reg_attrs_htab_hash (const void *x)
357 { 326 {
358 const reg_attrs *const p = (const reg_attrs *) x; 327 const reg_attrs *const p = (const reg_attrs *) x;
359 328
360 return ((p->offset * 1000) ^ (long) p->decl); 329 return ((p->offset * 1000) ^ (intptr_t) p->decl);
361 } 330 }
362 331
363 /* Returns nonzero if the value represented by X (which is really a 332 /* Returns nonzero if the value represented by X (which is really a
364 reg_attrs *) is the same as that given by Y (which is also really a 333 reg_attrs *) is the same as that given by Y (which is also really a
365 reg_attrs *). */ 334 reg_attrs *). */
390 attrs.offset = offset; 359 attrs.offset = offset;
391 360
392 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT); 361 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
393 if (*slot == 0) 362 if (*slot == 0)
394 { 363 {
395 *slot = ggc_alloc (sizeof (reg_attrs)); 364 *slot = ggc_alloc_reg_attrs ();
396 memcpy (*slot, &attrs, sizeof (reg_attrs)); 365 memcpy (*slot, &attrs, sizeof (reg_attrs));
397 } 366 }
398 367
399 return (reg_attrs *) *slot; 368 return (reg_attrs *) *slot;
400 } 369 }
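The two hunks above swap raw ggc_alloc (sizeof (T)) calls for typed allocators such as ggc_alloc_mem_attrs () and ggc_alloc_reg_attrs (), presumably generated by gengtype. A rough sketch of the idea only (plain malloc standing in for GCC's garbage-collected heap, reg_attrs_t for the real struct): the typed wrapper pins the size and the cast in one place, so a call site cannot pass a mismatched sizeof.

/* Illustrative model, not GCC's generated allocator.  */
#include <stdlib.h>

typedef struct { long offset; void *decl; } reg_attrs_t;

static inline reg_attrs_t *
alloc_reg_attrs (void)
{
  /* Size and result type are fixed here, not at each call site.  */
  return (reg_attrs_t *) malloc (sizeof (reg_attrs_t));
}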
618 if (mode == Pmode && !reload_in_progress) 587 if (mode == Pmode && !reload_in_progress)
619 { 588 {
620 if (regno == FRAME_POINTER_REGNUM 589 if (regno == FRAME_POINTER_REGNUM
621 && (!reload_completed || frame_pointer_needed)) 590 && (!reload_completed || frame_pointer_needed))
622 return frame_pointer_rtx; 591 return frame_pointer_rtx;
623 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM 592 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
624 if (regno == HARD_FRAME_POINTER_REGNUM 593 if (regno == HARD_FRAME_POINTER_REGNUM
625 && (!reload_completed || frame_pointer_needed)) 594 && (!reload_completed || frame_pointer_needed))
626 return hard_frame_pointer_rtx; 595 return hard_frame_pointer_rtx;
627 #endif 596 #endif
628 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 597 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
629 if (regno == ARG_POINTER_REGNUM) 598 if (regno == ARG_POINTER_REGNUM)
630 return arg_pointer_rtx; 599 return arg_pointer_rtx;
631 #endif 600 #endif
632 #ifdef RETURN_ADDRESS_POINTER_REGNUM 601 #ifdef RETURN_ADDRESS_POINTER_REGNUM
633 if (regno == RETURN_ADDRESS_POINTER_REGNUM) 602 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
634 return return_address_pointer_rtx; 603 return return_address_pointer_rtx;
635 #endif 604 #endif
636 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM 605 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
606 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
637 && fixed_regs[PIC_OFFSET_TABLE_REGNUM]) 607 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
638 return pic_offset_table_rtx; 608 return pic_offset_table_rtx;
639 if (regno == STACK_POINTER_REGNUM) 609 if (regno == STACK_POINTER_REGNUM)
640 return stack_pointer_rtx; 610 return stack_pointer_rtx;
641 } 611 }
1488 unsigned HOST_WIDE_INT offset; 1458 unsigned HOST_WIDE_INT offset;
1489 1459
1490 /* This function can't use 1460 /* This function can't use
1491 if (!MEM_EXPR (mem) || !MEM_OFFSET (mem) 1461 if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
1492 || !CONST_INT_P (MEM_OFFSET (mem)) 1462 || !CONST_INT_P (MEM_OFFSET (mem))
1493 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align) 1463 || (MAX (MEM_ALIGN (mem),
1464 get_object_alignment (MEM_EXPR (mem), align))
1494 < align)) 1465 < align))
1495 return -1; 1466 return -1;
1496 else 1467 else
1497 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1); 1468 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
1498 for two reasons: 1469 for two reasons:
1568 1539
1569 void 1540 void
1570 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, 1541 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1571 HOST_WIDE_INT bitpos) 1542 HOST_WIDE_INT bitpos)
1572 { 1543 {
1573 alias_set_type alias = MEM_ALIAS_SET (ref); 1544 alias_set_type alias;
1574 tree expr = MEM_EXPR (ref); 1545 tree expr = NULL;
1575 rtx offset = MEM_OFFSET (ref); 1546 rtx offset = NULL_RTX;
1576 rtx size = MEM_SIZE (ref); 1547 rtx size = NULL_RTX;
1577 unsigned int align = MEM_ALIGN (ref); 1548 unsigned int align = BITS_PER_UNIT;
1578 HOST_WIDE_INT apply_bitpos = 0; 1549 HOST_WIDE_INT apply_bitpos = 0;
1579 tree type; 1550 tree type;
1580 1551
1581 /* It can happen that type_for_mode was given a mode for which there 1552 /* It can happen that type_for_mode was given a mode for which there
1582 is no language-level type. In which case it returns NULL, which 1553 is no language-level type. In which case it returns NULL, which
1608 if ((objectp || DECL_P (t)) 1579 if ((objectp || DECL_P (t))
1609 && ! AGGREGATE_TYPE_P (type) 1580 && ! AGGREGATE_TYPE_P (type)
1610 && TREE_CODE (type) != COMPLEX_TYPE) 1581 && TREE_CODE (type) != COMPLEX_TYPE)
1611 MEM_SCALAR_P (ref) = 1; 1582 MEM_SCALAR_P (ref) = 1;
1612 1583
1584 /* Default values from pre-existing memory attributes if present. */
1585 if (MEM_ATTRS (ref))
1586 {
1587 /* ??? Can this ever happen? Calling this routine on a MEM that
1588 already carries memory attributes should probably be invalid. */
1589 expr = MEM_EXPR (ref);
1590 offset = MEM_OFFSET (ref);
1591 size = MEM_SIZE (ref);
1592 align = MEM_ALIGN (ref);
1593 }
1594
1595 /* Otherwise, default values from the mode of the MEM reference. */
1596 else if (GET_MODE (ref) != BLKmode)
1597 {
1598 /* Respect mode size. */
1599 size = GEN_INT (GET_MODE_SIZE (GET_MODE (ref)));
1600 /* ??? Is this really necessary? We probably should always get
1601 the size from the type below. */
1602
1603 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1604 if T is an object, always compute the object alignment below. */
1605 if (STRICT_ALIGNMENT && TYPE_P (t))
1606 align = GET_MODE_ALIGNMENT (GET_MODE (ref));
1607 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1608 e.g. if the type carries an alignment attribute. Should we be
1609 able to simply always use TYPE_ALIGN? */
1610 }
1611
1613 /* We can set the alignment from the type if we are making an object, 1612 /* We can set the alignment from the type if we are making an object,
1614 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */ 1613 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1615 if (objectp || TREE_CODE (t) == INDIRECT_REF 1614 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1616 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1617 || TYPE_ALIGN_OK (type))
1618 align = MAX (align, TYPE_ALIGN (type)); 1615 align = MAX (align, TYPE_ALIGN (type));
1619 else 1616
1620 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF) 1617 else if (TREE_CODE (t) == MEM_REF)
1621 { 1618 {
1622 if (integer_zerop (TREE_OPERAND (t, 1))) 1619 tree op0 = TREE_OPERAND (t, 0);
1623 /* We don't know anything about the alignment. */ 1620 if (TREE_CODE (op0) == ADDR_EXPR
1624 align = BITS_PER_UNIT; 1621 && (DECL_P (TREE_OPERAND (op0, 0))
1625 else 1622 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
1626 align = tree_low_cst (TREE_OPERAND (t, 1), 1); 1623 {
1627 } 1624 if (DECL_P (TREE_OPERAND (op0, 0)))
1625 align = DECL_ALIGN (TREE_OPERAND (op0, 0));
1626 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1627 {
1628 align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
1629 #ifdef CONSTANT_ALIGNMENT
1630 align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
1631 #endif
1632 }
1633 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1634 {
1635 unsigned HOST_WIDE_INT ioff
1636 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1637 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1638 align = MIN (aoff, align);
1639 }
1640 }
1641 else
1642 /* ??? This isn't fully correct, we can't set the alignment from the
1643 type in all cases. */
1644 align = MAX (align, TYPE_ALIGN (type));
1645 }
1646
1647 else if (TREE_CODE (t) == TARGET_MEM_REF)
1648 /* ??? This isn't fully correct, we can't set the alignment from the
1649 type in all cases. */
1650 align = MAX (align, TYPE_ALIGN (type));
1628 1651
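In the new MEM_REF branch above, a nonzero constant byte offset contributes the largest power of two dividing it as guaranteed alignment, extracted with the lowest-set-bit trick (ioff & -ioff) and scaled to bits. A minimal standalone illustration of just that computation (not GCC code; unsigned long stands in for the HOST_WIDE_INT types):

#include <stdio.h>

/* Alignment in bits implied by a nonzero constant byte offset: the
   largest power of two dividing the offset, times the bits per byte.  */
static unsigned long
offset_alignment_bits (unsigned long ioff, unsigned long bits_per_unit)
{
  return (ioff & -ioff) * bits_per_unit;
}

int
main (void)
{
  /* 24 = 8 * 3, so the offset only guarantees 8-byte (64-bit) alignment.  */
  printf ("%lu\n", offset_alignment_bits (24, 8));   /* prints 64 */
  return 0;
}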
1629 /* If the size is known, we can set that. */ 1652 /* If the size is known, we can set that. */
1630 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1)) 1653 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1631 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1)); 1654 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1632 1655
1645 while (CONVERT_EXPR_P (t) 1668 while (CONVERT_EXPR_P (t)
1646 || TREE_CODE (t) == VIEW_CONVERT_EXPR 1669 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1647 || TREE_CODE (t) == SAVE_EXPR) 1670 || TREE_CODE (t) == SAVE_EXPR)
1648 t = TREE_OPERAND (t, 0); 1671 t = TREE_OPERAND (t, 0);
1649 1672
1650 /* We may look through structure-like accesses for the purposes of 1673 /* Note whether this expression can trap. */
1651 examining TREE_THIS_NOTRAP, but not array-like accesses. */ 1674 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1652 base = t; 1675
1653 while (TREE_CODE (base) == COMPONENT_REF 1676 base = get_base_address (t);
1654 || TREE_CODE (base) == REALPART_EXPR
1655 || TREE_CODE (base) == IMAGPART_EXPR
1656 || TREE_CODE (base) == BIT_FIELD_REF)
1657 base = TREE_OPERAND (base, 0);
1658
1659 if (DECL_P (base))
1660 {
1661 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1662 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1663 else
1664 MEM_NOTRAP_P (ref) = 1;
1665 }
1666 else
1667 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1668
1669 base = get_base_address (base);
1670 if (base && DECL_P (base) 1677 if (base && DECL_P (base)
1671 && TREE_READONLY (base) 1678 && TREE_READONLY (base)
1672 && (TREE_STATIC (base) || DECL_EXTERNAL (base))) 1679 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1673 { 1680 && !TREE_THIS_VOLATILE (base))
1674 tree base_type = TREE_TYPE (base); 1681 MEM_READONLY_P (ref) = 1;
1675 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1676 || DECL_ARTIFICIAL (base));
1677 MEM_READONLY_P (ref) = 1;
1678 }
1679 1682
1680 /* If this expression uses its parent's alias set, mark it such 1683 /* If this expression uses its parent's alias set, mark it such
1681 that we won't change it. */ 1684 that we won't change it. */
1682 if (component_uses_parent_alias_set (t)) 1685 if (component_uses_parent_alias_set (t))
1683 MEM_KEEP_ALIAS_SET_P (ref) = 1; 1686 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1779 /* ??? Any reason the field size would be different than 1782 /* ??? Any reason the field size would be different than
1780 the size we got from the type? */ 1783 the size we got from the type? */
1781 } 1784 }
1782 1785
1783 /* If this is an indirect reference, record it. */ 1786 /* If this is an indirect reference, record it. */
1784 else if (TREE_CODE (t) == INDIRECT_REF 1787 else if (TREE_CODE (t) == MEM_REF)
1785 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1786 { 1788 {
1787 expr = t; 1789 expr = t;
1788 offset = const0_rtx; 1790 offset = const0_rtx;
1789 apply_bitpos = bitpos; 1791 apply_bitpos = bitpos;
1790 } 1792 }
1791 } 1793 }
1792 1794
1793 /* If this is an indirect reference, record it. */ 1795 /* If this is an indirect reference, record it. */
1794 else if (TREE_CODE (t) == INDIRECT_REF 1796 else if (TREE_CODE (t) == MEM_REF
1795 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF) 1797 || TREE_CODE (t) == TARGET_MEM_REF)
1796 { 1798 {
1797 expr = t; 1799 expr = t;
1798 offset = const0_rtx; 1800 offset = const0_rtx;
1799 apply_bitpos = bitpos; 1801 apply_bitpos = bitpos;
1800 } 1802 }
1801 1803
1802 if (!align_computed && !INDIRECT_REF_P (t)) 1804 if (!align_computed && !INDIRECT_REF_P (t))
1803 { 1805 {
1804 unsigned int obj_align 1806 unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
1805 = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
1806 align = MAX (align, obj_align); 1807 align = MAX (align, obj_align);
1807 } 1808 }
1808 } 1809 }
1809 1810
1810 /* If we modified OFFSET based on T, then subtract the outstanding 1811 /* If we modified OFFSET based on T, then subtract the outstanding
1813 if (apply_bitpos) 1814 if (apply_bitpos)
1814 { 1815 {
1815 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT)); 1816 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1816 if (size) 1817 if (size)
1817 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT); 1818 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1818 }
1819
1820 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1821 {
1822 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1823 we're overlapping. */
1824 offset = NULL;
1825 expr = NULL;
1826 } 1819 }
1827 1820
1828 /* Now set the attributes we computed above. */ 1821 /* Now set the attributes we computed above. */
1829 MEM_ATTRS (ref) 1822 MEM_ATTRS (ref)
1830 = get_mem_attrs (alias, expr, offset, size, align, 1823 = get_mem_attrs (alias, expr, offset, size, align,
1851 /* Set the alias set of MEM to SET. */ 1844 /* Set the alias set of MEM to SET. */
1852 1845
1853 void 1846 void
1854 set_mem_alias_set (rtx mem, alias_set_type set) 1847 set_mem_alias_set (rtx mem, alias_set_type set)
1855 { 1848 {
1856 #ifdef ENABLE_CHECKING
1857 /* If the new and old alias sets don't conflict, something is wrong. */ 1849 /* If the new and old alias sets don't conflict, something is wrong. */
1858 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem))); 1850 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1859 #endif
1860 1851
1861 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem), 1852 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1862 MEM_SIZE (mem), MEM_ALIGN (mem), 1853 MEM_SIZE (mem), MEM_ALIGN (mem),
1863 MEM_ADDR_SPACE (mem), GET_MODE (mem)); 1854 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1864 } 1855 }
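The #ifdef ENABLE_CHECKING block around the assert collapses into one gcc_checking_assert call above. A hedged sketch of how such a checking-only assert macro can be defined (a model of the pattern, not GCC's actual definition):

#include <assert.h>

/* In checking builds this is a real assert; otherwise the expression is
   still type-checked but never evaluated, thanks to the short-circuit.  */
#ifdef ENABLE_CHECKING
# define checking_assert(EXPR) assert (EXPR)
#else
# define checking_assert(EXPR) ((void) (0 && (EXPR)))
#endif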
2252 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl), 2243 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2253 VAR_DECL, get_identifier ("%sfp"), void_type_node); 2244 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2254 DECL_ARTIFICIAL (d) = 1; 2245 DECL_ARTIFICIAL (d) = 1;
2255 DECL_IGNORED_P (d) = 1; 2246 DECL_IGNORED_P (d) = 1;
2256 TREE_USED (d) = 1; 2247 TREE_USED (d) = 1;
2257 TREE_THIS_NOTRAP (d) = 1;
2258 spill_slot_decl = d; 2248 spill_slot_decl = d;
2259 2249
2260 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx); 2250 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2261 MEM_NOTRAP_P (rd) = 1; 2251 MEM_NOTRAP_P (rd) = 1;
2262 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx, 2252 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2315 void 2305 void
2316 set_new_first_and_last_insn (rtx first, rtx last) 2306 set_new_first_and_last_insn (rtx first, rtx last)
2317 { 2307 {
2318 rtx insn; 2308 rtx insn;
2319 2309
2320 first_insn = first; 2310 set_first_insn (first);
2321 last_insn = last; 2311 set_last_insn (last);
2322 cur_insn_uid = 0; 2312 cur_insn_uid = 0;
2323 2313
2324 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS) 2314 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2325 { 2315 {
2326 int debug_count = 0; 2316 int debug_count = 0;
2388 2378
2389 /* Make sure that virtual stack slots are not shared. */ 2379 /* Make sure that virtual stack slots are not shared. */
2390 set_used_decls (DECL_INITIAL (cfun->decl)); 2380 set_used_decls (DECL_INITIAL (cfun->decl));
2391 2381
2392 /* Make sure that virtual parameters are not shared. */ 2382 /* Make sure that virtual parameters are not shared. */
2393 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl)) 2383 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2394 set_used_flags (DECL_RTL (decl)); 2384 set_used_flags (DECL_RTL (decl));
2395 2385
2396 reset_used_flags (stack_slot_list); 2386 reset_used_flags (stack_slot_list);
2397 2387
2398 unshare_all_rtl_1 (insn); 2388 unshare_all_rtl_1 (insn);
2535 } 2525 }
2536 2526
2537 /* Go through all the RTL insn bodies and check that there is no unexpected 2527 /* Go through all the RTL insn bodies and check that there is no unexpected
2538 sharing in between the subexpressions. */ 2528 sharing in between the subexpressions. */
2539 2529
2540 void 2530 DEBUG_FUNCTION void
2541 verify_rtl_sharing (void) 2531 verify_rtl_sharing (void)
2542 { 2532 {
2543 rtx p; 2533 rtx p;
2534
2535 timevar_push (TV_VERIFY_RTL_SHARING);
2544 2536
2545 for (p = get_insns (); p; p = NEXT_INSN (p)) 2537 for (p = get_insns (); p; p = NEXT_INSN (p))
2546 if (INSN_P (p)) 2538 if (INSN_P (p))
2547 { 2539 {
2548 reset_used_flags (PATTERN (p)); 2540 reset_used_flags (PATTERN (p));
2566 if (INSN_P (p)) 2558 if (INSN_P (p))
2567 { 2559 {
2568 verify_rtx_sharing (PATTERN (p), p); 2560 verify_rtx_sharing (PATTERN (p), p);
2569 verify_rtx_sharing (REG_NOTES (p), p); 2561 verify_rtx_sharing (REG_NOTES (p), p);
2570 } 2562 }
2563
2564 timevar_pop (TV_VERIFY_RTL_SHARING);
2571 } 2565 }
2572 2566
2573 /* Go through all the RTL insn bodies and copy any invalid shared structure. 2567 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2574 Assumes the mark bits are cleared at entry. */ 2568 Assumes the mark bits are cleared at entry. */
2575 2569
2598 set_used_decls (tree blk) 2592 set_used_decls (tree blk)
2599 { 2593 {
2600 tree t; 2594 tree t;
2601 2595
2602 /* Mark decls. */ 2596 /* Mark decls. */
2603 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t)) 2597 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2604 if (DECL_RTL_SET_P (t)) 2598 if (DECL_RTL_SET_P (t))
2605 set_used_flags (DECL_RTL (t)); 2599 set_used_flags (DECL_RTL (t));
2606 2600
2607 /* Now process sub-blocks. */ 2601 /* Now process sub-blocks. */
2608 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t)) 2602 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2743 goto repeat; 2737 goto repeat;
2744 } 2738 }
2745 return; 2739 return;
2746 } 2740 }
2747 2741
2748 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used 2742 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2749 to look for shared sub-parts. */ 2743
2750 2744 static void
2751 void 2745 mark_used_flags (rtx x, int flag)
2752 reset_used_flags (rtx x)
2753 { 2746 {
2754 int i, j; 2747 int i, j;
2755 enum rtx_code code; 2748 enum rtx_code code;
2756 const char *format_ptr; 2749 const char *format_ptr;
2757 int length; 2750 int length;
2793 2786
2794 default: 2787 default:
2795 break; 2788 break;
2796 } 2789 }
2797 2790
2798 RTX_FLAG (x, used) = 0; 2791 RTX_FLAG (x, used) = flag;
2799 2792
2800 format_ptr = GET_RTX_FORMAT (code); 2793 format_ptr = GET_RTX_FORMAT (code);
2801 length = GET_RTX_LENGTH (code); 2794 length = GET_RTX_LENGTH (code);
2802 2795
2803 for (i = 0; i < length; i++) 2796 for (i = 0; i < length; i++)
2808 if (i == length-1) 2801 if (i == length-1)
2809 { 2802 {
2810 x = XEXP (x, i); 2803 x = XEXP (x, i);
2811 goto repeat; 2804 goto repeat;
2812 } 2805 }
2813 reset_used_flags (XEXP (x, i)); 2806 mark_used_flags (XEXP (x, i), flag);
2814 break; 2807 break;
2815 2808
2816 case 'E': 2809 case 'E':
2817 for (j = 0; j < XVECLEN (x, i); j++) 2810 for (j = 0; j < XVECLEN (x, i); j++)
2818 reset_used_flags (XVECEXP (x, i, j)); 2811 mark_used_flags (XVECEXP (x, i, j), flag);
2819 break; 2812 break;
2820 } 2813 }
2821 } 2814 }
2822 } 2815 }
2823 2816
2817 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2818 to look for shared sub-parts. */
2819
2820 void
2821 reset_used_flags (rtx x)
2822 {
2823 mark_used_flags (x, 0);
2824 }
2825
2824 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used 2826 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2825 to look for shared sub-parts. */ 2827 to look for shared sub-parts. */
2826 2828
2827 void 2829 void
2828 set_used_flags (rtx x) 2830 set_used_flags (rtx x)
2829 { 2831 {
2830 int i, j; 2832 mark_used_flags (x, 1);
2831 enum rtx_code code;
2832 const char *format_ptr;
2833
2834 if (x == 0)
2835 return;
2836
2837 code = GET_CODE (x);
2838
2839 /* These types may be freely shared so we needn't do any resetting
2840 for them. */
2841
2842 switch (code)
2843 {
2844 case REG:
2845 case DEBUG_EXPR:
2846 case VALUE:
2847 case CONST_INT:
2848 case CONST_DOUBLE:
2849 case CONST_FIXED:
2850 case CONST_VECTOR:
2851 case SYMBOL_REF:
2852 case CODE_LABEL:
2853 case PC:
2854 case CC0:
2855 return;
2856
2857 case DEBUG_INSN:
2858 case INSN:
2859 case JUMP_INSN:
2860 case CALL_INSN:
2861 case NOTE:
2862 case LABEL_REF:
2863 case BARRIER:
2864 /* The chain of insns is not being copied. */
2865 return;
2866
2867 default:
2868 break;
2869 }
2870
2871 RTX_FLAG (x, used) = 1;
2872
2873 format_ptr = GET_RTX_FORMAT (code);
2874 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2875 {
2876 switch (*format_ptr++)
2877 {
2878 case 'e':
2879 set_used_flags (XEXP (x, i));
2880 break;
2881
2882 case 'E':
2883 for (j = 0; j < XVECLEN (x, i); j++)
2884 set_used_flags (XVECEXP (x, i, j));
2885 break;
2886 }
2887 }
2888 } 2833 }
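Above, the two near-identical recursive walkers reset_used_flags and set_used_flags are folded into a single worker, mark_used_flags, that takes the flag value as a parameter, with the old entry points kept as one-line wrappers. A generic sketch of that refactoring pattern on a toy tree (struct node stands in for rtx; illustrative only):

struct node { int used; int nkids; struct node **kids; };

static void
mark_used (struct node *n, int flag)
{
  int i;
  if (!n)
    return;
  n->used = flag;
  for (i = 0; i < n->nkids; i++)
    mark_used (n->kids[i], flag);
}

void set_used (struct node *n)   { mark_used (n, 1); }
void reset_used (struct node *n) { mark_used (n, 0); }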
2889 2834
2890 /* Copy X if necessary so that it won't be altered by changes in OTHER. 2835 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2891 Return X or the rtx for the pseudo reg the value of X was copied into. 2836 Return X or the rtx for the pseudo reg the value of X was copied into.
2892 OTHER must be valid as a SET_DEST. */ 2837 OTHER must be valid as a SET_DEST. */
2924 return x; 2869 return x;
2925 } 2870 }
2926 2871
2927 /* Emission of insns (adding them to the doubly-linked list). */ 2872 /* Emission of insns (adding them to the doubly-linked list). */
2928 2873
2929 /* Return the first insn of the current sequence or current function. */
2930
2931 rtx
2932 get_insns (void)
2933 {
2934 return first_insn;
2935 }
2936
2937 /* Specify a new insn as the first in the chain. */
2938
2939 void
2940 set_first_insn (rtx insn)
2941 {
2942 gcc_assert (!PREV_INSN (insn));
2943 first_insn = insn;
2944 }
2945
2946 /* Return the last insn emitted in current sequence or current function. */
2947
2948 rtx
2949 get_last_insn (void)
2950 {
2951 return last_insn;
2952 }
2953
2954 /* Specify a new insn as the last in the chain. */
2955
2956 void
2957 set_last_insn (rtx insn)
2958 {
2959 gcc_assert (!NEXT_INSN (insn));
2960 last_insn = insn;
2961 }
2962
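The out-of-line accessors above drop out of emit-rtl.c; throughout the rest of the diff, direct uses of the first_insn/last_insn macros become get_insns ()/get_last_insn () and set_first_insn ()/set_last_insn () calls, so the chain head and tail are only touched through accessors (the accessors themselves presumably live on as inline functions in a header). A stripped-down model of the pattern, with a plain struct standing in for crtl->emit (not GCC code):

struct insn { struct insn *prev, *next; };

static struct { struct insn *first, *last; } emit_state;

static struct insn *get_first (void)   { return emit_state.first; }
static struct insn *get_last (void)    { return emit_state.last; }
static void set_first (struct insn *i) { emit_state.first = i; }
static void set_last (struct insn *i)  { emit_state.last = i; }

/* Append an insn to the chain, mirroring the shape of add_insn.  */
static void
append_insn (struct insn *i)
{
  i->prev = get_last ();
  i->next = 0;
  if (get_last ())
    get_last ()->next = i;
  if (!get_first ())
    set_first (i);
  set_last (i);
}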
2963 /* Return the last insn emitted, even if it is in a sequence now pushed. */ 2874 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2964 2875
2965 rtx 2876 rtx
2966 get_last_insn_anywhere (void) 2877 get_last_insn_anywhere (void)
2967 { 2878 {
2968 struct sequence_stack *stack; 2879 struct sequence_stack *stack;
2969 if (last_insn) 2880 if (get_last_insn ())
2970 return last_insn; 2881 return get_last_insn ();
2971 for (stack = seq_stack; stack; stack = stack->next) 2882 for (stack = seq_stack; stack; stack = stack->next)
2972 if (stack->last != 0) 2883 if (stack->last != 0)
2973 return stack->last; 2884 return stack->last;
2974 return 0; 2885 return 0;
2975 } 2886 }
2978 function. This routine looks inside SEQUENCEs. */ 2889 function. This routine looks inside SEQUENCEs. */
2979 2890
2980 rtx 2891 rtx
2981 get_first_nonnote_insn (void) 2892 get_first_nonnote_insn (void)
2982 { 2893 {
2983 rtx insn = first_insn; 2894 rtx insn = get_insns ();
2984 2895
2985 if (insn) 2896 if (insn)
2986 { 2897 {
2987 if (NOTE_P (insn)) 2898 if (NOTE_P (insn))
2988 for (insn = next_insn (insn); 2899 for (insn = next_insn (insn);
3004 function. This routine looks inside SEQUENCEs. */ 2915 function. This routine looks inside SEQUENCEs. */
3005 2916
3006 rtx 2917 rtx
3007 get_last_nonnote_insn (void) 2918 get_last_nonnote_insn (void)
3008 { 2919 {
3009 rtx insn = last_insn; 2920 rtx insn = get_last_insn ();
3010 2921
3011 if (insn) 2922 if (insn)
3012 { 2923 {
3013 if (NOTE_P (insn)) 2924 if (NOTE_P (insn))
3014 for (insn = previous_insn (insn); 2925 for (insn = previous_insn (insn);
3025 } 2936 }
3026 2937
3027 return insn; 2938 return insn;
3028 } 2939 }
3029 2940
3030 /* Return a number larger than any instruction's uid in this function. */
3031
3032 int
3033 get_max_uid (void)
3034 {
3035 return cur_insn_uid;
3036 }
3037
3038 /* Return the number of actual (non-debug) insns emitted in this 2941 /* Return the number of actual (non-debug) insns emitted in this
3039 function. */ 2942 function. */
3040 2943
3041 int 2944 int
3042 get_max_insn_count (void) 2945 get_max_insn_count (void)
3184 { 3087 {
3185 while (insn) 3088 while (insn)
3186 { 3089 {
3187 insn = PREV_INSN (insn); 3090 insn = PREV_INSN (insn);
3188 if (insn == 0 || !DEBUG_INSN_P (insn)) 3091 if (insn == 0 || !DEBUG_INSN_P (insn))
3092 break;
3093 }
3094
3095 return insn;
3096 }
3097
3098 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3099 This routine does not look inside SEQUENCEs. */
3100
3101 rtx
3102 next_nonnote_nondebug_insn (rtx insn)
3103 {
3104 while (insn)
3105 {
3106 insn = NEXT_INSN (insn);
3107 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3108 break;
3109 }
3110
3111 return insn;
3112 }
3113
3114 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3115 This routine does not look inside SEQUENCEs. */
3116
3117 rtx
3118 prev_nonnote_nondebug_insn (rtx insn)
3119 {
3120 while (insn)
3121 {
3122 insn = PREV_INSN (insn);
3123 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3189 break; 3124 break;
3190 } 3125 }
3191 3126
3192 return insn; 3127 return insn;
3193 } 3128 }
3623 tem = try_split (PATTERN (tem), tem, 1); 3558 tem = try_split (PATTERN (tem), tem, 1);
3624 3559
3625 /* Return either the first or the last insn, depending on which was 3560 /* Return either the first or the last insn, depending on which was
3626 requested. */ 3561 requested. */
3627 return last 3562 return last
3628 ? (after ? PREV_INSN (after) : last_insn) 3563 ? (after ? PREV_INSN (after) : get_last_insn ())
3629 : NEXT_INSN (before); 3564 : NEXT_INSN (before);
3630 } 3565 }
3631 3566
3632 /* Make and return an INSN rtx, initializing all its slots. 3567 /* Make and return an INSN rtx, initializing all its slots.
3633 Store PATTERN in the pattern slots. */ 3568 Store PATTERN in the pattern slots. */
3726 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */ 3661 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3727 3662
3728 void 3663 void
3729 add_insn (rtx insn) 3664 add_insn (rtx insn)
3730 { 3665 {
3731 PREV_INSN (insn) = last_insn; 3666 PREV_INSN (insn) = get_last_insn();
3732 NEXT_INSN (insn) = 0; 3667 NEXT_INSN (insn) = 0;
3733 3668
3734 if (NULL != last_insn) 3669 if (NULL != get_last_insn())
3735 NEXT_INSN (last_insn) = insn; 3670 NEXT_INSN (get_last_insn ()) = insn;
3736 3671
3737 if (NULL == first_insn) 3672 if (NULL == get_insns ())
3738 first_insn = insn; 3673 set_first_insn (insn);
3739 3674
3740 last_insn = insn; 3675 set_last_insn (insn);
3741 } 3676 }
3742 3677
3743 /* Add INSN into the doubly-linked list after insn AFTER. This and 3678 /* Add INSN into the doubly-linked list after insn AFTER. This and
3744 the next should be the only functions called to insert an insn once 3679 the next should be the only functions called to insert an insn once
3745 delay slots have been filled since only they know how to update a 3680 delay slots have been filled since only they know how to update a
3759 { 3694 {
3760 PREV_INSN (next) = insn; 3695 PREV_INSN (next) = insn;
3761 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) 3696 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3762 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn; 3697 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3763 } 3698 }
3764 else if (last_insn == after) 3699 else if (get_last_insn () == after)
3765 last_insn = insn; 3700 set_last_insn (insn);
3766 else 3701 else
3767 { 3702 {
3768 struct sequence_stack *stack = seq_stack; 3703 struct sequence_stack *stack = seq_stack;
3769 /* Scan all pending sequences too. */ 3704 /* Scan all pending sequences too. */
3770 for (; stack; stack = stack->next) 3705 for (; stack; stack = stack->next)
3824 { 3759 {
3825 rtx sequence = PATTERN (prev); 3760 rtx sequence = PATTERN (prev);
3826 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn; 3761 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3827 } 3762 }
3828 } 3763 }
3829 else if (first_insn == before) 3764 else if (get_insns () == before)
3830 first_insn = insn; 3765 set_first_insn (insn);
3831 else 3766 else
3832 { 3767 {
3833 struct sequence_stack *stack = seq_stack; 3768 struct sequence_stack *stack = seq_stack;
3834 /* Scan all pending sequences too. */ 3769 /* Scan all pending sequences too. */
3835 for (; stack; stack = stack->next) 3770 for (; stack; stack = stack->next)
3896 { 3831 {
3897 rtx sequence = PATTERN (prev); 3832 rtx sequence = PATTERN (prev);
3898 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next; 3833 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3899 } 3834 }
3900 } 3835 }
3901 else if (first_insn == insn) 3836 else if (get_insns () == insn)
3902 first_insn = next; 3837 {
3838 if (next)
3839 PREV_INSN (next) = NULL;
3840 set_first_insn (next);
3841 }
3903 else 3842 else
3904 { 3843 {
3905 struct sequence_stack *stack = seq_stack; 3844 struct sequence_stack *stack = seq_stack;
3906 /* Scan all pending sequences too. */ 3845 /* Scan all pending sequences too. */
3907 for (; stack; stack = stack->next) 3846 for (; stack; stack = stack->next)
3918 { 3857 {
3919 PREV_INSN (next) = prev; 3858 PREV_INSN (next) = prev;
3920 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) 3859 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3921 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev; 3860 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3922 } 3861 }
3923 else if (last_insn == insn) 3862 else if (get_last_insn () == insn)
3924 last_insn = prev; 3863 set_last_insn (prev);
3925 else 3864 else
3926 { 3865 {
3927 struct sequence_stack *stack = seq_stack; 3866 struct sequence_stack *stack = seq_stack;
3928 /* Scan all pending sequences too. */ 3867 /* Scan all pending sequences too. */
3929 for (; stack; stack = stack->next) 3868 for (; stack; stack = stack->next)
3936 gcc_assert (stack); 3875 gcc_assert (stack);
3937 } 3876 }
3938 if (!BARRIER_P (insn) 3877 if (!BARRIER_P (insn)
3939 && (bb = BLOCK_FOR_INSN (insn))) 3878 && (bb = BLOCK_FOR_INSN (insn)))
3940 { 3879 {
3941 if (INSN_P (insn)) 3880 if (NONDEBUG_INSN_P (insn))
3942 df_set_bb_dirty (bb); 3881 df_set_bb_dirty (bb);
3943 if (BB_HEAD (bb) == insn) 3882 if (BB_HEAD (bb) == insn)
3944 { 3883 {
3945 /* Never ever delete the basic block note without deleting whole 3884 /* Never ever delete the basic block note without deleting whole
3946 basic block. */ 3885 basic block. */
3980 3919
3981 void 3920 void
3982 delete_insns_since (rtx from) 3921 delete_insns_since (rtx from)
3983 { 3922 {
3984 if (from == 0) 3923 if (from == 0)
3985 first_insn = 0; 3924 set_first_insn (0);
3986 else 3925 else
3987 NEXT_INSN (from) = 0; 3926 NEXT_INSN (from) = 0;
3988 last_insn = from; 3927 set_last_insn (from);
3989 } 3928 }
3990 3929
3991 /* This function is deprecated, please use sequences instead. 3930 /* This function is deprecated, please use sequences instead.
3992 3931
3993 Move a consecutive bunch of insns to a different place in the chain. 3932 Move a consecutive bunch of insns to a different place in the chain.
3999 called after delay-slot filling has been done. */ 3938 called after delay-slot filling has been done. */
4000 3939
4001 void 3940 void
4002 reorder_insns_nobb (rtx from, rtx to, rtx after) 3941 reorder_insns_nobb (rtx from, rtx to, rtx after)
4003 { 3942 {
3943 #ifdef ENABLE_CHECKING
3944 rtx x;
3945 for (x = from; x != to; x = NEXT_INSN (x))
3946 gcc_assert (after != x);
3947 gcc_assert (after != to);
3948 #endif
3949
4004 /* Splice this bunch out of where it is now. */ 3950 /* Splice this bunch out of where it is now. */
4005 if (PREV_INSN (from)) 3951 if (PREV_INSN (from))
4006 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to); 3952 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4007 if (NEXT_INSN (to)) 3953 if (NEXT_INSN (to))
4008 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from); 3954 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4009 if (last_insn == to) 3955 if (get_last_insn () == to)
4010 last_insn = PREV_INSN (from); 3956 set_last_insn (PREV_INSN (from));
4011 if (first_insn == from) 3957 if (get_insns () == from)
4012 first_insn = NEXT_INSN (to); 3958 set_first_insn (NEXT_INSN (to));
4013 3959
4014 /* Make the new neighbors point to it and it to them. */ 3960 /* Make the new neighbors point to it and it to them. */
4015 if (NEXT_INSN (after)) 3961 if (NEXT_INSN (after))
4016 PREV_INSN (NEXT_INSN (after)) = to; 3962 PREV_INSN (NEXT_INSN (after)) = to;
4017 3963
4018 NEXT_INSN (to) = NEXT_INSN (after); 3964 NEXT_INSN (to) = NEXT_INSN (after);
4019 PREV_INSN (from) = after; 3965 PREV_INSN (from) = after;
4020 NEXT_INSN (after) = from; 3966 NEXT_INSN (after) = from;
4021 if (after == last_insn) 3967 if (after == get_last_insn())
4022 last_insn = to; 3968 set_last_insn (to);
4023 } 3969 }
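reorder_insns_nobb above is a plain doubly-linked-list splice: unlink the [from, to] range from where it sits, then relink it after AFTER, keeping the chain head and tail current through the accessors; the new ENABLE_CHECKING block only verifies that AFTER is not inside the range being moved. A self-contained sketch of the same splice on a toy list (illustrative only; assumes from..to is a contiguous sub-chain and after lies outside it):

struct node { struct node *prev, *next; };

static void
splice_after (struct node *from, struct node *to, struct node *after,
              struct node **head, struct node **tail)
{
  /* Unlink the [from, to] range.  */
  if (from->prev)
    from->prev->next = to->next;
  if (to->next)
    to->next->prev = from->prev;
  if (*tail == to)
    *tail = from->prev;
  if (*head == from)
    *head = to->next;

  /* Relink it just after AFTER.  */
  if (after->next)
    after->next->prev = to;
  to->next = after->next;
  from->prev = after;
  after->next = from;
  if (*tail == after)
    *tail = to;
}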
4024 3970
4025 /* Same as function above, but take care to update BB boundaries. */ 3971 /* Same as function above, but take care to update BB boundaries. */
4026 void 3972 void
4027 reorder_insns (rtx from, rtx to, rtx after) 3973 reorder_insns (rtx from, rtx to, rtx after)
4342 PREV_INSN (first) = after; 4288 PREV_INSN (first) = after;
4343 NEXT_INSN (last) = after_after; 4289 NEXT_INSN (last) = after_after;
4344 if (after_after) 4290 if (after_after)
4345 PREV_INSN (after_after) = last; 4291 PREV_INSN (after_after) = last;
4346 4292
4347 if (after == last_insn) 4293 if (after == get_last_insn())
4348 last_insn = last; 4294 set_last_insn (last);
4349 4295
4350 return last; 4296 return last;
4351 } 4297 }
4352 4298
4353 /* Make X be output after the insn AFTER and set the BB of insn. If 4299 /* Make X be output after the insn AFTER and set the BB of insn. If
4840 Returns the last insn emitted. */ 4786 Returns the last insn emitted. */
4841 4787
4842 rtx 4788 rtx
4843 emit_insn (rtx x) 4789 emit_insn (rtx x)
4844 { 4790 {
4845 rtx last = last_insn; 4791 rtx last = get_last_insn();
4846 rtx insn; 4792 rtx insn;
4847 4793
4848 if (x == NULL_RTX) 4794 if (x == NULL_RTX)
4849 return last; 4795 return last;
4850 4796
4886 and add it to the end of the doubly-linked list. */ 4832 and add it to the end of the doubly-linked list. */
4887 4833
4888 rtx 4834 rtx
4889 emit_debug_insn (rtx x) 4835 emit_debug_insn (rtx x)
4890 { 4836 {
4891 rtx last = last_insn; 4837 rtx last = get_last_insn();
4892 rtx insn; 4838 rtx insn;
4893 4839
4894 if (x == NULL_RTX) 4840 if (x == NULL_RTX)
4895 return last; 4841 return last;
4896 4842
5278 { 5224 {
5279 tem = free_sequence_stack; 5225 tem = free_sequence_stack;
5280 free_sequence_stack = tem->next; 5226 free_sequence_stack = tem->next;
5281 } 5227 }
5282 else 5228 else
5283 tem = GGC_NEW (struct sequence_stack); 5229 tem = ggc_alloc_sequence_stack ();
5284 5230
5285 tem->next = seq_stack; 5231 tem->next = seq_stack;
5286 tem->first = first_insn; 5232 tem->first = get_insns ();
5287 tem->last = last_insn; 5233 tem->last = get_last_insn ();
5288 5234
5289 seq_stack = tem; 5235 seq_stack = tem;
5290 5236
5291 first_insn = 0; 5237 set_first_insn (0);
5292 last_insn = 0; 5238 set_last_insn (0);
5293 } 5239 }
5294 5240
5295 /* Set up the insn chain starting with FIRST as the current sequence, 5241 /* Set up the insn chain starting with FIRST as the current sequence,
5296 saving the previously current one. See the documentation for 5242 saving the previously current one. See the documentation for
5297 start_sequence for more information about how to use this function. */ 5243 start_sequence for more information about how to use this function. */
5303 5249
5304 start_sequence (); 5250 start_sequence ();
5305 5251
5306 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last)); 5252 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5307 5253
5308 first_insn = first; 5254 set_first_insn (first);
5309 last_insn = last; 5255 set_last_insn (last);
5310 } 5256 }
5311 5257
5312 /* Like push_to_sequence, but take the last insn as an argument to avoid 5258 /* Like push_to_sequence, but take the last insn as an argument to avoid
5313 looping through the list. */ 5259 looping through the list. */
5314 5260
5315 void 5261 void
5316 push_to_sequence2 (rtx first, rtx last) 5262 push_to_sequence2 (rtx first, rtx last)
5317 { 5263 {
5318 start_sequence (); 5264 start_sequence ();
5319 5265
5320 first_insn = first; 5266 set_first_insn (first);
5321 last_insn = last; 5267 set_last_insn (last);
5322 } 5268 }
5323 5269
5324 /* Set up the outer-level insn chain 5270 /* Set up the outer-level insn chain
5325 as the current sequence, saving the previously current one. */ 5271 as the current sequence, saving the previously current one. */
5326 5272
5332 start_sequence (); 5278 start_sequence ();
5333 5279
5334 for (stack = seq_stack; stack; stack = stack->next) 5280 for (stack = seq_stack; stack; stack = stack->next)
5335 top = stack; 5281 top = stack;
5336 5282
5337 first_insn = top->first; 5283 set_first_insn (top->first);
5338 last_insn = top->last; 5284 set_last_insn (top->last);
5339 } 5285 }
5340 5286
5341 /* After emitting to the outer-level insn chain, update the outer-level 5287 /* After emitting to the outer-level insn chain, update the outer-level
5342 insn chain, and restore the previous saved state. */ 5288 insn chain, and restore the previous saved state. */
5343 5289
5347 struct sequence_stack *stack, *top = NULL; 5293 struct sequence_stack *stack, *top = NULL;
5348 5294
5349 for (stack = seq_stack; stack; stack = stack->next) 5295 for (stack = seq_stack; stack; stack = stack->next)
5350 top = stack; 5296 top = stack;
5351 5297
5352 top->first = first_insn; 5298 top->first = get_insns ();
5353 top->last = last_insn; 5299 top->last = get_last_insn ();
5354 5300
5355 end_sequence (); 5301 end_sequence ();
5356 } 5302 }
5357 5303
5358 /* After emitting to a sequence, restore previous saved state. 5304 /* After emitting to a sequence, restore previous saved state.
5371 void 5317 void
5372 end_sequence (void) 5318 end_sequence (void)
5373 { 5319 {
5374 struct sequence_stack *tem = seq_stack; 5320 struct sequence_stack *tem = seq_stack;
5375 5321
5376 first_insn = tem->first; 5322 set_first_insn (tem->first);
5377 last_insn = tem->last; 5323 set_last_insn (tem->last);
5378 seq_stack = tem->next; 5324 seq_stack = tem->next;
5379 5325
5380 memset (tem, 0, sizeof (*tem)); 5326 memset (tem, 0, sizeof (*tem));
5381 tem->next = free_sequence_stack; 5327 tem->next = free_sequence_stack;
5382 free_sequence_stack = tem; 5328 free_sequence_stack = tem;
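start_sequence and end_sequence in this file save and restore the current insn chain on seq_stack; the hunks above only change them to go through the accessors and the typed allocator. A small model of the push/pop idea (a sketch with stand-in names, not GCC's sequence_stack handling):

#include <stdlib.h>

struct insn;
struct seq_frame { struct insn *first, *last; struct seq_frame *next; };

static struct insn *cur_first, *cur_last;
static struct seq_frame *seq_stack_model;

/* Save the current chain and start an empty one.  */
static void
push_sequence (void)
{
  struct seq_frame *f = (struct seq_frame *) malloc (sizeof *f);
  f->first = cur_first;
  f->last = cur_last;
  f->next = seq_stack_model;
  seq_stack_model = f;
  cur_first = cur_last = 0;
}

/* Restore the saved chain; callers are expected to have picked up the
   inner chain before popping.  */
static void
pop_sequence (void)
{
  struct seq_frame *f = seq_stack_model;
  cur_first = f->first;
  cur_last = f->last;
  seq_stack_model = f->next;
  free (f);
}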
5398 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx; 5344 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5399 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx; 5345 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5400 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx; 5346 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5401 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx; 5347 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5402 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx; 5348 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5349 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5350 = virtual_preferred_stack_boundary_rtx;
5403 } 5351 }
5404 5352
5405 5353
5406 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */ 5354 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5407 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS]; 5355 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5574 before generating rtl for each function. */ 5522 before generating rtl for each function. */
5575 5523
5576 void 5524 void
5577 init_emit (void) 5525 init_emit (void)
5578 { 5526 {
5579 first_insn = NULL; 5527 set_first_insn (NULL);
5580 last_insn = NULL; 5528 set_last_insn (NULL);
5581 if (MIN_NONDEBUG_INSN_UID) 5529 if (MIN_NONDEBUG_INSN_UID)
5582 cur_insn_uid = MIN_NONDEBUG_INSN_UID; 5530 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5583 else 5531 else
5584 cur_insn_uid = 1; 5532 cur_insn_uid = 1;
5585 cur_debug_insn_uid = 1; 5533 cur_debug_insn_uid = 1;
5593 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101; 5541 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5594 5542
5595 crtl->emit.regno_pointer_align 5543 crtl->emit.regno_pointer_align
5596 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length); 5544 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5597 5545
5598 regno_reg_rtx 5546 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5599 = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);
5600 5547
5601 /* Put copies of all the hard registers into regno_reg_rtx. */ 5548 /* Put copies of all the hard registers into regno_reg_rtx. */
5602 memcpy (regno_reg_rtx, 5549 memcpy (regno_reg_rtx,
5603 static_regno_reg_rtx, 5550 initial_regno_reg_rtx,
5604 FIRST_PSEUDO_REGISTER * sizeof (rtx)); 5551 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5605 5552
5606 /* Put copies of all the virtual register rtx into regno_reg_rtx. */ 5553 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5607 init_virtual_regs (); 5554 init_virtual_regs ();
5608 5555
5721 virtual_stack_dynamic_rtx = 5668 virtual_stack_dynamic_rtx =
5722 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM); 5669 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5723 virtual_outgoing_args_rtx = 5670 virtual_outgoing_args_rtx =
5724 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM); 5671 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5725 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM); 5672 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5673 virtual_preferred_stack_boundary_rtx =
5674 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5726 5675
5727 /* Initialize RTL for commonly used hard registers. These are 5676 /* Initialize RTL for commonly used hard registers. These are
5728 copied into regno_reg_rtx as we begin to compile each function. */ 5677 copied into regno_reg_rtx as we begin to compile each function. */
5729 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 5678 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5730 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i); 5679 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5731 5680
5732 #ifdef RETURN_ADDRESS_POINTER_REGNUM 5681 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5733 return_address_pointer_rtx 5682 return_address_pointer_rtx
5734 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM); 5683 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5735 #endif 5684 #endif