-rw-r--r--  gcc/ChangeLog                30
-rw-r--r--  gcc/alias.c                   8
-rw-r--r--  gcc/builtins.c              186
-rw-r--r--  gcc/c-objc-common.c          12
-rw-r--r--  gcc/config/i386/i386.c      257
-rw-r--r--  gcc/config/ia64/ia64.c       38
-rw-r--r--  gcc/config/rs6000/rs6000.c  242
-rw-r--r--  gcc/fold-const.c             69
-rw-r--r--  gcc/gimplify.c               66
-rw-r--r--  gcc/target-def.h              9
-rw-r--r--  gcc/target.h                  4
-rw-r--r--  gcc/tree-gimple.h             3
-rw-r--r--  gcc/tree.h                    5
13 files changed, 869 insertions, 60 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index a19dcb2..1793e5c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,33 @@
+2004-06-08 Jason Merrill <jason@redhat.com>
+
+ Gimplify VA_ARG_EXPR into simpler forms.
+ * target.h: Add gimplify_va_arg_expr hook.
+ * target-def.h: Add TARGET_GIMPLIFY_VA_ARG_EXPR.
+ * fold-const.c (build_fold_addr_expr)
+ (build_fold_addr_expr_with_type): Move from gimplify.c.
+ * tree.h: Declare them.
+ * gimplify.c (gimplify_and_add): New fn.
+ (build_addr_expr, build_addr_expr_with_type): Move to fold-const.c.
+ (gimplify_array_ref_to_plus, gimplify_modify_expr)
+ (gimplify_expr): Use build_fold_*.
+ (copy_if_shared_r): Only mark VA_ARG_EXPR volatile if we
+ don't know how to gimplify it.
+ * builtins.c (std_gimplify_va_arg_expr): New fn.
+ (dummy_object): New static fn.
+ (gimplify_va_arg_expr): New fn.
+ (stabilize_va_list): Use build_fold_*.
+ * tree-gimple.h: Declare new fns.
+ * config/i386/i386.c (TARGET_GIMPLIFY_VA_ARG_EXPR): Define.
+ (ix86_gimplify_va_arg): New fn.
+ * config/ia64/ia64.c (TARGET_GIMPLIFY_VA_ARG_EXPR): Define.
+ (ia64_gimplify_va_arg): New fn.
+ * config/rs6000/rs6000.c (rs6000_gimplify_va_arg): New fn.
+ (TARGET_GIMPLIFY_VA_ARG_EXPR): Define.
+ * config/sparc/sparc.c (sparc_gimplify_va_arg): New fn.
+ * alias.c (get_varargs_alias_set): Just return 0 for now.
+
+ * c-objc-common.c (c_tree_printer): Improve handling of %T.
+
2004-06-09 Andrew Pinski <pinskia@physics.uc.edu>
* tree-complex.c (expand_complex_comparison): Use fold_convert instead
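
For orientation before the per-file hunks: the new hook added in target.h below takes the VA_ARG_EXPR (via expr_p) plus pre- and post-statement queues, and target-def.h defaults it to std_gimplify_va_arg_expr unless the port still defines EXPAND_BUILTIN_VA_ARG. The following is a minimal sketch, not part of this patch, of how a hypothetical port "foo" with no special varargs conventions could wire itself up; the names foo.c and foo_gimplify_va_arg are invented for illustration.

/* Hypothetical foo.c fragment (illustration only, not in this patch).  */

static void
foo_gimplify_va_arg (tree *expr_p, tree *pre_p, tree *post_p)
{
  /* Nothing target-specific to do: let the generic lowering compute
     the argument address and advance the va_list.  */
  std_gimplify_va_arg_expr (expr_p, pre_p, post_p);
}

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR foo_gimplify_va_arg

A port with nothing special to do could equally leave the std_gimplify_va_arg_expr default in place; defining the macro explicitly only matters once ABI-specific handling is needed, as in the i386, ia64 and rs6000 changes below.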
diff --git a/gcc/alias.c b/gcc/alias.c
index e41e456..3e2bbbb 100644
--- a/gcc/alias.c
+++ b/gcc/alias.c
@@ -737,10 +737,18 @@ static GTY(()) HOST_WIDE_INT varargs_set = -1;
HOST_WIDE_INT
get_varargs_alias_set (void)
{
+#if 1
+ /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
+ varargs alias set to an INDIRECT_REF (FIXME!), so we can't
+ consistently use the varargs alias set for loads from the varargs
+ area. So don't use it anywhere. */
+ return 0;
+#else
if (varargs_set == -1)
varargs_set = new_alias_set ();
return varargs_set;
+#endif
}
/* Likewise, but used for the fixed portions of the frame, e.g., register
diff --git a/gcc/builtins.c b/gcc/builtins.c
index d10924f..9816af0 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -4106,10 +4106,7 @@ stabilize_va_list (tree valist, int needs_lvalue)
if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
{
tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
- tree p2 = build_pointer_type (va_list_type_node);
-
- valist = build1 (ADDR_EXPR, p2, valist);
- valist = fold_convert (p1, valist);
+ valist = build_fold_addr_expr_with_type (valist, p1);
}
}
else
@@ -4128,8 +4125,7 @@ stabilize_va_list (tree valist, int needs_lvalue)
if (TREE_SIDE_EFFECTS (valist))
valist = save_expr (valist);
- valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
- valist));
+ valist = build_fold_indirect_ref (valist);
}
return valist;
@@ -4364,6 +4360,184 @@ expand_builtin_va_arg (tree valist, tree type)
return result;
}
+/* Like std_expand_builtin_va_arg, but gimplify instead of expanding. */
+
+void
+std_gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
+{
+ tree addr, t, type_size = NULL;
+ tree align, alignm1;
+ tree rounded_size;
+ HOST_WIDE_INT boundary;
+ tree valist = TREE_OPERAND (*expr_p, 0);
+ tree type = TREE_TYPE (*expr_p);
+
+ /* Compute the rounded size of the type. */
+ align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
+ alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
+ boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
+
+ /* Reduce valist so it's sharable with the postqueue. */
+ gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
+
+ /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
+ requires greater alignment, we must perform dynamic alignment. */
+
+ if (boundary > PARM_BOUNDARY)
+ {
+ if (!PAD_VARARGS_DOWN)
+ {
+ t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
+ build2 (PLUS_EXPR, TREE_TYPE (valist), valist,
+ build_int_2 (boundary / BITS_PER_UNIT - 1, 0)));
+ gimplify_stmt (&t);
+ append_to_statement_list (t, pre_p);
+ }
+ t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
+ build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist,
+ build_int_2 (~(boundary / BITS_PER_UNIT - 1), -1)));
+ gimplify_stmt (&t);
+ append_to_statement_list (t, pre_p);
+ }
+ if (type == error_mark_node
+ || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
+ || TREE_OVERFLOW (type_size))
+ rounded_size = size_zero_node;
+ else
+ {
+ rounded_size = fold (build2 (PLUS_EXPR, sizetype, type_size, alignm1));
+ rounded_size = fold (build2 (TRUNC_DIV_EXPR, sizetype,
+ rounded_size, align));
+ rounded_size = fold (build2 (MULT_EXPR, sizetype,
+ rounded_size, align));
+ }
+
+ /* Reduce rounded_size so it's sharable with the postqueue. */
+ gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
+
+ /* Get AP. */
+ addr = valist;
+ if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
+ {
+ /* Small args are padded downward. */
+ addr = fold (build2 (PLUS_EXPR, TREE_TYPE (addr), addr,
+ fold (build3 (COND_EXPR, sizetype,
+ fold (build2 (GT_EXPR, sizetype,
+ rounded_size,
+ align)),
+ size_zero_node,
+ fold (build2 (MINUS_EXPR,
+ sizetype,
+ rounded_size,
+ type_size))))));
+ }
+
+ addr = convert (build_pointer_type (type), addr);
+ *expr_p = build1 (INDIRECT_REF, type, addr);
+
+ /* Compute new value for AP. */
+ if (! integer_zerop (rounded_size))
+ {
+ t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
+ build2 (PLUS_EXPR, TREE_TYPE (valist), valist,
+ rounded_size));
+ gimplify_stmt (&t);
+ append_to_statement_list (t, post_p);
+ }
+}
+
+/* Return a dummy expression of type TYPE in order to keep going after an
+ error. */
+
+static tree
+dummy_object (tree type)
+{
+ tree t = convert (build_pointer_type (type), null_pointer_node);
+ return build1 (INDIRECT_REF, type, t);
+}
+
+/* Like expand_builtin_va_arg, but gimplify instead of expanding. */
+
+enum gimplify_status
+gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
+{
+ tree promoted_type, want_va_type, have_va_type;
+ tree valist = TREE_OPERAND (*expr_p, 0);
+ tree type = TREE_TYPE (*expr_p);
+ tree t;
+
+ /* Verify that valist is of the proper type. */
+
+ want_va_type = va_list_type_node;
+ have_va_type = TREE_TYPE (valist);
+ if (TREE_CODE (want_va_type) == ARRAY_TYPE)
+ {
+ /* If va_list is an array type, the argument may have decayed
+ to a pointer type, e.g. by being passed to another function.
+ In that case, unwrap both types so that we can compare the
+ underlying records. */
+ if (TREE_CODE (have_va_type) == ARRAY_TYPE
+ || TREE_CODE (have_va_type) == POINTER_TYPE)
+ {
+ want_va_type = TREE_TYPE (want_va_type);
+ have_va_type = TREE_TYPE (have_va_type);
+ }
+ }
+
+ if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
+ {
+ error ("first argument to `va_arg' not of type `va_list'");
+ *expr_p = dummy_object (type);
+ return GS_ALL_DONE;
+ }
+
+ /* Generate a diagnostic for requesting data of a type that cannot
+ be passed through `...' due to type promotion at the call site. */
+ else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
+ != type)
+ {
+ static bool gave_help;
+
+ /* Unfortunately, this is merely undefined, rather than a constraint
+ violation, so we cannot make this an error. If this call is never
+ executed, the program is still strictly conforming. */
+ warning ("`%T' is promoted to `%T' when passed through `...'",
+ type, promoted_type);
+ if (! gave_help)
+ {
+ gave_help = true;
+ warning ("(so you should pass `%T' not `%T' to `va_arg')",
+ promoted_type, type);
+ }
+
+ /* We can, however, treat "undefined" any way we please.
+ Call abort to encourage the user to fix the program. */
+ inform ("if this code is reached, the program will abort");
+ t = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
+ NULL);
+ append_to_statement_list (t, pre_p);
+
+ /* This is dead code, but go ahead and finish so that the
+ mode of the result comes out right. */
+ *expr_p = dummy_object (type);
+ return GS_ALL_DONE;
+ }
+ else
+ {
+ /* Make it easier for the backends by protecting the valist argument
+ from multiple evaluations. */
+ valist = stabilize_va_list (valist, 0);
+ TREE_OPERAND (*expr_p, 0) = valist;
+
+ if (!targetm.calls.gimplify_va_arg_expr)
+ /* Once most targets are converted this should abort. */
+ return GS_ALL_DONE;
+
+ targetm.calls.gimplify_va_arg_expr (expr_p, pre_p, post_p);
+ return GS_OK;
+ }
+}
+
/* Expand ARGLIST, from a call to __builtin_va_end. */
static rtx
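
A user-level illustration of the diagnostic path in gimplify_va_arg_expr above (this is an example translation unit, not part of the patch). Requesting a type that is changed by the default argument promotions is undefined, so the new gimplifier warns, suggests the promoted type, and replaces the access with a call to __builtin_trap:

/* example.c -- compiling this exercises the new warning path.  */
#include <stdarg.h>

double
first_float (int count, ...)
{
  va_list ap;
  double d;

  va_start (ap, count);
  /* A float argument is promoted to double when passed through `...',
     so asking va_arg for `float' hits the promoted_type != type branch
     and the access is replaced with a trap.  */
  d = va_arg (ap, float);
  va_end (ap);
  return d;
}

The correct version would ask for double here, which is exactly what the added hint ("so you should pass `double' not `float' to `va_arg'") tells the user.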
diff --git a/gcc/c-objc-common.c b/gcc/c-objc-common.c
index 6b17ad4..f32bf6d 100644
--- a/gcc/c-objc-common.c
+++ b/gcc/c-objc-common.c
@@ -270,17 +270,15 @@ c_tree_printer (pretty_printer *pp, text_info *text)
break;
case 'T':
- if (TREE_CODE (t) == TYPE_DECL)
+ if (TYPE_P (t))
+ t = TYPE_NAME (t);
+ if (t && TREE_CODE (t) == TYPE_DECL)
{
if (DECL_NAME (t))
n = lang_hooks.decl_printable_name (t, 2);
}
- else
- {
- t = TYPE_NAME (t);
- if (t)
- n = IDENTIFIER_POINTER (t);
- }
+ else if (t)
+ n = IDENTIFIER_POINTER (t);
break;
case 'E':
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 1ae82e8..6e2463b 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -46,6 +46,7 @@ Boston, MA 02111-1307, USA. */
#include "target-def.h"
#include "langhooks.h"
#include "cgraph.h"
+#include "tree-gimple.h"
#ifndef CHECK_STACK_LIMIT
#define CHECK_STACK_LIMIT (-1)
@@ -877,6 +878,7 @@ static bool ix86_expand_carry_flag_compare (enum rtx_code, rtx, rtx, rtx*);
static tree ix86_build_builtin_va_list (void);
static void ix86_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
tree, int *, int);
+static void ix86_gimplify_va_arg (tree *expr_p, tree *pre_p, tree *post_p);
struct ix86_address
{
@@ -1069,6 +1071,9 @@ static void init_ext_80387_constants (void);
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS ix86_setup_incoming_varargs
+#undef TARGET_GIMPLIFY_VA_ARG_EXPR
+#define TARGET_GIMPLIFY_VA_ARG_EXPR ix86_gimplify_va_arg
+
struct gcc_target targetm = TARGET_INITIALIZER;
@@ -3410,6 +3415,258 @@ ix86_va_arg (tree valist, tree type)
return addr_rtx;
}
+
+/* Lower VA_ARG_EXPR at gimplification time. */
+
+void
+ix86_gimplify_va_arg (tree *expr_p, tree *pre_p, tree *post_p)
+{
+ tree valist = TREE_OPERAND (*expr_p, 0);
+ tree type = TREE_TYPE (*expr_p);
+ static const int intreg[6] = { 0, 1, 2, 3, 4, 5 };
+ tree f_gpr, f_fpr, f_ovf, f_sav;
+ tree gpr, fpr, ovf, sav, t;
+ int size, rsize;
+ tree lab_false, lab_over = NULL_TREE;
+ tree addr, t2;
+ rtx container;
+ int indirect_p = 0;
+ tree ptrtype;
+
+ /* Only the 64-bit target needs something special. */
+ if (!TARGET_64BIT)
+ {
+ std_gimplify_va_arg_expr (expr_p, pre_p, post_p);
+ return;
+ }
+
+ f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
+ f_fpr = TREE_CHAIN (f_gpr);
+ f_ovf = TREE_CHAIN (f_fpr);
+ f_sav = TREE_CHAIN (f_ovf);
+
+ valist = build_fold_indirect_ref (valist);
+ gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
+ fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
+ ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
+ sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
+
+ size = int_size_in_bytes (type);
+ if (size == -1)
+ {
+ /* Variable-size types are passed by reference. */
+ indirect_p = 1;
+ type = build_pointer_type (type);
+ size = int_size_in_bytes (type);
+ }
+ rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
+
+ container = construct_container (TYPE_MODE (type), type, 0,
+ REGPARM_MAX, SSE_REGPARM_MAX, intreg, 0);
+ /* Pull the value out of the saved registers ...  */
+
+ addr = create_tmp_var (ptr_type_node, "addr");
+ DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
+
+ if (container)
+ {
+ int needed_intregs, needed_sseregs;
+ int need_temp;
+ tree int_addr, sse_addr;
+
+ lab_false = create_artificial_label ();
+ lab_over = create_artificial_label ();
+
+ examine_argument (TYPE_MODE (type), type, 0,
+ &needed_intregs, &needed_sseregs);
+
+
+ need_temp = ((needed_intregs && TYPE_ALIGN (type) > 64)
+ || TYPE_ALIGN (type) > 128);
+
+ /* If we are passing a structure, verify that it occupies a consecutive
+ block in the register save area.  If not, we need to do moves. */
+ if (!need_temp && !REG_P (container))
+ {
+ /* Verify that all registers are strictly consecutive */
+ if (SSE_REGNO_P (REGNO (XEXP (XVECEXP (container, 0, 0), 0))))
+ {
+ int i;
+
+ for (i = 0; i < XVECLEN (container, 0) && !need_temp; i++)
+ {
+ rtx slot = XVECEXP (container, 0, i);
+ if (REGNO (XEXP (slot, 0)) != FIRST_SSE_REG + (unsigned int) i
+ || INTVAL (XEXP (slot, 1)) != i * 16)
+ need_temp = 1;
+ }
+ }
+ else
+ {
+ int i;
+
+ for (i = 0; i < XVECLEN (container, 0) && !need_temp; i++)
+ {
+ rtx slot = XVECEXP (container, 0, i);
+ if (REGNO (XEXP (slot, 0)) != (unsigned int) i
+ || INTVAL (XEXP (slot, 1)) != i * 8)
+ need_temp = 1;
+ }
+ }
+ }
+ if (!need_temp)
+ {
+ int_addr = addr;
+ sse_addr = addr;
+ }
+ else
+ {
+ int_addr = create_tmp_var (ptr_type_node, "int_addr");
+ DECL_POINTER_ALIAS_SET (int_addr) = get_varargs_alias_set ();
+ sse_addr = create_tmp_var (ptr_type_node, "sse_addr");
+ DECL_POINTER_ALIAS_SET (sse_addr) = get_varargs_alias_set ();
+ }
+ /* First ensure that we fit completely in registers. */
+ if (needed_intregs)
+ {
+ t = build_int_2 ((REGPARM_MAX - needed_intregs + 1) * 8, 0);
+ TREE_TYPE (t) = TREE_TYPE (gpr);
+ t = build2 (GE_EXPR, boolean_type_node, gpr, t);
+ t2 = build1 (GOTO_EXPR, void_type_node, lab_false);
+ t = build (COND_EXPR, void_type_node, t, t2, NULL_TREE);
+ gimplify_and_add (t, pre_p);
+ }
+ if (needed_sseregs)
+ {
+ t = build_int_2 ((SSE_REGPARM_MAX - needed_sseregs + 1) * 16
+ + REGPARM_MAX * 8, 0);
+ TREE_TYPE (t) = TREE_TYPE (fpr);
+ t = build2 (GE_EXPR, boolean_type_node, fpr, t);
+ t2 = build1 (GOTO_EXPR, void_type_node, lab_false);
+ t = build (COND_EXPR, void_type_node, t, t2, NULL_TREE);
+ gimplify_and_add (t, pre_p);
+ }
+
+ /* Compute index to start of area used for integer regs. */
+ if (needed_intregs)
+ {
+ /* int_addr = gpr + sav; */
+ t = build2 (PLUS_EXPR, ptr_type_node, sav, gpr);
+ t = build2 (MODIFY_EXPR, void_type_node, int_addr, t);
+ gimplify_and_add (t, pre_p);
+ }
+ if (needed_sseregs)
+ {
+ /* sse_addr = fpr + sav; */
+ t = build2 (PLUS_EXPR, ptr_type_node, sav, fpr);
+ t = build2 (MODIFY_EXPR, void_type_node, sse_addr, t);
+ gimplify_and_add (t, pre_p);
+ }
+ if (need_temp)
+ {
+ int i;
+ tree temp = create_tmp_var (type, "va_arg_tmp");
+
+ /* addr = &temp; */
+ t = build1 (ADDR_EXPR, build_pointer_type (type), temp);
+ t = build2 (MODIFY_EXPR, void_type_node, addr, t);
+ gimplify_and_add (t, pre_p);
+
+ for (i = 0; i < XVECLEN (container, 0); i++)
+ {
+ rtx slot = XVECEXP (container, 0, i);
+ rtx reg = XEXP (slot, 0);
+ enum machine_mode mode = GET_MODE (reg);
+ tree piece_type = lang_hooks.types.type_for_mode (mode, 1);
+ tree addr_type = build_pointer_type (piece_type);
+ tree src_addr, src;
+ int src_offset;
+ tree dest_addr, dest;
+
+ if (SSE_REGNO_P (REGNO (reg)))
+ {
+ src_addr = sse_addr;
+ src_offset = (REGNO (reg) - FIRST_SSE_REG) * 16;
+ }
+ else
+ {
+ src_addr = int_addr;
+ src_offset = REGNO (reg) * 8;
+ }
+ src_addr = convert (addr_type, src_addr);
+ src_addr = fold (build2 (PLUS_EXPR, addr_type, src_addr,
+ size_int (src_offset)));
+ src = build_fold_indirect_ref (src_addr);
+
+ dest_addr = convert (addr_type, addr);
+ dest_addr = fold (build2 (PLUS_EXPR, addr_type, dest_addr,
+ size_int (INTVAL (XEXP (slot, 1)))));
+ dest = build_fold_indirect_ref (dest_addr);
+
+ t = build2 (MODIFY_EXPR, void_type_node, dest, src);
+ gimplify_and_add (t, pre_p);
+ }
+ }
+
+ if (needed_intregs)
+ {
+ t = build2 (PLUS_EXPR, TREE_TYPE (gpr), gpr,
+ build_int_2 (needed_intregs * 8, 0));
+ t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr, t);
+ gimplify_and_add (t, pre_p);
+ }
+ if (needed_sseregs)
+ {
+ t =
+ build2 (PLUS_EXPR, TREE_TYPE (fpr), fpr,
+ build_int_2 (needed_sseregs * 16, 0));
+ t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr, t);
+ gimplify_and_add (t, pre_p);
+ }
+
+ t = build1 (GOTO_EXPR, void_type_node, lab_over);
+ gimplify_and_add (t, pre_p);
+
+ t = build1 (LABEL_EXPR, void_type_node, lab_false);
+ append_to_statement_list (t, pre_p);
+ }
+
+ /* ... otherwise out of the overflow area. */
+
+ /* Care for on-stack alignment if needed. */
+ if (FUNCTION_ARG_BOUNDARY (VOIDmode, type) <= 64)
+ t = ovf;
+ else
+ {
+ HOST_WIDE_INT align = FUNCTION_ARG_BOUNDARY (VOIDmode, type) / 8;
+ t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align - 1, 0));
+ t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align, -1));
+ }
+ gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
+
+ t2 = build2 (MODIFY_EXPR, void_type_node, addr, t);
+ gimplify_and_add (t2, pre_p);
+
+ t = build2 (PLUS_EXPR, TREE_TYPE (t), t,
+ build_int_2 (rsize * UNITS_PER_WORD, 0));
+ t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
+ gimplify_and_add (t, pre_p);
+
+ if (container)
+ {
+ t = build1 (LABEL_EXPR, void_type_node, lab_over);
+ append_to_statement_list (t, pre_p);
+ }
+
+ ptrtype = build_pointer_type (type);
+ addr = convert (ptrtype, addr);
+
+ if (indirect_p)
+ addr = build_fold_indirect_ref (addr);
+ *expr_p = build_fold_indirect_ref (addr);
+}
/* Return nonzero if OP is either a i387 or SSE fp register. */
int
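
To make the control flow of ix86_gimplify_va_arg concrete, here is a hand-written C sketch (not compiler output) of what the generated code does for va_arg (ap, int) on x86-64, assuming the standard SysV AMD64 va_list layout and the constants used above (REGPARM_MAX == 6 general registers of 8 bytes, UNITS_PER_WORD == 8):

/* Sketch only: mirrors the register-save-area vs. overflow-area split.  */
struct x86_64_va_list
{
  unsigned int gp_offset;       /* the `gpr' field used above */
  unsigned int fp_offset;       /* the `fpr' field            */
  void *overflow_arg_area;      /* `ovf'                      */
  void *reg_save_area;          /* `sav'                      */
};

static int
sketch_va_arg_int (struct x86_64_va_list *ap)
{
  void *addr;

  if (ap->gp_offset >= 6 * 8)           /* gpr >= 48: integer regs exhausted */
    {
      addr = ap->overflow_arg_area;     /* ... otherwise the overflow area   */
      ap->overflow_arg_area = (char *) addr + 8;   /* rsize * UNITS_PER_WORD */
    }
  else
    {
      addr = (char *) ap->reg_save_area + ap->gp_offset;  /* int_addr = sav + gpr */
      ap->gp_offset += 8;               /* needed_intregs * 8 */
    }
  return *(int *) addr;
}

Types that need both integer and SSE registers, or that are not contiguous in the save area, additionally go through the need_temp path above, which copies the pieces into a va_arg_tmp variable and uses its address instead.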
diff --git a/gcc/config/ia64/ia64.c b/gcc/config/ia64/ia64.c
index 0891ed2..14fdfe2 100644
--- a/gcc/config/ia64/ia64.c
+++ b/gcc/config/ia64/ia64.c
@@ -51,6 +51,7 @@ Boston, MA 02111-1307, USA. */
#include "hashtab.h"
#include "langhooks.h"
#include "cfglayout.h"
+#include "tree-gimple.h"
/* This is used for communication between ASM_OUTPUT_LABEL and
ASM_OUTPUT_LABELREF. */
@@ -273,6 +274,7 @@ static void ia64_vms_init_libfuncs (void)
static tree ia64_handle_model_attribute (tree *, tree, tree, int, bool *);
static void ia64_encode_section_info (tree, rtx, int);
static rtx ia64_struct_value_rtx (tree, int);
+static void ia64_gimplify_va_arg (tree *, tree *, tree *);
/* Table of valid machine attributes. */
@@ -407,6 +409,9 @@ static const struct attribute_spec ia64_attribute_table[] =
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
+#undef TARGET_GIMPLIFY_VA_ARG_EXPR
+#define TARGET_GIMPLIFY_VA_ARG_EXPR ia64_gimplify_va_arg
+
struct gcc_target targetm = TARGET_INITIALIZER;
/* Return 1 if OP is a valid operand for the MEM of a CALL insn. */
@@ -3987,6 +3992,39 @@ ia64_va_arg (tree valist, tree type)
return std_expand_builtin_va_arg (valist, type);
}
+
+static void
+ia64_gimplify_va_arg (tree *expr_p, tree *pre_p, tree *post_p)
+{
+ tree valist = TREE_OPERAND (*expr_p, 0);
+ tree type = TREE_TYPE (*expr_p);
+
+ /* Variable sized types are passed by reference. */
+ if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+ {
+ TREE_TYPE (*expr_p) = build_pointer_type (type);
+ std_gimplify_va_arg_expr (expr_p, pre_p, post_p);
+ *expr_p = build_fold_indirect_ref (*expr_p);
+ return;
+ }
+
+ /* Aggregate arguments with alignment larger than 8 bytes start at
+ the next even boundary. Integer and floating point arguments
+ do so if they are larger than 8 bytes, whether or not they are
+ also aligned larger than 8 bytes. */
+ if ((TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == INTEGER_TYPE)
+ ? int_size_in_bytes (type) > 8 : TYPE_ALIGN (type) > 8 * BITS_PER_UNIT)
+ {
+ tree t = build (PLUS_EXPR, TREE_TYPE (valist), valist,
+ build_int_2 (2 * UNITS_PER_WORD - 1, 0));
+ t = build (BIT_AND_EXPR, TREE_TYPE (t), t,
+ build_int_2 (-2 * UNITS_PER_WORD, -1));
+ t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
+ gimplify_and_add (t, pre_p);
+ }
+
+ std_gimplify_va_arg_expr (expr_p, pre_p, post_p);
+}
/* Return 1 if function return value returned in memory. Return 0 if it is
in a register. */
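
A small worked example of the alignment statement ia64_gimplify_va_arg builds before deferring to std_gimplify_va_arg_expr; this is a sketch under the assumption UNITS_PER_WORD == 8, so qualifying arguments are bumped to the next 16-byte boundary (uintptr_t is used here only to make the pointer arithmetic legal standalone C):

#include <stdint.h>

/* Equivalent of:  valist = (valist + 2*UNITS_PER_WORD - 1)
                            & -(2*UNITS_PER_WORD);  */
static char *
round_up_to_even_word_boundary (char *valist)
{
  uintptr_t p = (uintptr_t) valist;
  p = (p + 2 * 8 - 1) & ~(uintptr_t) (2 * 8 - 1);
  return (char *) p;
}

So a valist ending in 0x28 becomes 0x30 before the standard lowering runs, matching the comment above: over-aligned aggregates, and integer or floating arguments larger than 8 bytes, start at the next even word boundary.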
diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c
index 79bc885..d0bc91d 100644
--- a/gcc/config/rs6000/rs6000.c
+++ b/gcc/config/rs6000/rs6000.c
@@ -52,6 +52,7 @@
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
+#include "tree-gimple.h"
#if TARGET_XCOFF
#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
#endif
@@ -439,6 +440,7 @@ static tree get_prev_label (tree function_name);
#endif
static tree rs6000_build_builtin_va_list (void);
+static void rs6000_gimplify_va_arg (tree *, tree *, tree *);
/* Hash table stuff for keeping track of TOC entries. */
@@ -647,6 +649,9 @@ static const char alt_reg_names[][8] =
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
+#undef TARGET_GIMPLIFY_VA_ARG_EXPR
+#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
+
struct gcc_target targetm = TARGET_INITIALIZER;
@@ -5287,6 +5292,243 @@ rs6000_va_arg (tree valist, tree type)
return addr_rtx;
}
+void
+rs6000_gimplify_va_arg (tree *expr_p, tree *pre_p, tree *post_p)
+{
+ tree valist = TREE_OPERAND (*expr_p, 0);
+ tree type = TREE_TYPE (*expr_p);
+ tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
+ tree gpr, fpr, ovf, sav, reg, t, u;
+ int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
+ tree lab_false, lab_over, addr;
+ int align;
+ tree ptrtype = build_pointer_type (type);
+
+ if (DEFAULT_ABI != ABI_V4)
+ {
+ /* Variable sized types are passed by reference, as are AltiVec
+ vectors when 32-bit and not using the AltiVec ABI extension. */
+ if (int_size_in_bytes (type) < 0
+ || (TARGET_32BIT
+ && !TARGET_ALTIVEC_ABI
+ && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
+ {
+ /* Args grow upward. */
+ t = build2 (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
+ build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
+ t = build1 (NOP_EXPR, build_pointer_type (ptrtype), t);
+ t = build_fold_indirect_ref (t);
+ t = build_fold_indirect_ref (t);
+
+ *expr_p = t;
+ return;
+ }
+ if (targetm.calls.split_complex_arg
+ && TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tree elem_type = TREE_TYPE (type);
+ enum machine_mode elem_mode = TYPE_MODE (elem_type);
+ int elem_size = GET_MODE_SIZE (elem_mode);
+
+ if (elem_size < UNITS_PER_WORD)
+ {
+ tree real_part, imag_addr, dest_real, rr;
+ tree post = NULL_TREE;
+
+ /* This is a bit tricky because we can't just feed the
+ VA_ARG_EXPRs back into gimplify_expr; if we did,
+ gimplify_va_arg_expr would complain about trying to pass a
+ float. */
+ real_part = build1 (VA_ARG_EXPR, elem_type, valist);
+ rs6000_gimplify_va_arg (&real_part, pre_p, &post);
+ gimplify_expr (&real_part, pre_p, &post, is_gimple_val,
+ fb_rvalue);
+ append_to_statement_list (post, pre_p);
+
+ imag_addr = build1 (VA_ARG_EXPR, elem_type, valist);
+ rs6000_gimplify_va_arg (&imag_addr, pre_p, post_p);
+ imag_addr = build_fold_addr_expr (imag_addr);
+ gimplify_expr (&imag_addr, pre_p, post_p, is_gimple_val,
+ fb_rvalue);
+
+ /* We're not returning the value here, but the address.
+ real_part and imag_part are not contiguous, and we know
+ there is space available to pack real_part next to
+ imag_part. float _Complex is not promoted to
+ double _Complex by the default promotion rules that
+ promote float to double. */
+ if (2 * elem_size > UNITS_PER_WORD)
+ abort ();
+
+ dest_real = fold (build2 (MINUS_EXPR, TREE_TYPE (imag_addr),
+ imag_addr, ssize_int (elem_size)));
+ gimplify_expr (&dest_real, pre_p, post_p, is_gimple_val,
+ fb_rvalue);
+
+ rr = build_fold_indirect_ref (dest_real);
+ rr = build2 (MODIFY_EXPR, void_type_node, rr, real_part);
+ gimplify_and_add (rr, pre_p);
+
+ dest_real = convert (build_pointer_type (type), dest_real);
+ *expr_p = build_fold_indirect_ref (dest_real);
+
+ return;
+ }
+ }
+
+ std_gimplify_va_arg_expr (expr_p, pre_p, post_p);
+ return;
+ }
+
+ f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
+ f_fpr = TREE_CHAIN (f_gpr);
+ f_res = TREE_CHAIN (f_fpr);
+ f_ovf = TREE_CHAIN (f_res);
+ f_sav = TREE_CHAIN (f_ovf);
+
+ valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
+ gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
+ fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
+ ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
+ sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
+
+ size = int_size_in_bytes (type);
+ rsize = (size + 3) / 4;
+ align = 1;
+
+ if (AGGREGATE_TYPE_P (type)
+ || TYPE_MODE (type) == TFmode
+ || (!TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
+ {
+ /* Aggregates, long doubles, and AltiVec vectors are passed by
+ reference. */
+ indirect_p = 1;
+ reg = gpr;
+ n_reg = 1;
+ sav_ofs = 0;
+ sav_scale = 4;
+ size = 4;
+ rsize = 1;
+ }
+ else if (TARGET_HARD_FLOAT && TARGET_FPRS
+ && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
+ {
+ /* FP args go in FP registers, if present. */
+ indirect_p = 0;
+ reg = fpr;
+ n_reg = 1;
+ sav_ofs = 8*4;
+ sav_scale = 8;
+ if (TYPE_MODE (type) == DFmode)
+ align = 8;
+ }
+ else
+ {
+ /* Otherwise into GP registers. */
+ indirect_p = 0;
+ reg = gpr;
+ n_reg = rsize;
+ sav_ofs = 0;
+ sav_scale = 4;
+ if (n_reg == 2)
+ align = 8;
+ }
+
+ /* Pull the value out of the saved registers.... */
+
+ lab_over = NULL;
+ addr = create_tmp_var (ptr_type_node, "addr");
+ DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
+
+ /* AltiVec vectors never go in registers when -mabi=altivec. */
+ if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
+ align = 16;
+ else
+ {
+ lab_false = create_artificial_label ();
+ lab_over = create_artificial_label ();
+
+ /* Long long and SPE vectors are aligned in the registers.
+ So is any other two-GPR item, such as complex int, due to a
+ historical mistake. */
+ u = reg;
+ if (n_reg == 2)
+ {
+ u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
+ build_int_2 (n_reg - 1, 0));
+ u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
+ }
+
+ t = build_int_2 (8 - n_reg + 1, 0);
+ TREE_TYPE (t) = TREE_TYPE (reg);
+ t = build2 (GE_EXPR, boolean_type_node, u, t);
+ u = build1 (GOTO_EXPR, void_type_node, lab_false);
+ t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
+ gimplify_and_add (t, pre_p);
+
+ t = sav;
+ if (sav_ofs)
+ t = build2 (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
+
+ u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
+ build_int_2 (n_reg, 0));
+ u = build1 (CONVERT_EXPR, integer_type_node, u);
+ u = build2 (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
+ t = build2 (PLUS_EXPR, ptr_type_node, t, u);
+
+ t = build2 (MODIFY_EXPR, void_type_node, addr, t);
+ gimplify_and_add (t, pre_p);
+
+ t = build1 (GOTO_EXPR, void_type_node, lab_over);
+ gimplify_and_add (t, pre_p);
+
+ t = build1 (LABEL_EXPR, void_type_node, lab_false);
+ append_to_statement_list (t, pre_p);
+
+ if (n_reg > 2)
+ {
+ /* Ensure that we don't find any more args in regs.
+ Alignment has taken care of the n_reg == 2 case. */
+ t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
+ gimplify_and_add (t, pre_p);
+ }
+ }
+
+ /* ... otherwise out of the overflow area. */
+
+ /* Care for on-stack alignment if needed. */
+ t = ovf;
+ if (align != 1)
+ {
+ t = build2 (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (align - 1, 0));
+ t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align, -1));
+ }
+ gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
+
+ u = build2 (MODIFY_EXPR, void_type_node, addr, t);
+ gimplify_and_add (u, pre_p);
+
+ t = build2 (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
+ t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
+ gimplify_and_add (t, pre_p);
+
+ if (lab_over)
+ {
+ t = build1 (LABEL_EXPR, void_type_node, lab_over);
+ append_to_statement_list (t, pre_p);
+ }
+
+ if (indirect_p)
+ {
+ addr = convert (build_pointer_type (ptrtype), addr);
+ addr = build_fold_indirect_ref (addr);
+ }
+ else
+ addr = convert (ptrtype, addr);
+
+ *expr_p = build_fold_indirect_ref (addr);
+}
+
/* Builtins. */
#define def_builtin(MASK, NAME, TYPE, CODE) \
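
Analogous to the i386 sketch earlier, here is a hand-written illustration (not compiler output) of what rs6000_gimplify_va_arg produces for va_arg (ap, int) under the 32-bit SVR4 ABI (DEFAULT_ABI == ABI_V4), assuming the va_list record built by rs6000_build_builtin_va_list (gpr, fpr, reserved, overflow_arg_area, reg_save_area; the exact field types are an assumption here) and the GP-register constants chosen above (n_reg = 1, sav_ofs = 0, sav_scale = 4, size = 4):

/* Sketch only.  */
struct v4_va_list
{
  unsigned char gpr;        /* next general register slot, 0..8  */
  unsigned char fpr;        /* next floating register slot, 0..8 */
  unsigned short reserved;
  char *overflow_arg_area;  /* `ovf' */
  char *reg_save_area;      /* `sav' */
};

static int
sketch_va_arg_int (struct v4_va_list *ap)
{
  char *addr;

  if (ap->gpr >= 8)                       /* 8 - n_reg + 1 == 8: regs used up */
    {
      addr = ap->overflow_arg_area;       /* ... otherwise the overflow area  */
      ap->overflow_arg_area = addr + 4;   /* advance by `size'                */
    }
  else
    addr = ap->reg_save_area + 4 * ap->gpr++;   /* sav + sav_scale * gpr++    */

  return *(int *) addr;
}

Doubles follow the same shape with reg = fpr, sav_ofs = 8*4 and sav_scale = 8, and the by-reference cases (aggregates, long double, AltiVec vectors without the AltiVec ABI) fetch a pointer this way and then dereference it, which is what the indirect_p handling at the end of the function does.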
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index b63f1c2..ecd59f8 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -10001,4 +10001,73 @@ fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
return tem;
}
+/* Build an expression for the address of T. Folds away INDIRECT_REF to
+ avoid confusing the gimplify process. */
+
+tree
+build_fold_addr_expr_with_type (tree t, tree ptrtype)
+{
+ if (TREE_CODE (t) == INDIRECT_REF)
+ {
+ t = TREE_OPERAND (t, 0);
+ if (TREE_TYPE (t) != ptrtype)
+ t = build1 (NOP_EXPR, ptrtype, t);
+ }
+ else
+ {
+ tree base = t;
+ while (TREE_CODE (base) == COMPONENT_REF
+ || TREE_CODE (base) == ARRAY_REF)
+ base = TREE_OPERAND (base, 0);
+ if (DECL_P (base))
+ TREE_ADDRESSABLE (base) = 1;
+
+ t = build1 (ADDR_EXPR, ptrtype, t);
+ }
+
+ return t;
+}
+
+tree
+build_fold_addr_expr (tree t)
+{
+ return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
+}
+
+/* Builds an expression for an indirection through T, simplifying some
+ cases. */
+
+tree
+build_fold_indirect_ref (tree t)
+{
+ tree type = TREE_TYPE (TREE_TYPE (t));
+ tree sub = t;
+ tree subtype;
+
+ STRIP_NOPS (sub);
+ if (TREE_CODE (sub) == ADDR_EXPR)
+ {
+ tree op = TREE_OPERAND (sub, 0);
+ tree optype = TREE_TYPE (op);
+ /* *&p => p */
+ if (lang_hooks.types_compatible_p (type, optype))
+ return op;
+ /* *(foo *)&fooarray => fooarray[0] */
+ else if (TREE_CODE (optype) == ARRAY_TYPE
+ && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
+ return build2 (ARRAY_REF, type, op, size_zero_node);
+ }
+
+ /* *(foo *)fooarrptr => (*fooarrptr)[0] */
+ subtype = TREE_TYPE (sub);
+ if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
+ && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
+ {
+ sub = build_fold_indirect_ref (sub);
+ return build2 (ARRAY_REF, type, sub, size_zero_node);
+ }
+
+ return build1 (INDIRECT_REF, type, t);
+}
+
#include "gt-fold-const.h"
diff --git a/gcc/gimplify.c b/gcc/gimplify.c
index c6378aa..c25a891 100644
--- a/gcc/gimplify.c
+++ b/gcc/gimplify.c
@@ -45,6 +45,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "output.h"
#include "expr.h"
#include "ggc.h"
+#include "target.h"
static struct gimplify_ctx
{
@@ -237,6 +238,15 @@ append_to_statement_list_force (tree t, tree *list_p)
append_to_statement_list_1 (t, list_p, t != NULL);
}
+/* Both gimplify the statement T and append it to LIST_P. */
+
+void
+gimplify_and_add (tree t, tree *list_p)
+{
+ gimplify_stmt (&t);
+ append_to_statement_list (t, list_p);
+}
+
/* Add T to the end of a COMPOUND_EXPR pointed by LIST_P. The type
of the result is the type of T. */
@@ -668,13 +678,17 @@ copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
else
{
TREE_VISITED (t) = 1;
- if (TREE_CODE (*tp) == VA_ARG_EXPR)
+ if (TREE_CODE (*tp) == VA_ARG_EXPR
+ && targetm.calls.gimplify_va_arg_expr == NULL)
{
/* Mark any _DECL inside the operand as volatile to avoid
the optimizers messing around with it. We have to do this
early, otherwise we might mark a variable as volatile
after we gimplify other statements that use the variable
assuming it's not volatile. */
+
+ /* FIXME once most targets define the above hook, this should
+ go away (perhaps along with the #include "target.h"). */
walk_tree (&TREE_OPERAND (*tp, 0), mark_decls_volatile_r,
NULL, NULL);
}
@@ -1633,39 +1647,6 @@ gimplify_minimax_expr (tree *expr_p, tree *pre_p, tree *post_p)
return GS_OK;
}
-/* Build an expression for the address of T. Folds away INDIRECT_REF to
- avoid confusing the gimplify process. */
-
-static tree
-build_addr_expr_with_type (tree t, tree ptrtype)
-{
- if (TREE_CODE (t) == INDIRECT_REF)
- {
- t = TREE_OPERAND (t, 0);
- if (TREE_TYPE (t) != ptrtype)
- t = build1 (NOP_EXPR, ptrtype, t);
- }
- else
- {
- tree base = t;
- while (TREE_CODE (base) == COMPONENT_REF
- || TREE_CODE (base) == ARRAY_REF)
- base = TREE_OPERAND (base, 0);
- if (DECL_P (base))
- TREE_ADDRESSABLE (base) = 1;
-
- t = build1 (ADDR_EXPR, ptrtype, t);
- }
-
- return t;
-}
-
-static tree
-build_addr_expr (tree t)
-{
- return build_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
-}
-
/* Subroutine of gimplify_compound_lval and gimplify_array_ref.
Converts an ARRAY_REF to the equivalent *(&array + offset) form. */
@@ -1716,7 +1697,7 @@ gimplify_array_ref_to_plus (tree *expr_p, tree *pre_p, tree *post_p)
if (ret == GS_ERROR)
return ret;
- addr = build_addr_expr_with_type (array, ptrtype);
+ addr = build_fold_addr_expr_with_type (array, ptrtype);
result = fold (build (add_code, ptrtype, addr, offset));
*expr_p = build1 (INDIRECT_REF, elttype, result);
@@ -2533,9 +2514,9 @@ gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
t = TYPE_SIZE_UNIT (TREE_TYPE (*to_p));
t = unshare_expr (t);
args = tree_cons (NULL, t, NULL);
- t = build_addr_expr (*from_p);
+ t = build_fold_addr_expr (*from_p);
args = tree_cons (NULL, t, args);
- dest = build_addr_expr (*to_p);
+ dest = build_fold_addr_expr (*to_p);
args = tree_cons (NULL, dest, args);
t = implicit_built_in_decls[BUILT_IN_MEMCPY];
t = build_function_call_expr (t, args);
@@ -3239,14 +3220,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
break;
case VA_ARG_EXPR:
- /* Mark any _DECL inside the operand as volatile to avoid the
- optimizers messing around with it. FIXME: Remove this once
- VA_ARG_EXPRs are properly lowered. */
- walk_tree (&TREE_OPERAND (*expr_p, 0), mark_decls_volatile_r,
- NULL, NULL);
-
- /* va_arg expressions are in GIMPLE form already. */
- ret = GS_ALL_DONE;
+ ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
break;
case CONVERT_EXPR:
@@ -3586,7 +3560,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
/* An lvalue will do. Take the address of the expression, store it
in a temporary, and replace the expression with an INDIRECT_REF of
that temporary. */
- tmp = build_addr_expr (*expr_p);
+ tmp = build_fold_addr_expr (*expr_p);
gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
*expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
}
diff --git a/gcc/target-def.h b/gcc/target-def.h
index f8c832a..40e9610 100644
--- a/gcc/target-def.h
+++ b/gcc/target-def.h
@@ -358,6 +358,14 @@ Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
default_pretend_outgoing_varargs_named
#define TARGET_SPLIT_COMPLEX_ARG NULL
+#ifdef EXPAND_BUILTIN_VA_ARG
+/* If there's a target-specific va_arg expander, there needs to be a
+ target-specific gimplifier. */
+#define TARGET_GIMPLIFY_VA_ARG_EXPR NULL
+#else
+#define TARGET_GIMPLIFY_VA_ARG_EXPR std_gimplify_va_arg_expr
+#endif
+
#define TARGET_CALLS { \
TARGET_PROMOTE_FUNCTION_ARGS, \
TARGET_PROMOTE_FUNCTION_RETURN, \
@@ -370,6 +378,7 @@ Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
TARGET_STRICT_ARGUMENT_NAMING, \
TARGET_PRETEND_OUTGOING_VARARGS_NAMED, \
TARGET_SPLIT_COMPLEX_ARG, \
+ TARGET_GIMPLIFY_VA_ARG_EXPR, \
}
/* The whole shebang. */
diff --git a/gcc/target.h b/gcc/target.h
index a7eb743..a2a5646 100644
--- a/gcc/target.h
+++ b/gcc/target.h
@@ -470,6 +470,10 @@ struct gcc_target
/* Given a complex type T, return true if a parameter of type T
should be passed as two scalars. */
bool (* split_complex_arg) (tree type);
+
+ /* Gimplifies a VA_ARG_EXPR. */
+ void (* gimplify_va_arg_expr) (tree *expr_p, tree *pre_p,
+ tree *post_p);
} calls;
/* Leave the boolean fields at the end. */
diff --git a/gcc/tree-gimple.h b/gcc/tree-gimple.h
index 59cb3b3..eb3dea8 100644
--- a/gcc/tree-gimple.h
+++ b/gcc/tree-gimple.h
@@ -107,6 +107,7 @@ void gimplify_to_stmt_list (tree *);
void gimplify_body (tree *, tree);
void push_gimplify_context (void);
void pop_gimplify_context (tree);
+void gimplify_and_add (tree, tree *);
/* Miscellaneous helpers. */
tree get_base_address (tree t);
@@ -121,6 +122,8 @@ tree build_and_jump (tree *);
tree alloc_stmt_list (void);
void free_stmt_list (tree);
tree force_labels_r (tree *, int *, void *);
+enum gimplify_status gimplify_va_arg_expr (tree *, tree *, tree *);
+void std_gimplify_va_arg_expr (tree *, tree *, tree *);
/* In tree-nested.c. */
extern void lower_nested_functions (tree);
diff --git a/gcc/tree.h b/gcc/tree.h
index c6a9f07..03c6415 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -1602,7 +1602,7 @@ struct tree_block GTY(())
#define TYPE_VECTOR_SUBPARTS(VECTOR_TYPE) \
GET_MODE_NUNITS (VECTOR_TYPE_CHECK (VECTOR_TYPE)->type.mode)
- /* Indicates that objects of this type must be initialized by calling a
+/* Indicates that objects of this type must be initialized by calling a
function when they are created. */
#define TYPE_NEEDS_CONSTRUCTING(NODE) \
(TYPE_CHECK (NODE)->type.needs_constructing_flag)
@@ -3468,6 +3468,9 @@ extern tree nondestructive_fold_unary_to_constant (enum tree_code, tree, tree);
extern tree nondestructive_fold_binary_to_constant (enum tree_code, tree, tree, tree);
extern tree fold_read_from_constant_string (tree);
extern tree int_const_binop (enum tree_code, tree, tree, int);
+extern tree build_fold_addr_expr (tree);
+extern tree build_fold_addr_expr_with_type (tree, tree);
+extern tree build_fold_indirect_ref (tree);
/* In builtins.c */
extern tree fold_builtin (tree);