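/* Build two statements: *SAVE gets an assignment of the result of
   __builtin_stack_save to a fresh "saved_stack" temporary, and *RESTORE
   gets the matching __builtin_stack_restore call on that temporary.  */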
static void
build_stack_save_restore (tree *save, tree *restore)
{
tree save_call, tmp_var;
save_call =
build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
NULL_TREE);
tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
*save = build2 (MODIFY_EXPR, ptr_type_node, tmp_var, save_call);
*restore =
build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
tree_cons (NULL_TREE, tmp_var, NULL_TREE));
}
/* Gimplify a BIND_EXPR. Just voidify and recurse. */
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p)
{
tree bind_expr = *expr_p;
bool old_save_stack = gimplify_ctxp->save_stack;
tree t;
temp = voidify_wrapper_expr (bind_expr, temp);
/* Mark variables seen in this bind expr. */
for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
{
if (TREE_CODE (t) == VAR_DECL)
{
struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
/* Mark variable as local. */
if (ctx && !is_global_var (t)
&& (! DECL_SEEN_IN_BIND_EXPR_P (t)
|| splay_tree_lookup (ctx->variables,
(splay_tree_key) t) == NULL))
omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
}
/* Preliminarily mark non-addressable complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
&& !TREE_THIS_VOLATILE (t)
&& (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
&& !needs_to_live_in_memory (t))
DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
}
gimple_push_bind_expr (bind_expr);
gimplify_ctxp->save_stack = false;
gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
if (gimplify_ctxp->save_stack)
{
tree stack_save, stack_restore;
/* Save stack on entry and restore it on exit. Add a try_finally
block to achieve this. Note that mudflap depends on the
format of the emitted code: see mx_register_decls(). */
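      /* The rewritten body is roughly (a sketch; the temporary name is
         illustrative):

           saved_stack.1 = __builtin_stack_save ();
           try
             {
               <original BIND_EXPR_BODY>
             }
           finally
             {
               __builtin_stack_restore (saved_stack.1);
             }  */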
build_stack_save_restore (&stack_save, &stack_restore);
t = build2 (TRY_FINALLY_EXPR, void_type_node,
BIND_EXPR_BODY (bind_expr), NULL_TREE);
append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
BIND_EXPR_BODY (bind_expr) = NULL_TREE;
append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
}
gimplify_ctxp->save_stack = old_save_stack;
gimple_pop_bind_expr ();
if (temp)
{
*expr_p = temp;
append_to_statement_list (bind_expr, pre_p);
return GS_OK;
}
else
return GS_ALL_DONE;
}
/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
GIMPLE value, it is assigned to a new temporary and the statement is
re-written to return the temporary.
PRE_P points to the list where side effects that must happen before
STMT should be stored. */
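/* For example (a sketch; the temporary name is illustrative), in a
   function returning an int in a register,

     return a + b;

   is rewritten so that "retval.1 = a + b" is gimplified into PRE_P and
   the statement itself becomes "return <retval> = retval.1".  */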
static enum gimplify_status
gimplify_return_expr (tree stmt, tree *pre_p)
{
tree ret_expr = TREE_OPERAND (stmt, 0);
tree result_decl, result;
if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
|| ret_expr == error_mark_node)
return GS_ALL_DONE;
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);
      if (TREE_CODE (result_decl) == INDIRECT_REF)
        /* See through a return by reference.  */
        result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
                   || TREE_CODE (ret_expr) == INIT_EXPR)
                  && TREE_CODE (result_decl) == RESULT_DECL);
    }
/* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
Recall that aggregate_value_p is FALSE for any aggregate type that is
returned in registers. If we're returning values in registers, then
we don't want to extend the lifetime of the RESULT_DECL, particularly
across another call. In addition, for those aggregates for which
hard_function_value generates a PARALLEL, we'll die during normal
expansion of structure assignments; there's special code in expand_return
to handle this case that does not exist in expand_expr. */
if (!result_decl
|| aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
result = result_decl;
else if (gimplify_ctxp->return_temp)
result = gimplify_ctxp->return_temp;
else
{
result = create_tmp_var (TREE_TYPE (result_decl), NULL);
/* ??? With complex control flow (usually involving abnormal edges),
we can wind up warning about an uninitialized value for this. Due
to how this variable is constructed and initialized, this is never
true. Give up and never warn. */
TREE_NO_WARNING (result) = 1;
gimplify_ctxp->return_temp = result;
}
/* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
Then gimplify the whole thing. */
if (result != result_decl)
TREE_OPERAND (ret_expr, 0) = result;
gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
/* If we didn't use a temporary, then the result is just the result_decl.
Otherwise we need a simple copy. This should already be gimple. */
if (result == result_decl)
ret_expr = result;
else
ret_expr = build2 (MODIFY_EXPR, TREE_TYPE (result), result_decl, result);
TREE_OPERAND (stmt, 0) = ret_expr;
return GS_ALL_DONE;
}
/* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
and initialization explicit. */
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p)
{
tree stmt = *stmt_p;
tree decl = DECL_EXPR_DECL (stmt);
*stmt_p = NULL_TREE;
if (TREE_TYPE (decl) == error_mark_node)
return GS_ERROR;
if ((TREE_CODE (decl) == TYPE_DECL
|| TREE_CODE (decl) == VAR_DECL)
&& !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
{
tree init = DECL_INITIAL (decl);
if (!TREE_CONSTANT (DECL_SIZE (decl)))
{
/* This is a variable-sized decl. Simplify its size and mark it
for deferred expansion. Note that mudflap depends on the format
of the emitted code: see mx_register_decls(). */
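          /* For example, for "char buf[n]" the statement emitted here is
             roughly (a sketch; temporary names are illustrative)

               buf.1 = (char (*)[]) __builtin_alloca (D.2);

             where D.2 is the gimplified DECL_SIZE_UNIT; every later use
             of "buf" is rewritten as "*buf.1" via DECL_VALUE_EXPR, and
             setting save_stack below makes the enclosing BIND_EXPR wrap
             the body in a stack save/restore pair.  */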
tree t, args, addr, ptr_type;
gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
/* All occurrences of this decl in final gimplified code will be
replaced by indirection. Setting DECL_VALUE_EXPR does two
things: First, it lets the rest of the gimplifier know what
replacement to use. Second, it lets the debug info know
where to find the value. */
ptr_type = build_pointer_type (TREE_TYPE (decl));
addr = create_tmp_var (ptr_type, get_name (decl));
DECL_IGNORED_P (addr) = 0;
t = build_fold_indirect_ref (addr);
SET_DECL_VALUE_EXPR (decl, t);
DECL_HAS_VALUE_EXPR_P (decl) = 1;
args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
t = built_in_decls[BUILT_IN_ALLOCA];
t = build_function_call_expr (t, args);
t = fold_convert (ptr_type, t);
t = build2 (MODIFY_EXPR, void_type_node, addr, t);
gimplify_and_add (t, stmt_p);
/* Indicate that we need to restore the stack level when the
enclosing BIND_EXPR is exited. */
gimplify_ctxp->save_stack = true;
}
if (init && init != error_mark_node)
{
if (!TREE_STATIC (decl))
{
DECL_INITIAL (decl) = NULL_TREE;
init = build2 (INIT_EXPR, void_type_node, decl, init);
gimplify_and_add (init, stmt_p);
}
else
/* We must still examine initializers for static variables
as they may contain a label address. */
walk_tree (&init, force_labels_r, NULL, NULL);
}
/* Some front ends do not explicitly declare all anonymous
artificial variables. We compensate here by declaring the
variables, though it would be better if the front ends would
explicitly declare them. */
if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
&& DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
gimple_add_tmp_var (decl);
}
return GS_ALL_DONE;
}
/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
and replacing the LOOP_EXPR with goto, but if the loop contains an
EXIT_EXPR, we need to append a label for it to jump to. */
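/* For example, LOOP_EXPR <body> becomes (a sketch)

     start:;
     <body>
     goto start;

   with the exit label appended after the goto when <body> contained an
   EXIT_EXPR.  */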
static enum gimplify_status
gimplify_loop_expr (tree *expr_p, tree *pre_p)
{
tree saved_label = gimplify_ctxp->exit_label;
tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
append_to_statement_list (start_label, pre_p);
gimplify_ctxp->exit_label = NULL_TREE;
gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
if (gimplify_ctxp->exit_label)
{
append_to_statement_list (jump_stmt, pre_p);
*expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
}
else
*expr_p = jump_stmt;
gimplify_ctxp->exit_label = saved_label;
return GS_ALL_DONE;
}
/* Compare two case labels. Because the front end should already have
made sure that case ranges do not overlap, it is enough to only compare
the CASE_LOW values of each case label. */
static int
compare_case_labels (const void *p1, const void *p2)
{
tree case1 = *(tree *)p1;
tree case2 = *(tree *)p2;
return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
}
/* Sort the case labels in LABEL_VEC in place in ascending order. */
void
sort_case_labels (tree label_vec)
{
size_t len = TREE_VEC_LENGTH (label_vec);
tree default_case = TREE_VEC_ELT (label_vec, len - 1);
if (CASE_LOW (default_case))
{
size_t i;
/* The last label in the vector should be the default case
but it is not. */
for (i = 0; i < len; ++i)
{
tree t = TREE_VEC_ELT (label_vec, i);
if (!CASE_LOW (t))
{
default_case = t;
TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
TREE_VEC_ELT (label_vec, len - 1) = default_case;
break;
}
}
}
qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
compare_case_labels);
}
/* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
branch to. */
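/* For example (a sketch), given

     switch (x) { case 1: ...; default: ...; }

   the SWITCH_EXPR is emitted into PRE_P with its gimplified body placed
   immediately after it, each CASE_LABEL_EXPR in the body is replaced by
   a plain LABEL_EXPR, and SWITCH_LABELS becomes a TREE_VEC of the case
   labels sorted by CASE_LOW, with the default label stored last.  */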
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, tree *pre_p)
{
tree switch_expr = *expr_p;
enum gimplify_status ret;
ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
is_gimple_val, fb_rvalue);
if (SWITCH_BODY (switch_expr))
{
VEC(tree,heap) *labels, *saved_labels;
tree label_vec, default_case = NULL_TREE;
size_t i, len;
/* If someone can be bothered to fill in the labels, they can
be bothered to null out the body too. */
gcc_assert (!SWITCH_LABELS (switch_expr));
saved_labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels = saved_labels;
      i = 0;
      while (i < VEC_length (tree, labels))
        {
          tree elt = VEC_index (tree, labels, i);
          tree low = CASE_LOW (elt);
          bool remove_element = FALSE;

          if (low)
            {
              /* Discard empty ranges.  */
              tree high = CASE_HIGH (elt);
              if (high && INT_CST_LT (high, low))
                remove_element = TRUE;
            }
          else
            {
              /* The default case must be the last label in the list.  */
              gcc_assert (!default_case);
              default_case = elt;
              remove_element = TRUE;
            }

          if (remove_element)
            VEC_ordered_remove (tree, labels, i);
          else
            i++;
        }
      len = i;
label_vec = make_tree_vec (len + 1);
SWITCH_LABELS (*expr_p) = label_vec;
      append_to_statement_list (switch_expr, pre_p);

      if (!default_case)
        {
          /* If the switch has no default label, add one, so that we jump
             around the switch body.  */
          default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
                                 NULL_TREE, create_artificial_label ());
          append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
          *expr_p = build1 (LABEL_EXPR, void_type_node,
                            CASE_LABEL (default_case));
        }
      else
        *expr_p = SWITCH_BODY (switch_expr);
for (i = 0; i < len; ++i)
TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
TREE_VEC_ELT (label_vec, len) = default_case;
VEC_free (tree, heap, labels);
sort_case_labels (label_vec);
SWITCH_BODY (switch_expr) = NULL;
}
else
gcc_assert (SWITCH_LABELS (switch_expr));
return ret;
}
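/* Gimplify a CASE_LABEL_EXPR.  Record the label in the innermost
   gimplify context that is collecting case labels, and replace the
   expression with a plain LABEL_EXPR for CASE_LABEL (expr).  */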
static enum gimplify_status
gimplify_case_label_expr (tree *expr_p)
{
tree expr = *expr_p;
struct gimplify_ctx *ctxp;
/* Invalid OpenMP programs can play Duff's Device type games with
#pragma omp parallel. At least in the C front end, we don't
detect such invalid branches until after gimplification. */
for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
if (ctxp->case_labels)
break;
VEC_safe_push (tree, heap, ctxp->case_labels, expr);
*expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
return GS_ALL_DONE;
}
/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
if necessary. */
tree
build_and_jump (tree *label_p)
{
if (label_p == NULL)
/* If there's nowhere to jump, just fall through. */
return NULL_TREE;
if (*label_p == NULL_TREE)
{
tree label = create_artificial_label ();
*label_p = label;
}
return build1 (GOTO_EXPR, void_type_node, *label_p);
}
/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
This also involves building a label to jump to and communicating it to
gimplify_loop_expr through gimplify_ctxp->exit_label. */
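/* For example, EXIT_EXPR <cond> becomes (a sketch)

     if (cond) goto <exit_label>; else ;

   where <exit_label> is the shared label that gimplify_loop_expr emits
   after the loop's back-edge goto.  */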
static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
tree cond = TREE_OPERAND (*expr_p, 0);
tree expr;
expr = build_and_jump (&gimplify_ctxp->exit_label);
expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
*expr_p = expr;
return GS_OK;
}
/* A helper function to be called via walk_tree. Mark all labels under *TP
as being forced. To be called for DECL_INITIAL of static variables. */
tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
if (TYPE_P (*tp))
*walk_subtrees = 0;
if (TREE_CODE (*tp) == LABEL_DECL)
FORCED_LABEL (*tp) = 1;
return NULL_TREE;
}
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  if (TREE_TYPE (expr) != type)
    {
      tree old_type = TREE_TYPE (expr);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

      /* And wrap the whole thing inside a NOP_EXPR.  */
      expr = build1 (NOP_EXPR, old_type, expr);

      *expr_p = expr;
    }
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
to foo, embed that change in the ADDR_EXPR by converting
T array[U];
(T *)&array
==>
&array[L]
where L is the lower bound. For simplicity, only do this for constant
lower bound. */
static void
canonicalize_addr_expr (tree *expr_p)
{
tree expr = *expr_p;
tree ctype = TREE_TYPE (expr);
tree addr_expr = TREE_OPERAND (expr, 0);
tree atype = TREE_TYPE (addr_expr);
tree dctype, datype, ddatype, otype, obj_expr;
/* Both cast and addr_expr types should be pointers. */
if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
return;
/* The addr_expr type should be a pointer to an array. */
datype = TREE_TYPE (atype);
if (TREE_CODE (datype) != ARRAY_TYPE)
return;
/* Both cast and addr_expr types should address the same object type. */
dctype = TREE_TYPE (ctype);
ddatype = TREE_TYPE (datype);
if (!lang_hooks.types_compatible_p (ddatype, dctype))
return;
/* The addr_expr and the object type should match. */
obj_expr = TREE_OPERAND (addr_expr, 0);
otype = TREE_TYPE (obj_expr);
if (!lang_hooks.types_compatible_p (otype, datype))
return;
/* The lower bound and element sizes must be constant. */
if (!TYPE_SIZE_UNIT (dctype)
|| TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
|| !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
|| TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
return;
/* All checks succeeded. Build a new node to merge the cast. */
*expr_p = build4 (ARRAY_REF, dctype, obj_expr,
TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
size_int (TYPE_ALIGN_UNIT (dctype))));
*expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
}
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
underneath as appropriate. */
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
              || TREE_CODE (*expr_p) == CONVERT_EXPR);
/* Then strip away all but the outermost conversion. */
STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
/* And remove the outermost conversion if it's useless. */
if (tree_ssa_useless_type_conversion (*expr_p))
*expr_p = TREE_OPERAND (*expr_p, 0);
/* If we still have a conversion at the toplevel,
then canonicalize some constructs. */
if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
{
tree sub = TREE_OPERAND (*expr_p, 0);
/* If a NOP conversion is changing the type of a COMPONENT_REF
expression, then canonicalize its type now in order to expose more
redundant conversions. */
if (TREE_CODE (sub) == COMPONENT_REF)
canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
/* If a NOP conversion is changing a pointer to array of foo
to a pointer to foo, embed that change in the ADDR_EXPR. */
else if (TREE_CODE (sub) == ADDR_EXPR)
canonicalize_addr_expr (expr_p);
}
return GS_OK;
}
/* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
DECL_VALUE_EXPR, and it's worth re-examining things. */
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
tree decl = *expr_p;
/* ??? If this is a local variable, and it has not been seen in any
outer BIND_EXPR, then it's probably the result of a duplicate
declaration, for which we've already issued an error. It would
be really nice if the front end wouldn't leak these at all.
Currently the only known culprit is C++ destructors, as seen
in g++.old-deja/g++.jason/binding.C. */
if (TREE_CODE (decl) == VAR_DECL
&& !DECL_SEEN_IN_BIND_EXPR_P (decl)
&& !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
&& decl_function_context (decl) == current_function_decl)
{
gcc_assert (errorcount || sorrycount);
return GS_ERROR;
}
/* When within an OpenMP context, notice uses of variables. */
if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
return GS_ALL_DONE;
/* If the decl is an alias for another expression, substitute it now. */
if (DECL_HAS_VALUE_EXPR_P (decl))
{
*expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
return GS_OK;
}
return GS_ALL_DONE;
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node pointed to by EXPR_P.

compound_lval
: min_lval '[' val ']'
| min_lval '.' ID
| compound_lval '[' val ']'
| compound_lval '.' ID
This is not part of the original SIMPLE definition, which separates
array and member references, but it seems reasonable to handle them
together. Also, this way we don't run into problems with union
aliasing; gcc requires that for accesses through a union to alias, the
union reference must be explicit, which was not always the case when we
were splitting up array and member refs.
PRE_P points to the list where side effects that must happen before
*EXPR_P should be stored.
POST_P points to the list where side effects that must happen after
*EXPR_P should be stored. */
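/* For example (a sketch), gimplifying "a.b[i].c" as an rvalue first
   annotates any variable array bounds and element sizes, then gimplifies
   the base "a" as a minimal lvalue, and finally gimplifies the index "i"
   into a register temporary if it is not already gimple.  */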
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, tree *pre_p,
                        tree *post_p, fallback_t fallback)
{
  tree *p;
  VEC(tree,heap) *stack;
  enum gimplify_status ret = GS_OK, tret;
  int i;
/* Create a stack of the subexpressions so later we can walk them in
order from inner to outer. */
stack = VEC_alloc (tree, heap, 10);
/* We can handle anything that get_inner_reference can deal with. */
for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
{
restart:
/* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
if (TREE_CODE (*p) == INDIRECT_REF)
*p = fold_indirect_ref (*p);
if (handled_component_p (*p))
;
/* Expand DECL_VALUE_EXPR now. In some cases that may expose
additional COMPONENT_REFs. */
else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
&& gimplify_var_or_parm_decl (p) == GS_OK)
goto restart;
      else
        break;

      VEC_safe_push (tree, heap, stack, *p);
    }

  gcc_assert (VEC_length (tree, stack));
/* Now STACK is a stack of pointers to all the refs we've walked through
and P points to the innermost expression.
Java requires that we elaborated nodes in source order. That
means we must gimplify the inner expression followed by each of
the indices, in order. But we can't gimplify the inner
expression until we deal with any variable bounds, sizes, or
positions in order to deal with PLACEHOLDER_EXPRs.
So we do this in three steps. First we deal with the annotations
for any variables in the components, then we gimplify the base,
then we gimplify any indices, from left to right. */
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
    {
      tree t = VEC_index (tree, stack, i);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          /* Gimplify the low bound and element type size and put them into
             the ARRAY_REF.  If these values are set, they have already been
             gimplified.  */
          if (!TREE_OPERAND (t, 2))
            {
tree low = unshare_expr (array_ref_low_bound (t));
if (!is_gimple_min_invariant (low))
{
TREE_OPERAND (t, 2) = low;
tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
is_gimple_formal_tmp_reg, fb_rvalue);
ret = MIN (ret, tret);
}
}
if (!TREE_OPERAND (t, 3))
{
tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
tree elmt_size = unshare_expr (array_ref_element_size (t));
tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
/* Divide the element size by the alignment of the element
type (above). */
elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
if (!is_gimple_min_invariant (elmt_size))
{
TREE_OPERAND (t, 3) = elmt_size;
tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
is_gimple_formal_tmp_reg, fb_rvalue);
ret = MIN (ret, tret);
                }
            }
        }
      else if (TREE_CODE (t) == COMPONENT_REF)
{
/* Set the field offset into T and gimplify it. */
if (!TREE_OPERAND (t, 2))
{
tree offset = unshare_expr (component_ref_field_offset (t));
tree field = TREE_OPERAND (t, 1);
tree factor
= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
/* Divide the offset by its alignment. */
offset = size_binop (EXACT_DIV_EXPR, offset, factor);
if (!is_gimple_min_invariant (offset))
{
TREE_OPERAND (t, 2) = offset;
tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
is_gimple_formal_tmp_reg, fb_rvalue);
ret = MIN (ret, tret);
}
}
}
}
/* Step 2 is to gimplify the base expression. Make sure lvalue is set
so as to match the min_lval predicate. Failure to do so may result
in the creation of large aggregate temporaries. */
tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
fallback | fb_lvalue);
ret = MIN (ret, tret);
/* And finally, the indices and operands to BIT_FIELD_REF. During this
loop we also remove any useless conversions. */
  for (; VEC_length (tree, stack) > 0; )
    {
      tree t = VEC_pop (tree, stack);
if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
{
/* Gimplify the dimension.
Temporary fix for gcc.c-torture/execute/20040313-1.c.
Gimplify non-constant array indices into a temporary
variable.
FIXME - The real fix is to gimplify post-modify
expressions into a minimal gimple lvalue. However, that
exposes bugs in alias analysis. The alias analyzer does
not handle &PTR->FIELD very well. Will fix after the
branch is merged into mainline (dnovillo 2004-05-03). */
if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
{
tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
is_gimple_formal_tmp_reg, fb_rvalue);
ret = MIN (ret, tret);
}
}
else if (TREE_CODE (t) == BIT_FIELD_REF)
{
tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
is_gimple_val, fb_rvalue);
ret = MIN (ret, tret);
tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
is_gimple_val, fb_rvalue);
ret = MIN (ret, tret);
}
STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
/* The innermost expression P may have originally had TREE_SIDE_EFFECTS
set which would have caused all the outer expressions in EXPR_P
leading to P to also have had TREE_SIDE_EFFECTS set. */
recalculate_side_effects (t);
}
tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
ret = MIN (ret, tret);
/* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
Richard Henderson
committed
if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
{
canonicalize_component_ref (expr_p);
ret = MIN (ret, GS_OK);
}
  VEC_free (tree, heap, stack);

  return ret;
}
/* Gimplify the self modifying expression pointed to by EXPR_P
(++, --, +=, -=).
PRE_P points to the list where side effects that must happen before
*EXPR_P should be stored.
POST_P points to the list where side effects that must happen after
*EXPR_P should be stored.
WANT_VALUE is nonzero iff we want to use the value of this expression
in another expression. */
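/* For example (a sketch; the temporary name is illustrative), when the
   value is used, "x = y++" ends up as

     y.1 = y;
     x = y.1;
     y = y.1 + 1;   <-- queued on the post queue

   whereas a plain "++y" statement simply becomes "y = y + 1".  */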
static enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
bool want_value)
{
enum tree_code code;
tree lhs, lvalue, rhs, t1;
bool postfix;
enum tree_code arith_code;
enum gimplify_status ret;
code = TREE_CODE (*expr_p);
gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
|| code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
/* Prefix or postfix? */
if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
/* Faster to treat as prefix if result is not used. */
postfix = want_value;
else
postfix = false;
/* Add or subtract? */
if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
arith_code = PLUS_EXPR;
else
arith_code = MINUS_EXPR;
/* Gimplify the LHS into a GIMPLE lvalue. */
lvalue = TREE_OPERAND (*expr_p, 0);
ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
if (ret == GS_ERROR)
return ret;
/* Extract the operands to the arithmetic operation. */
lhs = lvalue;
rhs = TREE_OPERAND (*expr_p, 1);
/* For postfix operator, we evaluate the LHS to an rvalue and then use
that as the result value and in the postqueue operation. */
if (postfix)
{
ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
if (ret == GS_ERROR)
return ret;
}
  t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
  t1 = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);

  if (postfix)
    {
      gimplify_and_add (t1, post_p);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
else
{
*expr_p = t1;
return GS_OK;
}
}
/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
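/* For example, an object whose type is a variable-length array type has
   its byte size (TYPE_SIZE_UNIT, with any PLACEHOLDER_EXPR substituted
   against the object itself) attached as the second operand of a
   WITH_SIZE_EXPR, so callers such as gimplify_arg still know how many
   bytes the value occupies (a sketch of the intent).  */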
static void
maybe_with_size_expr (tree *expr_p)
{
tree expr = *expr_p;
tree type = TREE_TYPE (expr);
tree size;
/* If we've already wrapped this or the type is error_mark_node, we can't do
anything. */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || TREE_CODE (size) == INTEGER_CST)
return;
/* Otherwise, make a WITH_SIZE_EXPR. */
size = unshare_expr (size);
size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}
/* Subroutine of gimplify_call_expr: Gimplify a single argument. */
static enum gimplify_status
gimplify_arg (tree *expr_p, tree *pre_p)
{
bool (*test) (tree);
fallback_t fb;
/* In general, we allow lvalues for function arguments to avoid
extra overhead of copying large aggregates out of even larger
aggregates into temporaries only to copy the temporaries to
the argument list. Make optimizers happy by pulling out to
temporaries those types that fit in registers. */
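  /* For example (a sketch), an "int" argument is forced into a GIMPLE
     value (a register temporary if needed), while a large struct
     argument may be passed as an lvalue rather than being copied into a
     temporary first.  */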
if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
test = is_gimple_val, fb = fb_rvalue;
else
test = is_gimple_lvalue, fb = fb_either;
/* If this is a variable sized type, we must remember the size. */
maybe_with_size_expr (expr_p);
/* There is a sequence point before a function call. Side effects in
the argument list must occur before the actual call. So, when
gimplifying arguments, force gimplify_expr to use an internal
post queue which is then appended to the end of PRE_P. */
return gimplify_expr (expr_p, pre_p, NULL, test, fb);
}
/* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
list where side effects that must happen before *EXPR_P should be stored.
WANT_VALUE is true if the result of the call is desired. */
static enum gimplify_status
gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
{
tree decl;
tree arglist;
enum gimplify_status ret;
gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
/* For reliable diagnostics during inlining, it is necessary that
every call_expr be annotated with file and line. */
if (! EXPR_HAS_LOCATION (*expr_p))
SET_EXPR_LOCATION (*expr_p, input_location);
/* This may be a call to a builtin function.
Builtin function calls may be transformed into different
(and more efficient) builtin function calls under certain