TREE_SIDE_EFFECTS (*p) = 1;
TREE_TYPE (*p) = void_type_node;
p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
}
break;
case COMPOUND_EXPR:
/* Advance to the last statement. Set all container types to
void. */
for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
{
TREE_SIDE_EFFECTS (*p) = 1;
TREE_TYPE (*p) = void_type_node;
}
break;
case TRANSACTION_EXPR:
TREE_SIDE_EFFECTS (*p) = 1;
TREE_TYPE (*p) = void_type_node;
p = &TRANSACTION_EXPR_BODY (*p);
break;
default:
/* Assume that any tree upon which voidify_wrapper_expr is
directly called is a wrapper, and that its body is op0. */
if (p == &wrapper)
{
TREE_SIDE_EFFECTS (*p) = 1;
TREE_TYPE (*p) = void_type_node;
p = &TREE_OPERAND (*p, 0);
break;
}
          goto out;
        }
    }

 out:
if (p == NULL || IS_EMPTY_STMT (*p))
temp = NULL_TREE;
  else if (temp)
    {
      /* The wrapper is on the RHS of an assignment that we're pushing
         down.  */
      gcc_assert (TREE_CODE (temp) == INIT_EXPR
                  || TREE_CODE (temp) == MODIFY_EXPR);
      TREE_OPERAND (temp, 1) = *p;
      *p = temp;
    }
  else
    {
      temp = create_tmp_var (type, "retval");
      *p = build2 (INIT_EXPR, type, temp, *p);
}
return temp;
}
return NULL_TREE;
}
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
a temporary through which they communicate. */
static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
                         1, tmp_var);
}
/* Gimplify a BIND_EXPR. Just voidify and recurse. */
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
location_t start_locus = 0, end_locus = 0;
tree ret_clauses = NULL;
tree temp = voidify_wrapper_expr (bind_expr, NULL);
/* Mark variables seen in this bind expr. */
for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
{
if (TREE_CODE (t) == VAR_DECL)
{
struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
/* Mark variable as local. */
if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
&& (! DECL_SEEN_IN_BIND_EXPR_P (t)
|| splay_tree_lookup (ctx->variables,
(splay_tree_key) t) == NULL))
{
if (ctx->region_type == ORT_SIMD
&& TREE_ADDRESSABLE (t)
&& !TREE_STATIC (t))
omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
else
omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
}
DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
cfun->has_local_explicit_reg_vars = true;
        }
/* Preliminarily mark non-addressed complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
|| TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
&& !TREE_THIS_VOLATILE (t)
&& (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
&& !needs_to_live_in_memory (t))
DECL_GIMPLE_REG_P (t) = 1;
}
bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
BIND_EXPR_BLOCK (bind_expr));
gimple_push_bind_expr (bind_stmt);
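  /* Reset the stack flags for this scope; whether a save/restore pair is
     actually needed is decided below, after the body has been gimplified.  */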
gimplify_ctxp->keep_stack = false;
gimplify_ctxp->save_stack = false;
/* Gimplify the body into the GIMPLE_BIND tuple's body. */
body = NULL;
gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
gimple_bind_set_body (bind_stmt, body);
/* Source location wise, the cleanup code (stack_restore and clobbers)
belongs to the end of the block, so propagate what we have. The
stack_save operation belongs to the beginning of block, which we can
infer from the bind_expr directly if the block has no explicit
assignment. */
if (BIND_EXPR_BLOCK (bind_expr))
{
end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
}
if (start_locus == 0)
start_locus = EXPR_LOCATION (bind_expr);
cleanup = NULL;
stack_save = NULL;
/* If the code both contains VLAs and calls alloca, then we cannot reclaim
the stack space allocated to the VLAs. */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
         block so that the stack is restored on every exit path.  */
      build_stack_save_restore (&stack_save, &stack_restore);
gimple_set_location (stack_save, start_locus);
gimple_set_location (stack_restore, end_locus);
gimplify_seq_add_stmt (&cleanup, stack_restore);
}
/* Add clobbers for all variables that go out of scope. */
for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
{
if (TREE_CODE (t) == VAR_DECL
&& !is_global_var (t)
&& DECL_CONTEXT (t) == current_function_decl
&& !DECL_HARD_REGISTER (t)
&& !TREE_THIS_VOLATILE (t)
&& !DECL_HAS_VALUE_EXPR_P (t)
/* Only care for variables that have to be in memory. Others
will be rewritten into SSA names, hence moved to the top-level. */
&& !is_gimple_reg (t)
          && flag_stack_reuse != SR_NONE)
        {
          tree clobber = build_constructor (TREE_TYPE (t), NULL);
          gimple *clobber_stmt;
          TREE_THIS_VOLATILE (clobber) = 1;
          clobber_stmt = gimple_build_assign (t, clobber);
gimple_set_location (clobber_stmt, end_locus);
gimplify_seq_add_stmt (&cleanup, clobber_stmt);
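          /* If an OpenACC 'declare' directive recorded return clauses for
             this variable, collect them so a matching OACC_DECLARE region
             can be emitted together with the cleanup below.  */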
if (flag_openacc && oacc_declare_returns != NULL)
{
tree *c = oacc_declare_returns->get (t);
if (c != NULL)
{
if (ret_clauses)
OMP_CLAUSE_CHAIN (*c) = ret_clauses;
ret_clauses = *c;
oacc_declare_returns->remove (t);
if (oacc_declare_returns->elements () == 0)
{
delete oacc_declare_returns;
oacc_declare_returns = NULL;
}
}
}
}
}
if (ret_clauses)
{
gomp_target *stmt;
gimple_stmt_iterator si = gsi_start (cleanup);
stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
ret_clauses);
gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
}
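  /* If any cleanup was collected (stack restore, clobbers, OpenACC declare
     returns), wrap the body in a GIMPLE_TRY_FINALLY so the cleanup runs on
     every exit from the scope.  */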
if (cleanup)
{
      gtry *gs;
      gimple_seq new_body;
new_body = NULL;
gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
GIMPLE_TRY_FINALLY);
if (stack_save)
gimplify_seq_add_stmt (&new_body, stack_save);
gimplify_seq_add_stmt (&new_body, gs);
gimple_bind_set_body (bind_stmt, new_body);
    }
/* keep_stack propagates all the way up to the outermost BIND_EXPR. */
if (!gimplify_ctxp->keep_stack)
gimplify_ctxp->keep_stack = old_keep_stack;
gimplify_ctxp->save_stack = old_save_stack;
gimple_pop_bind_expr ();
gimplify_seq_add_stmt (pre_p, bind_stmt);
if (temp)
{
*expr_p = temp;
return GS_OK;
}
*expr_p = NULL_TREE;
return GS_ALL_DONE;
}
/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
GIMPLE value, it is assigned to a new temporary and the statement is
re-written to return the temporary.
PRE_P points to the sequence where side effects that must happen before
STMT should be stored. */
static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;
if (ret_expr == error_mark_node)
return GS_ERROR;
/* Implicit _Cilk_sync must be inserted right before any return statement
if there is a _Cilk_spawn in the function. If the user has provided a
_Cilk_sync, the optimizer should remove this duplicate one. */
if (fn_contains_cilk_spawn_p (cfun))
{
tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
gimplify_and_add (impl_sync, pre_p);
}
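  /* A return without a value, or one that already returns the bare
     RESULT_DECL, needs no temporary: emit the GIMPLE_RETURN directly.  */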
if (!ret_expr
|| TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      greturn *ret = gimple_build_return (ret_expr);
gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
gimplify_seq_add_stmt (pre_p, ret);
return GS_ALL_DONE;
}
if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);
/* See through a return by reference. */
if (TREE_CODE (result_decl) == INDIRECT_REF)
result_decl = TREE_OPERAND (result_decl, 0);
gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
|| TREE_CODE (ret_expr) == INIT_EXPR)
&& TREE_CODE (result_decl) == RESULT_DECL);
    }
/* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
Recall that aggregate_value_p is FALSE for any aggregate type that is
returned in registers. If we're returning values in registers, then
we don't want to extend the lifetime of the RESULT_DECL, particularly
across another call. In addition, for those aggregates for which
hard_function_value generates a PARALLEL, we'll die during normal
expansion of structure assignments; there's special code in expand_return
to handle this case that does not exist in expand_expr. */
if (!result_decl)
result = NULL_TREE;
else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
{
if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
{
if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
/* Note that we don't use gimplify_vla_decl because the RESULT_DECL
should be effectively allocated by the caller, i.e. all calls to
this function must be subject to the Return Slot Optimization. */
gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
}
result = result_decl;
}
else if (gimplify_ctxp->return_temp)
result = gimplify_ctxp->return_temp;
else
{
result = create_tmp_reg (TREE_TYPE (result_decl));
/* ??? With complex control flow (usually involving abnormal edges),
we can wind up warning about an uninitialized value for this. Due
to how this variable is constructed and initialized, this is never
true. Give up and never warn. */
TREE_NO_WARNING (result) = 1;
gimplify_ctxp->return_temp = result;
}
/* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
Then gimplify the whole thing. */
if (result != result_decl)
TREE_OPERAND (ret_expr, 0) = result;
gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
ret = gimple_build_return (result);
gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
gimplify_seq_add_stmt (pre_p, ret);
return GS_ALL_DONE;
}
/* Gimplify a variable-length array DECL. */
static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;
gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
/* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
if (DECL_HAS_VALUE_EXPR_P (decl))
return;
/* All occurrences of this decl in final gimplified code will be
replaced by indirection. Setting DECL_VALUE_EXPR does two
things: First, it lets the rest of the gimplifier know what
replacement to use. Second, it lets the debug info know
where to find the value. */
ptr_type = build_pointer_type (TREE_TYPE (decl));
addr = create_tmp_var (ptr_type, get_name (decl));
DECL_IGNORED_P (addr) = 0;
t = build_fold_indirect_ref (addr);
TREE_THIS_NOTRAP (t) = 1;
SET_DECL_VALUE_EXPR (decl, t);
DECL_HAS_VALUE_EXPR_P (decl) = 1;
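  /* Allocate the storage with __builtin_alloca_with_align and record the
     resulting address in ADDR.  */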
t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
size_int (DECL_ALIGN (decl)));
/* The call has been built for a variable-sized object. */
CALL_ALLOCA_FOR_VAR_P (t) = 1;
t = fold_convert (ptr_type, t);
t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
gimplify_and_add (t, seq_p);
}
/* A helper function to be called via walk_tree. Mark all labels under *TP
as being forced. To be called for DECL_INITIAL of static variables. */
static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
if (TYPE_P (*tp))
*walk_subtrees = 0;
if (TREE_CODE (*tp) == LABEL_DECL)
{
FORCED_LABEL (*tp) = 1;
cfun->has_forced_label_in_static = 1;
}
return NULL_TREE;
}
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
and initialization explicit. */
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
tree stmt = *stmt_p;
tree decl = DECL_EXPR_DECL (stmt);
*stmt_p = NULL_TREE;
if (TREE_TYPE (decl) == error_mark_node)
return GS_ERROR;
if ((TREE_CODE (decl) == TYPE_DECL
|| TREE_CODE (decl) == VAR_DECL)
&& !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
{
gimplify_type_sizes (TREE_TYPE (decl), seq_p);
if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
}
/* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
in case its size expressions contain problematic nodes like CALL_EXPR. */
if (TREE_CODE (decl) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl)
&& !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
{
gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
}
if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
{
tree init = DECL_INITIAL (decl);
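      /* Treat the decl as a VLA if its size is not a compile-time constant,
         or if generic stack checking is enabled and the object is larger
         than STACK_CHECK_MAX_VAR_SIZE.  */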
if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
|| (!TREE_STATIC (decl)
&& flag_stack_check == GENERIC_STACK_CHECK
&& compare_tree_int (DECL_SIZE_UNIT (decl),
STACK_CHECK_MAX_VAR_SIZE) > 0))
gimplify_vla_decl (decl, seq_p);
/* Some front ends do not explicitly declare all anonymous
artificial variables. We compensate here by declaring the
variables, though it would be better if the front ends would
explicitly declare them. */
if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
&& DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
gimple_add_tmp_var (decl);
if (init && init != error_mark_node)
{
if (!TREE_STATIC (decl))
{
DECL_INITIAL (decl) = NULL_TREE;
init = build2 (INIT_EXPR, void_type_node, decl, init);
gimplify_and_add (init, seq_p);
ggc_free (init);
}
else
/* We must still examine initializers for static variables
as they may contain a label address. */
walk_tree (&init, force_labels_r, NULL, NULL);
}
}
return GS_ALL_DONE;
}
/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
and replacing the LOOP_EXPR with goto, but if the loop contains an
EXIT_EXPR, we need to append a label for it to jump to. */
static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
tree saved_label = gimplify_ctxp->exit_label;
tree start_label = create_artificial_label (UNKNOWN_LOCATION);
gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
gimplify_ctxp->exit_label = NULL_TREE;
gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
if (gimplify_ctxp->exit_label)
gimplify_seq_add_stmt (pre_p,
gimple_build_label (gimplify_ctxp->exit_label));
gimplify_ctxp->exit_label = saved_label;
*expr_p = NULL;
return GS_ALL_DONE;
}
/* Gimplify a statement list onto a sequence. These may be created either
by an enlightened front-end, or by shortcut_cond_expr. */
static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
tree temp = voidify_wrapper_expr (*expr_p, NULL);
tree_stmt_iterator i = tsi_start (*expr_p);
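  /* Gimplify each statement and unlink it from the list as we go; any value
     computed by the list has already been redirected into TEMP by
     voidify_wrapper_expr above.  */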
  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }
if (temp)
{
*expr_p = temp;
return GS_OK;
}
return GS_ALL_DONE;
}
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
branch to. */
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
tree switch_expr = *expr_p;
gimple_seq switch_body_seq = NULL;
enum gimplify_status ret;
tree index_type = TREE_TYPE (switch_expr);
if (index_type == NULL_TREE)
index_type = TREE_TYPE (SWITCH_COND (switch_expr));
ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
fb_rvalue);
if (ret == GS_ERROR || ret == GS_UNHANDLED)
return ret;
if (SWITCH_BODY (switch_expr))
{
vec<tree> labels;
vec<tree> saved_labels;
tree default_case = NULL_TREE;
gswitch *switch_stmt;
/* If someone can be bothered to fill in the labels, they can
be bothered to null out the body too. */
gcc_assert (!SWITCH_LABELS (switch_expr));
/* Save old labels, get new ones from body, then restore the old
labels. Save all the things from the switch body to append after. */
saved_labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels.create (8);
gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels = saved_labels;
preprocess_case_label_vec_for_gimple (labels, index_type,
&default_case);
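      /* A GIMPLE_SWITCH always has a default label; synthesize an empty one
         at the end of the body if the source did not provide one.  */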
      if (!default_case)
        {
          glabel *new_default;

          default_case
= build_case_label (NULL_TREE, NULL_TREE,
create_artificial_label (UNKNOWN_LOCATION));
new_default = gimple_build_label (CASE_LABEL (default_case));
gimplify_seq_add_stmt (&switch_body_seq, new_default);
}
      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
                                         default_case, labels);
gimplify_seq_add_stmt (pre_p, switch_stmt);
gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
else
gcc_assert (SWITCH_LABELS (switch_expr));
  return GS_ALL_DONE;
}
/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
#pragma omp parallel. At least in the C front end, we don't
detect such invalid branches until after gimplification, in the
diagnose_omp_blocks pass. */
for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
ctxp->case_labels.safe_push (*expr_p);
gimplify_seq_add_stmt (pre_p, label_stmt);
return GS_ALL_DONE;
}
/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
if necessary. */
tree
build_and_jump (tree *label_p)
{
if (label_p == NULL)
/* If there's nowhere to jump, just fall through. */
return NULL_TREE;
if (*label_p == NULL_TREE)
{
tree label = create_artificial_label (UNKNOWN_LOCATION);
*label_p = label;
}
return build1 (GOTO_EXPR, void_type_node, *label_p);
}
/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
This also involves building a label to jump to and communicating it to
gimplify_loop_expr through gimplify_ctxp->exit_label. */
static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
tree cond = TREE_OPERAND (*expr_p, 0);
tree expr;
expr = build_and_jump (&gimplify_ctxp->exit_label);
expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
*expr_p = expr;
return GS_OK;
}
/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
different from its canonical type, wrap the whole thing inside a
NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
type.
The canonical type of a COMPONENT_REF is the type of the field being
referenced--unless the field is a bit-field which can be read directly
in a smaller mode, in which case the canonical type is the
sign-appropriate type corresponding to that mode. */
static void
canonicalize_component_ref (tree *expr_p)
{
tree expr = *expr_p;
tree type;
gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
else
type = TREE_TYPE (TREE_OPERAND (expr, 1));
/* One could argue that all the stuff below is not necessary for
the non-bitfield case and declare it a FE error if type
adjustment would be needed. */
if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
tree old_type = TREE_TYPE (expr);
#endif
int type_quals;
/* We need to preserve qualifiers and propagate them from
operand 0. */
type_quals = TYPE_QUALS (type)
| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
if (TYPE_QUALS (type) != type_quals)
type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
/* Set the type of the COMPONENT_REF to the underlying type. */
TREE_TYPE (expr) = type;
#ifdef ENABLE_TYPES_CHECKING
/* It is now a FE error, if the conversion from the canonical
type to the original expression type is not useless. */
gcc_assert (useless_type_conversion_p (old_type, type));
#endif
}
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
to foo, embed that change in the ADDR_EXPR by converting
T array[U];
(T *)&array
==>
&array[L]
where L is the lower bound. For simplicity, only do this for constant
lower bound.
The constraint is that the type of &array[L] is trivially convertible
to T *. */
static void
canonicalize_addr_expr (tree *expr_p)
{
tree expr = *expr_p;
tree addr_expr = TREE_OPERAND (expr, 0);
tree datype, ddatype, pddatype;
/* We simplify only conversions from an ADDR_EXPR to a pointer type. */
if (!POINTER_TYPE_P (TREE_TYPE (expr))
|| TREE_CODE (addr_expr) != ADDR_EXPR)
return;
/* The addr_expr type should be a pointer to an array. */
datype = TREE_TYPE (TREE_TYPE (addr_expr));
if (TREE_CODE (datype) != ARRAY_TYPE)
return;
/* The pointer to element type shall be trivially convertible to
the expression pointer type. */
ddatype = TREE_TYPE (datype);
pddatype = build_pointer_type (ddatype);
if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
pddatype))
return;
/* The lower bound and element sizes must be constant. */
if (!TYPE_SIZE_UNIT (ddatype)
|| TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
|| !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
|| TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
return;
/* All checks succeeded. Build a new node to merge the cast. */
*expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
NULL_TREE, NULL_TREE);
*expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
/* We can have stripped a required restrict qualifier above. */
if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
underneath as appropriate. */
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
gcc_assert (CONVERT_EXPR_P (*expr_p));
/* Then strip away all but the outermost conversion. */
STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
/* And remove the outermost conversion if it's useless. */
if (tree_ssa_useless_type_conversion (*expr_p))
*expr_p = TREE_OPERAND (*expr_p, 0);
/* If we still have a conversion at the toplevel,
then canonicalize some constructs. */
  if (CONVERT_EXPR_P (*expr_p))
{
tree sub = TREE_OPERAND (*expr_p, 0);
/* If a NOP conversion is changing the type of a COMPONENT_REF
expression, then canonicalize its type now in order to expose more
redundant conversions. */
if (TREE_CODE (sub) == COMPONENT_REF)
canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
/* If a NOP conversion is changing a pointer to array of foo
to a pointer to foo, embed that change in the ADDR_EXPR. */
else if (TREE_CODE (sub) == ADDR_EXPR)
canonicalize_addr_expr (expr_p);
}
/* If we have a conversion to a non-register type force the
use of a VIEW_CONVERT_EXPR instead. */
if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
*expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
TREE_OPERAND (*expr_p, 0));
/* Canonicalize CONVERT_EXPR to NOP_EXPR. */
if (TREE_CODE (*expr_p) == CONVERT_EXPR)
TREE_SET_CODE (*expr_p, NOP_EXPR);
return GS_OK;
}
/* Nonlocal VLAs seen in the current function. */
static hash_set<tree> *nonlocal_vlas;
/* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
static tree nonlocal_vla_vars;
/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
DECL_VALUE_EXPR, and it's worth re-examining things. */
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
tree decl = *expr_p;
/* ??? If this is a local variable, and it has not been seen in any
outer BIND_EXPR, then it's probably the result of a duplicate
declaration, for which we've already issued an error. It would
be really nice if the front end wouldn't leak these at all.
Currently the only known culprit is C++ destructors, as seen
in g++.old-deja/g++.jason/binding.C. */
if (TREE_CODE (decl) == VAR_DECL
&& !DECL_SEEN_IN_BIND_EXPR_P (decl)
&& !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
&& decl_function_context (decl) == current_function_decl)
{
return GS_ERROR;
}
/* When within an OMP context, notice uses of variables. */
if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
return GS_ALL_DONE;
/* If the decl is an alias for another expression, substitute it now. */
if (DECL_HAS_VALUE_EXPR_P (decl))
{
tree value_expr = DECL_VALUE_EXPR (decl);
/* For referenced nonlocal VLAs add a decl for debugging purposes
to the current function. */
if (TREE_CODE (decl) == VAR_DECL
&& TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
&& nonlocal_vlas != NULL
&& TREE_CODE (value_expr) == INDIRECT_REF
&& TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
&& decl_function_context (decl) != current_function_decl)
{
struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
while (ctx
&& (ctx->region_type == ORT_WORKSHARE
|| ctx->region_type == ORT_SIMD
|| ctx->region_type == ORT_ACC))
ctx = ctx->outer_context;
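          /* If no OMP context remains after skipping worksharing, SIMD and
             OpenACC regions, add a debug-only copy of the decl to the
             current function, at most once per decl.  */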
          if (!ctx && !nonlocal_vlas->add (decl))
            {
              tree copy = copy_node (decl);
lang_hooks.dup_lang_specific_decl (copy);
SET_DECL_RTL (copy, 0);
TREE_USED (copy) = 1;
DECL_CHAIN (copy) = nonlocal_vla_vars;
nonlocal_vla_vars = copy;
SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
DECL_HAS_VALUE_EXPR_P (copy) = 1;
}
}
*expr_p = unshare_expr (value_expr);
return GS_OK;
}
return GS_ALL_DONE;
}
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
static void
recalculate_side_effects (tree t)
{
enum tree_code code = TREE_CODE (t);
int len = TREE_OPERAND_LENGTH (t);
int i;
switch (TREE_CODE_CLASS (code))
{
case tcc_expression:
switch (code)
{
case INIT_EXPR:
case MODIFY_EXPR:
case VA_ARG_EXPR:
case PREDECREMENT_EXPR:
case PREINCREMENT_EXPR:
case POSTDECREMENT_EXPR:
case POSTINCREMENT_EXPR:
/* All of these have side-effects, no matter what their
operands are. */
return;
default:
break;
}
/* Fall through. */
case tcc_comparison: /* a comparison expression */
case tcc_unary: /* a unary arithmetic expression */
case tcc_binary: /* a binary arithmetic expression */
case tcc_reference: /* a reference */
case tcc_vl_exp: /* a function call */
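      /* These nodes have side effects if they are volatile themselves or if
         any of their operands do.  */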
TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
for (i = 0; i < len; ++i)
{
tree op = TREE_OPERAND (t, i);
if (op && TREE_SIDE_EFFECTS (op))
TREE_SIDE_EFFECTS (t) = 1;
}
break;
case tcc_constant:
/* No side-effects. */
return;
default:
gcc_unreachable ();
}
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
node *EXPR_P.
compound_lval
: min_lval '[' val ']'
| min_lval '.' ID
| compound_lval '[' val ']'
| compound_lval '.' ID
This is not part of the original SIMPLE definition, which separates
array and member references, but it seems reasonable to handle them
together. Also, this way we don't run into problems with union
aliasing; gcc requires that for accesses through a union to alias, the
union reference must be explicit, which was not always the case when we
were splitting up array and member refs.
PRE_P points to the sequence where side effects that must happen before
*EXPR_P should be stored.
POST_P points to the sequence where side effects that must happen after
*EXPR_P should be stored. */
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
int i;
location_t loc = EXPR_LOCATION (*expr_p);
tree expr = *expr_p;
  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
{
restart:
/* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
if (TREE_CODE (*p) == INDIRECT_REF)
*p = fold_indirect_ref_loc (loc, *p);
if (handled_component_p (*p))
;
/* Expand DECL_VALUE_EXPR now. In some cases that may expose
additional COMPONENT_REFs. */