{
if (notify_temp_creation)
return GS_OK;
break;
}
/* Fetch information about the constructor to direct later processing.
We might want to make static versions of it in various cases, and
can only do so if it is known to be a valid constant initializer. */
valid_const_initializer
= categorize_ctor_elements (ctor, &num_nonzero_elements,
&num_ctor_elements, &complete_p);
/* If a const aggregate variable is being initialized, then it
should never be a loss to promote the variable to be static. */
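/* Illustrative example (hypothetical code; the exact conditions are the
   checks below): a read-only local such as

     const int v[4] = { 1, 2, 3, 4 };

   whose address is not otherwise significant (or when
   -fmerge-all-constants is in effect) can be promoted to a static,
   possibly merged, constant, so no run-time stores are needed to build
   it.  */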
if (valid_const_initializer
&& num_nonzero_elements > 1
&& TREE_READONLY (object)
&& TREE_CODE (object) == VAR_DECL
&& (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
{
if (notify_temp_creation)
return GS_ERROR;
DECL_INITIAL (object) = ctor;
TREE_STATIC (object) = 1;
if (!DECL_NAME (object))
DECL_NAME (object) = create_tmp_var_name ("C");
walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
/* ??? C++ doesn't automatically append a .<number> to the
assembler name, and even when it does, it looks at FE private
data structures to figure out what that number should be,
which are not set for this variable. I suppose this is
important for local statics for inline functions, which aren't
"local" in the object file sense. So in order to get a unique
TU-local symbol, we must invoke the lhd version now. */
lhd_set_decl_assembler_name (object);
*expr_p = NULL_TREE;
break;
}
/* If there are "lots" of initialized elements, even discounting
those that are not address constants (and thus *must* be
computed at runtime), then partition the constructor into
constant and non-constant parts. Block copy the constant
parts in, then generate code for the non-constant parts. */
/* TODO. There's code in cp/typeck.c to do this. */
if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
/* store_constructor will ignore the clearing of variable-sized
objects. Initializers for such objects must explicitly set
every field that needs to be set. */
cleared = false;
else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
/* If the constructor isn't complete, clear the whole object
beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
??? This ought not to be needed. For any element not present
in the initializer, we should simply set it to zero. Except
we'd need to *find* the elements that are not present, and that
requires trickery to avoid quadratic compile-time behavior in
large cases or excessive memory use in small cases. */
cleared = true;
else if (num_ctor_elements - num_nonzero_elements
> CLEAR_RATIO (optimize_function_for_speed_p (cfun))
&& num_nonzero_elements < num_ctor_elements / 4)
/* If there are "lots" of zeros, it's more efficient to clear
the memory and then set the nonzero elements. */
cleared = true;
else
cleared = false;
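/* Illustrative example (hypothetical code): a complete initializer that is
   overwhelmingly zero, say a 64-element array with only two nonzero
   entries written out, takes the CLEAR_RATIO branch above: the object is
   block-cleared once and only the two nonzero elements are stored
   individually.  The exact cutoff is target-dependent.  */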
/* If there are "lots" of initialized elements, and all of them
are valid address constants, then the entire initializer can
be dropped to memory, and then memcpy'd out. Don't do this
for sparse arrays, though, as it's more efficient to follow
the standard CONSTRUCTOR behavior of memset followed by
individual element initialization. Also don't do this for small
all-zero initializers (which aren't big enough to merit
clearing), and don't try to make bitwise copies of
TREE_ADDRESSABLE types.
We cannot apply such transformation when compiling chkp static
initializer because creation of initializer image in the memory
will require static initialization of bounds for it. It should
result in another gimplification of similar initializer and we
may fall into infinite loop. */
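/* Illustrative example (hypothetical code): a dense constant initializer
   such as

     int a[64] = { 1, 2, 3, ..., 64 };

   can be emitted once as an anonymous static constant in the object file
   and block-copied into A, instead of expanding to 64 separate stores.  */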
if (valid_const_initializer
&& !(cleared || num_nonzero_elements == 0)
&& !TREE_ADDRESSABLE (type)
&& (!current_function_decl
|| !lookup_attribute ("chkp ctor",
DECL_ATTRIBUTES (current_function_decl))))
{
HOST_WIDE_INT size = int_size_in_bytes (type);
unsigned int align;
/* ??? We can still get unbounded array types, at least
from the C++ front end. This seems wrong, but attempt
to work around it for now. */
if (size < 0)
{
size = int_size_in_bytes (TREE_TYPE (object));
if (size >= 0)
TREE_TYPE (ctor) = type = TREE_TYPE (object);
}
/* Find the maximum alignment we can assume for the object. */
/* ??? Make use of DECL_OFFSET_ALIGN. */
if (DECL_P (object))
align = DECL_ALIGN (object);
else
align = TYPE_ALIGN (type);
/* Do a block move either if the size is so small as to make
each individual move a sub-unit move on average, or if it
is so large as to make individual moves inefficient. */
if (size > 0
&& num_nonzero_elements > 1
&& (size < num_nonzero_elements
|| !can_move_by_pieces (size, align)))
{
if (notify_temp_creation)
return GS_ERROR;
walk_tree (&ctor, force_labels_r, NULL, NULL);
ctor = tree_output_constant_def (ctor);
if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
TREE_OPERAND (*expr_p, 1) = ctor;
/* This is no longer an assignment of a CONSTRUCTOR, but
we still may have processing to do on the LHS. So
pretend we didn't do anything here to let that happen. */
return GS_UNHANDLED;
}
}
/* If the target is volatile, we have non-zero elements, and there is more
than one field to assign, initialize the target from a temporary. */
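/* Illustrative example (hypothetical code): for

     volatile struct S v;
     v = (struct S) { x, y };

   the constructor is built in a non-volatile temporary first and then
   copied to V with a single assignment, so the volatile object is not
   written one field at a time.  */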
if (TREE_THIS_VOLATILE (object)
&& !TREE_ADDRESSABLE (type)
&& num_nonzero_elements > 0
&& vec_safe_length (elts) > 1)
{
tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
TREE_OPERAND (*expr_p, 0) = temp;
*expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
*expr_p,
build2 (MODIFY_EXPR, void_type_node,
object, temp));
return GS_OK;
}
if (notify_temp_creation)
return GS_OK;
/* If there are nonzero elements and if needed, pre-evaluate to capture
elements overlapping with the lhs into temporaries. We must do this
before clearing to fetch the values before they are zeroed-out. */
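/* Illustrative example (hypothetical code): in

     a = (struct S) { a.second, a.first };

   both initializer values overlap the destination, so they are loaded into
   temporaries here, before any clearing or field-by-field stores to A
   could clobber them.  */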
if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
{
preeval_data.lhs_base_decl = get_base_address (object);
if (!DECL_P (preeval_data.lhs_base_decl))
preeval_data.lhs_base_decl = NULL;
preeval_data.lhs_alias_set = get_alias_set (object);
gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
pre_p, post_p, &preeval_data);
}
if (cleared)
{
/* Zap the CONSTRUCTOR element list, which simplifies this case.
Note that we still have to gimplify, in order to handle the
case of variable sized types. Avoid shared tree structures. */
CONSTRUCTOR_ELTS (ctor) = NULL;
TREE_SIDE_EFFECTS (ctor) = 0;
object = unshare_expr (object);
gimplify_stmt (expr_p, pre_p);
}
/* If we have not block cleared the object, or if there are nonzero
elements in the constructor, add assignments to the individual
scalar fields of the object. */
if (!cleared || num_nonzero_elements > 0)
gimplify_init_ctor_eval (object, elts, pre_p, cleared);
*expr_p = NULL_TREE;
}
break;
case COMPLEX_TYPE:
{
tree r, i;
if (notify_temp_creation)
return GS_OK;
/* Extract the real and imaginary parts out of the ctor. */
gcc_assert (elts->length () == 2);
r = (*elts)[0].value;
i = (*elts)[1].value;
if (r == NULL || i == NULL)
{
tree zero = build_zero_cst (TREE_TYPE (type));
if (r == NULL)
r = zero;
if (i == NULL)
i = zero;
}
/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
represent creation of a complex value. */
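/* Illustrative example (hypothetical code): { 1.0, 2.0 } initializing a
   _Complex double folds to the single constant COMPLEX_CST 1.0 + 2.0i,
   while { x, y } with non-constant parts becomes COMPLEX_EXPR <x, y> and
   is gimplified as an rvalue below.  */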
if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
{
ctor = build_complex (type, r, i);
TREE_OPERAND (*expr_p, 1) = ctor;
}
else
{
ctor = build2 (COMPLEX_EXPR, type, r, i);
TREE_OPERAND (*expr_p, 1) = ctor;
ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
pre_p,
post_p,
rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
fb_rvalue);
}
}
break;
case VECTOR_TYPE:
{
unsigned HOST_WIDE_INT ix;
constructor_elt *ce;
if (notify_temp_creation)
return GS_OK;
/* Go ahead and simplify constant constructors to VECTOR_CST. */
if (TREE_CONSTANT (ctor))
{
bool constant_p = true;
tree value;
/* Even when ctor is constant, it might contain non-*_CST
elements, such as addresses or trapping values like
1.0/0.0 - 1.0/0.0. Such expressions don't belong
in VECTOR_CST nodes. */
FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
if (!CONSTANT_CLASS_P (value))
{
constant_p = false;
break;
}
if (constant_p)
{
TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
break;
}
/* Don't reduce an initializer constant even if we can't
make a VECTOR_CST. It won't do anything for us, and it'll
prevent us from representing it as a single constant. */
if (initializer_constant_valid_p (ctor, type))
break;
TREE_CONSTANT (ctor) = 0;
}
/* Vector types use CONSTRUCTOR all the way through gimple
compilation as a general initializer. */
FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
{
enum gimplify_status tret;
tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
fb_rvalue);
if (tret == GS_ERROR)
ret = GS_ERROR;
}
if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
}
break;
default:
/* So how did we get a CONSTRUCTOR for a scalar type? */
gcc_unreachable ();
}
if (ret == GS_ERROR)
return GS_ERROR;
else if (want_value)
{
*expr_p = object;
return GS_OK;
}
else
{
/* If we have gimplified both sides of the initializer but have
not emitted an assignment, do so now. */
if (*expr_p)
{
tree lhs = TREE_OPERAND (*expr_p, 0);
tree rhs = TREE_OPERAND (*expr_p, 1);
gimple init = gimple_build_assign (lhs, rhs);
gimplify_seq_add_stmt (pre_p, init);
*expr_p = NULL;
}
return GS_ALL_DONE;
}
}
/* Given a pointer value OP0, return a simplified version of an
indirection through OP0, or NULL_TREE if no simplification is
possible. This may only be applied to a rhs of an expression.
Note that the resulting type may be different from the type pointed
to in the sense that it is still compatible from the langhooks
point of view. */
static tree
gimple_fold_indirect_ref_rhs (tree t)
{
return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
MODIFY_EXPRs based on the code of the RHS. We loop for as long as
something changes. */
static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
gimple_seq *pre_p, gimple_seq *post_p,
bool want_value)
{
enum gimplify_status ret = GS_UNHANDLED;
bool changed;
do
{
changed = false;
switch (TREE_CODE (*from_p))
{
case VAR_DECL:
/* If we're assigning from a read-only variable initialized with
a constructor, do the direct assignment from the constructor,
but only if neither source nor target are volatile since this
latter assignment might end up being done on a per-field basis. */
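/* Illustrative example (hypothetical code): given

     static const struct S init = { 1, 2 };
     s = init;

   the RHS is replaced by the constructor { 1, 2 } itself, so the copy can
   be expanded without reading INIT, unless gimplify_init_constructor
   reports that the constructor would have to live in memory anyway, in
   which case the change is reverted below.  */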
if (DECL_INITIAL (*from_p)
&& TREE_READONLY (*from_p)
&& !TREE_THIS_VOLATILE (*from_p)
&& !TREE_THIS_VOLATILE (*to_p)
&& TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
{
tree old_from = *from_p;
enum gimplify_status subret;
/* Move the constructor into the RHS. */
*from_p = unshare_expr (DECL_INITIAL (*from_p));
/* Let's see if gimplify_init_constructor will need to put
it in memory. */
subret = gimplify_init_constructor (expr_p, NULL, NULL,
false, true);
if (subret == GS_ERROR)
{
/* If so, revert the change. */
*from_p = old_from;
}
else
{
ret = GS_OK;
changed = true;
}
}
break;
case INDIRECT_REF:
{
/* If we have code like
*(const A*)(A*)&x
where the type of "x" is a (possibly cv-qualified) variant
of "A", treat the entire expression as identical to "x".
This kind of code arises in C++ when an object is bound
to a const reference, and if "x" is a TARGET_EXPR we want
to take advantage of the optimization below. */
bool volatile_p = TREE_THIS_VOLATILE (*from_p);
tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
if (t)
{
if (TREE_THIS_VOLATILE (t) != volatile_p)
{
if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
build_fold_addr_expr (t));
if (REFERENCE_CLASS_P (t))
TREE_THIS_VOLATILE (t) = volatile_p;
}
*from_p = t;
ret = GS_OK;
changed = true;
}
break;
}
case TARGET_EXPR:
{
/* If we are initializing something from a TARGET_EXPR, strip the
TARGET_EXPR and initialize it directly, if possible. This can't
be done if the initializer is void, since that implies that the
temporary is set in some non-trivial way.
??? What about code that pulls out the temp and uses it
elsewhere? I think that such code never uses the TARGET_EXPR as
an initializer. If I'm wrong, we'll die because the temp won't
have any RTL. In that case, I guess we'll need to replace
references somehow. */
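/* Illustrative example (hypothetical C++ code): for

     S s = S (1);

   the RHS is a TARGET_EXPR wrapping the construction of a temporary;
   stripping it lets the object be constructed directly in "s", so no
   separate temporary is materialized.  */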
tree init = TARGET_EXPR_INITIAL (*from_p);
if (init
&& !VOID_TYPE_P (TREE_TYPE (init)))
{
*from_p = init;
ret = GS_OK;
changed = true;
}
}
break;
case COMPOUND_EXPR:
/* Remove any COMPOUND_EXPR in the RHS so the following cases will be
caught. */
gimplify_compound_expr (from_p, pre_p, true);
ret = GS_OK;
changed = true;
break;
case CONSTRUCTOR:
/* If we already made some changes, let the front end have a
crack at this before we break it down. */
if (ret != GS_UNHANDLED)
break;
/* If we're initializing from a CONSTRUCTOR, break this into
individual MODIFY_EXPRs. */
return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
false);
case COND_EXPR:
/* If we're assigning to a non-register type, push the assignment
down into the branches. This is mandatory for ADDRESSABLE types,
since we cannot generate temporaries for such, but it saves a
copy in other cases as well. */
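/* Illustrative example (hypothetical code): for an aggregate A,

     a = c ? x : y;

   becomes, roughly,

     if (c) a = x; else a = y;

   so no aggregate temporary is needed to hold the selected value.  */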
if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
{
/* This code should mirror the code in gimplify_cond_expr. */
enum tree_code code = TREE_CODE (*expr_p);
tree cond = *from_p;
tree result = *to_p;
ret = gimplify_expr (&result, pre_p, post_p,
is_gimple_lvalue, fb_lvalue);
if (ret != GS_ERROR)
ret = GS_OK;
if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
TREE_OPERAND (cond, 1)
= build2 (code, void_type_node, result,
TREE_OPERAND (cond, 1));
if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
TREE_OPERAND (cond, 2)
= build2 (code, void_type_node, unshare_expr (result),
TREE_OPERAND (cond, 2));
TREE_TYPE (cond) = void_type_node;
recalculate_side_effects (cond);
if (want_value)
{
gimplify_and_add (cond, pre_p);
*expr_p = unshare_expr (result);
}
else
*expr_p = cond;
return ret;
}
break;
case CALL_EXPR:
/* For calls that return in memory, give *to_p as the CALL_EXPR's
return slot so that we don't generate a temporary. */
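/* Illustrative example (hypothetical code): for

     struct big b = make_big ();

   where struct big is returned in memory, B itself is passed as the
   call's return slot (subject to the checks below), so make_big builds
   its result directly in B rather than in a temporary that is copied
   afterwards.  */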
if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
&& aggregate_value_p (*from_p, *from_p))
{
bool use_target;
if (!(rhs_predicate_for (*to_p))(*from_p))
/* If we need a temporary, *to_p isn't accurate. */
use_target = false;
/* It's OK to use the return slot directly unless it's an NRV. */
else if (TREE_CODE (*to_p) == RESULT_DECL
&& DECL_NAME (*to_p) == NULL_TREE
&& needs_to_live_in_memory (*to_p))
use_target = true;
else if (is_gimple_reg_type (TREE_TYPE (*to_p))
|| (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
/* Don't force regs into memory. */
use_target = false;
else if (TREE_CODE (*expr_p) == INIT_EXPR)
/* It's OK to use the target directly if it's being
initialized. */
use_target = true;
else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
/* Always use the target and thus RSO for variable-sized types.
GIMPLE cannot deal with a variable-sized assignment
embedded in a call statement. */
use_target = true;
else if (TREE_CODE (*to_p) != SSA_NAME
&& (!is_gimple_variable (*to_p)
|| needs_to_live_in_memory (*to_p)))
/* Don't use the original target if it's already addressable;
if its address escapes, and the called function uses the
NRV optimization, a conforming program could see *to_p
change before the called function returns; see c++/19317.
When optimizing, the return_slot pass marks more functions
as safe after we have escape info. */
use_target = false;
else
use_target = true;
if (use_target)
{
CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
mark_addressable (*to_p);
}
}
break;
case WITH_SIZE_EXPR:
/* Likewise for calls that return an aggregate of non-constant size,
since we would not be able to generate a temporary at all. */
if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
{
*from_p = TREE_OPERAND (*from_p, 0);
/* We don't change ret in this case because the
WITH_SIZE_EXPR might have been added in
gimplify_modify_expr, so returning GS_OK would lead to an
infinite loop. */
changed = true;
}
break;
/* If we're initializing from a container, push the initialization
inside it. */
case CLEANUP_POINT_EXPR:
case BIND_EXPR:
case STATEMENT_LIST:
{
tree wrap = *from_p;
tree t;
ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
fb_lvalue);
if (ret != GS_ERROR)
ret = GS_OK;
t = voidify_wrapper_expr (wrap, *expr_p);
gcc_assert (t == *expr_p);
if (want_value)
{
gimplify_and_add (wrap, pre_p);
*expr_p = unshare_expr (*to_p);
}
else
*expr_p = wrap;
return GS_OK;
}
case COMPOUND_LITERAL_EXPR:
{
tree complit = TREE_OPERAND (*expr_p, 1);
tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
tree decl = DECL_EXPR_DECL (decl_s);
tree init = DECL_INITIAL (decl);
/* struct T x = (struct T) { 0, 1, 2 } can be optimized
into struct T x = { 0, 1, 2 } if the address of the
compound literal has never been taken. */
if (!TREE_ADDRESSABLE (complit)
&& !TREE_ADDRESSABLE (decl)
&& init)
{
*expr_p = copy_node (*expr_p);
TREE_OPERAND (*expr_p, 1) = init;
return GS_OK;
}
}
default:
break;
}
}
while (changed);
return ret;
}
/* Return true if T looks like a valid GIMPLE statement. */
static bool
is_gimple_stmt (tree t)
{
const enum tree_code code = TREE_CODE (t);
switch (code)
{
case NOP_EXPR:
/* The only valid NOP_EXPR is the empty statement. */
return IS_EMPTY_STMT (t);
case BIND_EXPR:
case COND_EXPR:
/* These are only valid if they're void. */
return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
case SWITCH_EXPR:
case GOTO_EXPR:
case RETURN_EXPR:
case LABEL_EXPR:
case CASE_LABEL_EXPR:
case TRY_CATCH_EXPR:
case TRY_FINALLY_EXPR:
case EH_FILTER_EXPR:
case CATCH_EXPR:
case ASM_EXPR:
case STATEMENT_LIST:
case OMP_PARALLEL:
case OMP_FOR:
case OMP_SECTIONS:
case OMP_SECTION:
case OMP_SINGLE:
case OMP_MASTER:
case OMP_ORDERED:
case OMP_CRITICAL:
case OMP_TASK:
/* These are always void. */
return true;
case CALL_EXPR:
case MODIFY_EXPR:
case PREDICT_EXPR:
/* These are valid regardless of their type. */
return true;
default:
return false;
}
}
/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
DECL_GIMPLE_REG_P set.
IMPORTANT NOTE: This promotion is performed by introducing a load of the
other, unmodified part of the complex object just before the total store.
As a consequence, if the object is still uninitialized, an undefined value
will be loaded into a register, which may result in a spurious exception
if the register is floating-point and the value happens to be a signaling
NaN for example. Then the fully-fledged complex operations lowering pass
followed by a DCE pass are necessary in order to fix things up. */
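/* Illustrative example (hypothetical code): for a complex variable Z that
   lives in a register,

     __real__ z = x;

   becomes, roughly,

     tmp = __imag__ z;
     z = COMPLEX_EXPR <x, tmp>;

   i.e. the untouched part is read back and the whole of Z is stored, as
   described in the note above.  */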
static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
bool want_value)
{
enum tree_code code, ocode;
tree lhs, rhs, new_rhs, other, realpart, imagpart;
lhs = TREE_OPERAND (*expr_p, 0);
rhs = TREE_OPERAND (*expr_p, 1);
code = TREE_CODE (lhs);
lhs = TREE_OPERAND (lhs, 0);
ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
other = build1 (ocode, TREE_TYPE (rhs), lhs);
TREE_NO_WARNING (other) = 1;
other = get_formal_tmp_var (other, pre_p);
realpart = code == REALPART_EXPR ? rhs : other;
imagpart = code == REALPART_EXPR ? other : rhs;
if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
else
new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
*expr_p = (want_value) ? rhs : NULL_TREE;
return GS_ALL_DONE;
}
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
modify_expr
: varname '=' rhs
| '*' ID '=' rhs
PRE_P points to the list where side effects that must happen before
*EXPR_P should be stored.
POST_P points to the list where side effects that must happen after
*EXPR_P should be stored.
WANT_VALUE is nonzero iff we want to use the value of this expression
in another expression. */
static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
bool want_value)
{
tree *from_p = &TREE_OPERAND (*expr_p, 1);
tree *to_p = &TREE_OPERAND (*expr_p, 0);
enum gimplify_status ret = GS_UNHANDLED;
gimple assign;
location_t loc = EXPR_LOCATION (*expr_p);
gimple_stmt_iterator gsi;
gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
|| TREE_CODE (*expr_p) == INIT_EXPR);
if (fn_contains_cilk_spawn_p (cfun)
&& lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
&& !seen_error ())
return (enum gimplify_status)
lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, post_p);
/* Trying to simplify a clobber using normal logic doesn't work,
so handle it here. */
if (TREE_CLOBBER_P (*from_p))
{
ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
if (ret == GS_ERROR)
return ret;
gcc_assert (!want_value
&& (TREE_CODE (*to_p) == VAR_DECL
|| TREE_CODE (*to_p) == MEM_REF));
gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
*expr_p = NULL;
return GS_ALL_DONE;
}
/* Insert pointer conversions required by the middle-end that are not
required by the frontend. This fixes middle-end type checking for,
e.g., gcc.dg/redecl-6.c. */
if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
{
STRIP_USELESS_TYPE_CONVERSION (*from_p);
if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
}
/* See if any simplifications can be done based on what the RHS is. */
ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
want_value);
if (ret != GS_UNHANDLED)
return ret;
/* For zero sized types only gimplify the left hand side and right hand
side as statements and throw away the assignment. Do this after
gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
types properly. */
if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
{
gimplify_stmt (from_p, pre_p);
gimplify_stmt (to_p, pre_p);
*expr_p = NULL_TREE;
return GS_ALL_DONE;
}
/* If the value being copied is of variable width, compute the length
of the copy into a WITH_SIZE_EXPR. Note that we need to do this
before gimplifying any of the operands so that we can resolve any
PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
the size of the expression to be copied, not of the destination, so
that is what we must do here. */
maybe_with_size_expr (from_p);
ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
if (ret == GS_ERROR)
return ret;
/* As a special case, we have to temporarily allow for assignments
with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
a toplevel statement, when gimplifying the GENERIC expression
MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
prevent gimplify_expr from trying to create a new temporary for
foo's LHS, we tell it that it should only gimplify until it
reaches the CALL_EXPR. On return from gimplify_expr, the newly
created GIMPLE_CALL <foo> will be the last statement in *PRE_P
and all we need to do here is set 'a' to be its LHS. */
ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
fb_rvalue);
if (ret == GS_ERROR)
return ret;
/* Now see if the above changed *from_p to something we handle specially. */
ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
want_value);
if (ret != GS_UNHANDLED)
return ret;
/* If we've got a variable sized assignment between two lvalues (i.e. one
that does not involve a call), then we can make things a bit more straightforward
by converting the assignment to memcpy or memset. */
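/* Illustrative example: for an assignment between two objects of variable
   size (as produced, e.g., for variable-sized record types), the size is
   carried by a WITH_SIZE_EXPR, and the copy is lowered to a memcpy call;
   if the source is an empty CONSTRUCTOR (a zero-initialization), memset
   is used instead.  */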
if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
{
tree from = TREE_OPERAND (*from_p, 0);
tree size = TREE_OPERAND (*from_p, 1);
if (TREE_CODE (from) == CONSTRUCTOR)
return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
if (is_gimple_addressable (from))
return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
pre_p);
}
/* Transform partial stores to non-addressable complex variables into
total stores. This allows us to use real instead of virtual operands
for these variables, which improves optimization. */
if ((TREE_CODE (*to_p) == REALPART_EXPR
|| TREE_CODE (*to_p) == IMAGPART_EXPR)
&& is_gimple_reg (TREE_OPERAND (*to_p, 0)))
return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
/* Try to alleviate the effects of the gimplification creating artificial
temporaries (see for example is_gimple_reg_rhs) on the debug info. */
if (!gimplify_ctxp->into_ssa
&& TREE_CODE (*from_p) == VAR_DECL
&& DECL_IGNORED_P (*from_p)
&& DECL_P (*to_p)
&& !DECL_IGNORED_P (*to_p))
{
if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
DECL_NAME (*from_p)
= create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
SET_DECL_DEBUG_EXPR (*from_p, *to_p);
}
if (want_value && TREE_THIS_VOLATILE (*to_p))
*from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
if (TREE_CODE (*from_p) == CALL_EXPR)
{
/* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
instead of a GIMPLE_ASSIGN. */
tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
assign = gimple_build_call_from_tree (*from_p);
gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
notice_special_calls (assign);
if (!gimple_call_noreturn_p (assign))
gimple_call_set_lhs (assign, *to_p);
}
else
{
assign = gimple_build_assign (*to_p, *from_p);
gimple_set_location (assign, EXPR_LOCATION (*expr_p));
}
if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
/* We should have got an SSA name from the start. */
gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
gimplify_seq_add_stmt (pre_p, assign);
gsi = gsi_last (*pre_p);
/* Don't fold stmts inside of target construct. We'll do it
during omplower pass instead. */
struct gimplify_omp_ctx *ctx;
for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
if (ctx->region_type == ORT_TARGET)
break;
if (ctx == NULL)
fold_stmt (&gsi);
if (want_value)
{
*expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
return GS_OK;
}
else
*expr_p = NULL;
return GS_ALL_DONE;
}
/* Gimplify a comparison between two variable-sized objects. Do this
with a call to BUILT_IN_MEMCMP. */
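/* Illustrative example: for two objects A and B of variable size, the
   comparison

     a == b

   is rewritten as, roughly,

     __builtin_memcmp (&a, &b, size) == 0

   where SIZE is taken from the type of the first operand (with any
   PLACEHOLDER_EXPRs substituted).  */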
static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
{
location_t loc = EXPR_LOCATION (*expr_p);
tree op0 = TREE_OPERAND (*expr_p, 0);
tree op1 = TREE_OPERAND (*expr_p, 1);
tree t, arg, dest, src, expr;
arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
arg = unshare_expr (arg);
arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
src = build_fold_addr_expr_loc (loc, op1);
dest = build_fold_addr_expr_loc (loc, op0);
t = builtin_decl_implicit (BUILT_IN_MEMCMP);
t = build_call_expr_loc (loc, t, 3, dest, src, arg);
expr
= build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
SET_EXPR_LOCATION (expr, loc);
*expr_p = expr;
return GS_OK;
}
/* Gimplify a comparison between two aggregate objects of integral scalar
mode as a comparison between the bitwise equivalent scalar values. */
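/* Illustrative example: two 4-byte structures whose type has an integral
   scalar mode (e.g. SImode) are compared as the equivalent 32-bit
   integers, roughly

     VIEW_CONVERT_EXPR<unsigned int>(a) == VIEW_CONVERT_EXPR<unsigned int>(b)

   which avoids a library call or a field-by-field comparison.  */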
static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
location_t loc = EXPR_LOCATION (*expr_p);
tree op0 = TREE_OPERAND (*expr_p, 0);
tree op1 = TREE_OPERAND (*expr_p, 1);
tree type = TREE_TYPE (op0);
tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
*expr_p
= fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
return GS_OK;
}
/* Gimplify an expression sequence. This function gimplifies each
expression and rewrites the original expression with the last
expression of the sequence in GIMPLE form.
PRE_P points to the list where the side effects for all the
expressions in the sequence will be emitted.
WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
tree t = *expr_p;
do
{
tree *sub_p = &TREE_OPERAND (t, 0);
if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
gimplify_compound_expr (sub_p, pre_p, false);
else
gimplify_stmt (sub_p, pre_p);
t = TREE_OPERAND (t, 1);
}
while (TREE_CODE (t) == COMPOUND_EXPR);
*expr_p = t;
if (want_value)
return GS_OK;
else
{
gimplify_stmt (expr_p, pre_p);