/* Don't warn for terminated branches, i.e. when the subsequent case labels
are immediately followed by a break (or a similar jump). */
gsi = *gsi_p;
/* Skip all immediately following labels. */
while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
gsi_next (&gsi);
/* { ... something; default:; } */
if (gsi_end_p (gsi)
/* { ... something; default: break; } or
{ ... something; default: goto L; } */
|| gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
/* { ... something; default: return; } */
|| gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
return false;
return true;
}
/* Callback for walk_gimple_seq. */
static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
struct walk_stmt_info *)
{
gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
switch (gimple_code (stmt))
{
case GIMPLE_TRY:
case GIMPLE_BIND:
case GIMPLE_CATCH:
case GIMPLE_EH_FILTER:
case GIMPLE_TRANSACTION:
/* Walk the sub-statements. */
*handled_ops_p = false;
break;
/* Find a sequence of form:
GIMPLE_LABEL
[...]
<may fallthru stmt>
GIMPLE_LABEL
and possibly warn. */
case GIMPLE_LABEL:
{
/* Found a label. Skip all immediately following labels. */
while (!gsi_end_p (*gsi_p)
&& gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
gsi_next (gsi_p);
/* There might be no more statements. */
if (gsi_end_p (*gsi_p))
return integer_zero_node;
/* Vector of labels that fall through. */
auto_vec <struct label_entry> labels;
gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
/* There might be no more statements. */
if (gsi_end_p (*gsi_p))
return integer_zero_node;
gimple *next = gsi_stmt (*gsi_p);
tree label;
/* If what follows is a label, then we may have a fallthrough. */
if (gimple_code (next) == GIMPLE_LABEL
&& gimple_has_location (next)
&& (label = gimple_label_label (as_a <glabel *> (next)))
&& prev != NULL)
{
struct label_entry *l;
bool warned_p = false;
if (!should_warn_for_implicit_fallthrough (gsi_p, label))
/* Quiet. */;
else if (gimple_code (prev) == GIMPLE_LABEL
&& (label = gimple_label_label (as_a <glabel *> (prev)))
&& (l = find_label_entry (&labels, label)))
"this statement may fall through");
else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
/* Try to be clever and don't warn when the statement
can't actually fall through. */
&& gimple_stmt_may_fallthru (prev)
&& gimple_has_location (prev))
warned_p = warning_at (gimple_location (prev),
"this statement may fall through");
if (warned_p)
inform (gimple_location (next), "here");
/* Mark this label as processed so as to prevent multiple
warnings in nested switches. */
FALLTHROUGH_LABEL_P (label) = true;
/* So that next warn_implicit_fallthrough_r will start looking for
a new sequence starting with this label. */
gsi_prev (gsi_p);
}
}
break;
default:
break;
}
return NULL_TREE;
}
/* Warn when a switch case falls through. */
static void
maybe_warn_implicit_fallthrough (gimple_seq seq)
{
if (!warn_implicit_fallthrough)
return;
/* This warning is meant for C/C++/ObjC/ObjC++ only. */
if (!(lang_GNU_C ()
|| lang_GNU_CXX ()
|| lang_GNU_OBJC ()))
return;
struct walk_stmt_info wi;
memset (&wi, 0, sizeof (wi));
walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
}
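/* For illustration only (a sketch, not taken from the testsuite): with
-Wimplicit-fallthrough enabled, a switch body such as

switch (n)
{
case 0:
n += 2; // may fall through ...
case 1: // ... into this label
return n;
}

gimplifies to a GIMPLE_LABEL, a statement that may fall through, and then
another user label, which is the pattern warn_implicit_fallthrough_r looks
for; the warning points at "n += 2;" and the "here" note at "case 1:". */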
/* Callback for walk_gimple_seq. */
static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
struct walk_stmt_info *)
{
gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
switch (gimple_code (stmt))
{
case GIMPLE_TRY:
case GIMPLE_BIND:
case GIMPLE_CATCH:
case GIMPLE_EH_FILTER:
case GIMPLE_TRANSACTION:
/* Walk the sub-statements. */
*handled_ops_p = false;
break;
case GIMPLE_CALL:
if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
{
gsi_remove (gsi_p, true);
if (gsi_end_p (*gsi_p))
return integer_zero_node;
bool found = false;
location_t loc = gimple_location (stmt);
gimple_stmt_iterator gsi2 = *gsi_p;
stmt = gsi_stmt (gsi2);
if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
{
/* Go on until the artificial label. */
tree goto_dest = gimple_goto_dest (stmt);
for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
{
if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
&& gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
== goto_dest)
break;
}
/* Not found? Stop. */
if (gsi_end_p (gsi2))
break;
/* Look one past it. */
gsi_next (&gsi2);
}
/* We're looking for a case label or default label here. */
while (!gsi_end_p (gsi2))
{
stmt = gsi_stmt (gsi2);
if (gimple_code (stmt) == GIMPLE_LABEL)
{
tree label = gimple_label_label (as_a <glabel *> (stmt));
if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
{
found = true;
break;
}
}
else
/* Something other than a label. That's not expected. */
break;
gsi_next (&gsi2);
}
if (!found)
warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
"a case label or default label");
}
break;
default:
break;
}
return NULL_TREE;
}
/* Expand all FALLTHROUGH () calls in SEQ. */
static void
expand_FALLTHROUGH (gimple_seq *seq_p)
{
struct walk_stmt_info wi;
memset (&wi, 0, sizeof (wi));
walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
}
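/* An illustrative sketch (GNU attribute spelling assumed): the C/C++ front
ends lower

case 0:
n += 2;
__attribute__ ((fallthrough)); // or C++17 [[fallthrough]];
case 1:

to an IFN_FALLTHROUGH internal call. expand_FALLTHROUGH_r removes that call
and then expects to see only labels (possibly reached through an artificial
goto) before the next case or default label; if some other statement
intervenes, the "attribute fallthrough not preceding a case label or
default label" warning above is issued instead. */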
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
branch to. */
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
tree switch_expr = *expr_p;
gimple_seq switch_body_seq = NULL;
enum gimplify_status ret;
tree index_type = TREE_TYPE (switch_expr);
if (index_type == NULL_TREE)
index_type = TREE_TYPE (SWITCH_COND (switch_expr));
ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
fb_rvalue);
if (ret == GS_ERROR || ret == GS_UNHANDLED)
return ret;
if (SWITCH_BODY (switch_expr))
{
vec<tree> labels;
vec<tree> saved_labels;
hash_set<tree> *saved_live_switch_vars = NULL;
tree default_case = NULL_TREE;
gswitch *switch_stmt;
/* If someone can be bothered to fill in the labels, they can
be bothered to null out the body too. */
gcc_assert (!SWITCH_LABELS (switch_expr));
/* Save old labels, get new ones from body, then restore the old
labels. Save all the things from the switch body to append after. */
saved_labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels.create (8);
/* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
if (TREE_CODE (SWITCH_BODY (switch_expr)) == BIND_EXPR)
gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
else
gimplify_ctxp->live_switch_vars = NULL;
bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
gimplify_ctxp->in_switch_expr = true;
gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
gimplify_ctxp->in_switch_expr = old_in_switch_expr;
maybe_warn_switch_unreachable (switch_body_seq);
maybe_warn_implicit_fallthrough (switch_body_seq);
/* Only do this for the outermost GIMPLE_SWITCH. */
if (!gimplify_ctxp->in_switch_expr)
expand_FALLTHROUGH (&switch_body_seq);
labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels = saved_labels;
if (gimplify_ctxp->live_switch_vars)
{
gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
delete gimplify_ctxp->live_switch_vars;
}
gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
preprocess_case_label_vec_for_gimple (labels, index_type,
&default_case);
if (!default_case)
{
glabel *new_default;
default_case
= build_case_label (NULL_TREE, NULL_TREE,
create_artificial_label (UNKNOWN_LOCATION));
new_default = gimple_build_label (CASE_LABEL (default_case));
gimplify_seq_add_stmt (&switch_body_seq, new_default);
}
switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
default_case, labels);
gimplify_seq_add_stmt (pre_p, switch_stmt);
gimplify_seq_add_seq (pre_p, switch_body_seq);
labels.release ();
}
else
gcc_assert (SWITCH_LABELS (switch_expr));
return GS_ALL_DONE;
}
/* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
static enum gimplify_status
gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
{
gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
== current_function_decl);
glabel *label_stmt = gimple_build_label (LABEL_EXPR_LABEL (*expr_p));
gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
gimplify_seq_add_stmt (pre_p, label_stmt);
return GS_ALL_DONE;
}
/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
struct gimplify_ctx *ctxp;
glabel *label_stmt;
/* Invalid programs can play Duff's Device type games with, for example,
#pragma omp parallel. At least in the C front end, we don't
detect such invalid branches until after gimplification, in the
diagnose_omp_blocks pass. */
for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
if (ctxp->case_labels.exists ())
break;
label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
ctxp->case_labels.safe_push (*expr_p);
gimplify_seq_add_stmt (pre_p, label_stmt);
return GS_ALL_DONE;
}
/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
if necessary. */
tree
build_and_jump (tree *label_p)
{
if (label_p == NULL)
/* If there's nowhere to jump, just fall through. */
return NULL_TREE;
if (*label_p == NULL_TREE)
{
tree label = create_artificial_label (UNKNOWN_LOCATION);
*label_p = label;
}
return build1 (GOTO_EXPR, void_type_node, *label_p);
}
/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
This also involves building a label to jump to and communicating it to
gimplify_loop_expr through gimplify_ctxp->exit_label. */
static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
tree cond = TREE_OPERAND (*expr_p, 0);
tree expr;
expr = build_and_jump (&gimplify_ctxp->exit_label);
expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
*expr_p = expr;
return GS_OK;
}
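/* Roughly (names illustrative): an EXIT_EXPR with condition COND inside a
LOOP_EXPR ends up as

if (COND) goto exit_label;

where exit_label is the artificial label recorded in
gimplify_ctxp->exit_label and emitted after the loop body by
gimplify_loop_expr. */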
/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
different from its canonical type, wrap the whole thing inside a
NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
type.
The canonical type of a COMPONENT_REF is the type of the field being
referenced--unless the field is a bit-field which can be read directly
in a smaller mode, in which case the canonical type is the
sign-appropriate type corresponding to that mode. */
static void
canonicalize_component_ref (tree *expr_p)
{
tree expr = *expr_p;
tree type;
gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
else
type = TREE_TYPE (TREE_OPERAND (expr, 1));
/* One could argue that all the stuff below is not necessary for
the non-bitfield case and declare it a FE error if type
adjustment would be needed. */
if (TREE_TYPE (expr) != type)
{
#ifdef ENABLE_TYPES_CHECKING
tree old_type = TREE_TYPE (expr);
#endif
int type_quals;
/* We need to preserve qualifiers and propagate them from
operand 0. */
type_quals = TYPE_QUALS (type)
| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
if (TYPE_QUALS (type) != type_quals)
type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
/* Set the type of the COMPONENT_REF to the underlying type. */
TREE_TYPE (expr) = type;
#ifdef ENABLE_TYPES_CHECKING
/* It is now a FE error, if the conversion from the canonical
type to the original expression type is not useless. */
gcc_assert (useless_type_conversion_p (old_type, type));
#endif
}
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
to foo, embed that change in the ADDR_EXPR by converting
T array[U];
(T *)&array
==>
&array[L]
where L is the lower bound. For simplicity, only do this for constant
lower bound.
The constraint is that the type of &array[L] is trivially convertible
to T *. */
static void
canonicalize_addr_expr (tree *expr_p)
{
tree expr = *expr_p;
tree addr_expr = TREE_OPERAND (expr, 0);
tree datype, ddatype, pddatype;
/* We simplify only conversions from an ADDR_EXPR to a pointer type. */
if (!POINTER_TYPE_P (TREE_TYPE (expr))
|| TREE_CODE (addr_expr) != ADDR_EXPR)
return;
/* The addr_expr type should be a pointer to an array. */
datype = TREE_TYPE (TREE_TYPE (addr_expr));
if (TREE_CODE (datype) != ARRAY_TYPE)
return;
/* The pointer to element type shall be trivially convertible to
the expression pointer type. */
ddatype = TREE_TYPE (datype);
pddatype = build_pointer_type (ddatype);
if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
pddatype))
return;
/* The lower bound and element sizes must be constant. */
if (!TYPE_SIZE_UNIT (ddatype)
|| TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
|| !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
|| TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
return;
/* All checks succeeded. Build a new node to merge the cast. */
*expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
NULL_TREE, NULL_TREE);
*expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
/* We can have stripped a required restrict qualifier above. */
if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
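/* A concrete (hedged) example of the transformation described above:

int a[8];
int *p = (int *) &a;

Here the cast of &a from "int (*)[8]" to "int *" is folded into the address
computation, producing &a[0]: the lower bound (0) and the element size are
INTEGER_CSTs, and "int *" is trivially convertible to the type of &a[0]. */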
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
underneath as appropriate. */
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
location_t loc = EXPR_LOCATION (*expr_p);
gcc_assert (CONVERT_EXPR_P (*expr_p));
/* Then strip away all but the outermost conversion. */
STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
/* And remove the outermost conversion if it's useless. */
if (tree_ssa_useless_type_conversion (*expr_p))
*expr_p = TREE_OPERAND (*expr_p, 0);
/* If we still have a conversion at the toplevel,
then canonicalize some constructs. */
if (CONVERT_EXPR_P (*expr_p))
{
tree sub = TREE_OPERAND (*expr_p, 0);
/* If a NOP conversion is changing the type of a COMPONENT_REF
expression, then canonicalize its type now in order to expose more
redundant conversions. */
if (TREE_CODE (sub) == COMPONENT_REF)
canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
/* If a NOP conversion is changing a pointer to array of foo
to a pointer to foo, embed that change in the ADDR_EXPR. */
else if (TREE_CODE (sub) == ADDR_EXPR)
canonicalize_addr_expr (expr_p);
}
/* If we have a conversion to a non-register type force the
use of a VIEW_CONVERT_EXPR instead. */
if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
*expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
TREE_OPERAND (*expr_p, 0));
/* Canonicalize CONVERT_EXPR to NOP_EXPR. */
if (TREE_CODE (*expr_p) == CONVERT_EXPR)
TREE_SET_CODE (*expr_p, NOP_EXPR);
return GS_OK;
}
/* Nonlocal VLAs seen in the current function. */
static hash_set<tree> *nonlocal_vlas;
/* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
static tree nonlocal_vla_vars;
/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
DECL_VALUE_EXPR, and it's worth re-examining things. */
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
tree decl = *expr_p;
/* ??? If this is a local variable, and it has not been seen in any
outer BIND_EXPR, then it's probably the result of a duplicate
declaration, for which we've already issued an error. It would
be really nice if the front end wouldn't leak these at all.
Currently the only known culprit is C++ destructors, as seen
in g++.old-deja/g++.jason/binding.C. */
if (VAR_P (decl)
&& !DECL_SEEN_IN_BIND_EXPR_P (decl)
&& !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
&& decl_function_context (decl) == current_function_decl)
{
return GS_ERROR;
}
/* When within an OMP context, notice uses of variables. */
if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
return GS_ALL_DONE;
/* If the decl is an alias for another expression, substitute it now. */
if (DECL_HAS_VALUE_EXPR_P (decl))
{
tree value_expr = DECL_VALUE_EXPR (decl);
/* For referenced nonlocal VLAs add a decl for debugging purposes
to the current function. */
if (VAR_P (decl)
&& TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
&& nonlocal_vlas != NULL
&& TREE_CODE (value_expr) == INDIRECT_REF
&& TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
&& decl_function_context (decl) != current_function_decl)
{
struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
while (ctx
&& (ctx->region_type == ORT_WORKSHARE
|| ctx->region_type == ORT_SIMD
|| ctx->region_type == ORT_ACC))
ctx = ctx->outer_context;
if (!ctx && !nonlocal_vlas->add (decl))
{
tree copy = copy_node (decl);
lang_hooks.dup_lang_specific_decl (copy);
SET_DECL_RTL (copy, 0);
TREE_USED (copy) = 1;
DECL_CHAIN (copy) = nonlocal_vla_vars;
nonlocal_vla_vars = copy;
SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
DECL_HAS_VALUE_EXPR_P (copy) = 1;
}
}
*expr_p = unshare_expr (value_expr);
return GS_OK;
}
return GS_ALL_DONE;
}
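/* A rough example of the DECL_VALUE_EXPR substitution above: for a VLA such
as "int a[n];" the front end typically gives the VAR_DECL "a" a
DECL_VALUE_EXPR of the form *a.ptr, where a.ptr points to the allocated
storage (the name "a.ptr" is only illustrative); gimplifying a use of "a"
then replaces it with an unshared copy of that indirection. */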
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
static void
recalculate_side_effects (tree t)
{
enum tree_code code = TREE_CODE (t);
int len = TREE_OPERAND_LENGTH (t);
int i;
switch (TREE_CODE_CLASS (code))
{
case tcc_expression:
switch (code)
{
case INIT_EXPR:
case MODIFY_EXPR:
case VA_ARG_EXPR:
case PREDECREMENT_EXPR:
case PREINCREMENT_EXPR:
case POSTDECREMENT_EXPR:
case POSTINCREMENT_EXPR:
/* All of these have side-effects, no matter what their
operands are. */
return;
default:
break;
}
/* Fall through. */
case tcc_comparison: /* a comparison expression */
case tcc_unary: /* a unary arithmetic expression */
case tcc_binary: /* a binary arithmetic expression */
case tcc_reference: /* a reference */
case tcc_vl_exp: /* a function call */
TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
for (i = 0; i < len; ++i)
{
tree op = TREE_OPERAND (t, i);
if (op && TREE_SIDE_EFFECTS (op))
TREE_SIDE_EFFECTS (t) = 1;
}
break;
case tcc_constant:
/* No side-effects. */
return;
default:
gcc_unreachable ();
}
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
node *EXPR_P.
compound_lval
: min_lval '[' val ']'
| min_lval '.' ID
| compound_lval '[' val ']'
| compound_lval '.' ID
This is not part of the original SIMPLE definition, which separates
array and member references, but it seems reasonable to handle them
together. Also, this way we don't run into problems with union
aliasing; gcc requires that for accesses through a union to alias, the
union reference must be explicit, which was not always the case when we
were splitting up array and member refs.
PRE_P points to the sequence where side effects that must happen before
*EXPR_P should be stored.
POST_P points to the sequence where side effects that must happen after
*EXPR_P should be stored. */
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
fallback_t fallback)
{
tree *p;
enum gimplify_status ret = GS_ALL_DONE, tret;
int i;
location_t loc = EXPR_LOCATION (*expr_p);
tree expr = *expr_p;
/* Create a stack of the subexpressions so later we can walk them in
order from inner to outer. */
auto_vec<tree, 10> expr_stack;
/* We can handle anything that get_inner_reference can deal with. */
for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
{
restart:
/* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
if (TREE_CODE (*p) == INDIRECT_REF)
*p = fold_indirect_ref_loc (loc, *p);
if (handled_component_p (*p))
;
/* Expand DECL_VALUE_EXPR now. In some cases that may expose
additional COMPONENT_REFs. */
else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
&& gimplify_var_or_parm_decl (p) == GS_OK)
goto restart;
else
break;
expr_stack.safe_push (*p);
}
gcc_assert (expr_stack.length ());
/* Now EXPR_STACK is a stack of pointers to all the refs we've
walked through and P points to the innermost expression.
Java requires that we elaborated nodes in source order. That
means we must gimplify the inner expression followed by each of
the indices, in order. But we can't gimplify the inner
expression until we deal with any variable bounds, sizes, or
positions in order to deal with PLACEHOLDER_EXPRs.
So we do this in three steps. First we deal with the annotations
for any variables in the components, then we gimplify the base,
then we gimplify any indices, from left to right. */
for (i = expr_stack.length () - 1; i >= 0; i--)
{
tree t = expr_stack[i];
if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
{
/* Gimplify the low bound and element type size and put them into
the ARRAY_REF. If these values are set, they have already been
gimplified. */
if (TREE_OPERAND (t, 2) == NULL_TREE)
{
tree low = unshare_expr (array_ref_low_bound (t));
if (!is_gimple_min_invariant (low))
{
TREE_OPERAND (t, 2) = low;
tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
post_p, is_gimple_reg,
fb_rvalue);
ret = MIN (ret, tret);
}
}
else
{
tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
is_gimple_reg, fb_rvalue);
ret = MIN (ret, tret);
}
if (TREE_OPERAND (t, 3) == NULL_TREE)
{
tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
tree elmt_size = unshare_expr (array_ref_element_size (t));
tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
/* Divide the element size by the alignment of the element
type (above). */
elmt_size
= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
if (!is_gimple_min_invariant (elmt_size))
{
TREE_OPERAND (t, 3) = elmt_size;
tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
post_p, is_gimple_reg,
fb_rvalue);
ret = MIN (ret, tret);
}
}
else
{
tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
is_gimple_reg, fb_rvalue);
ret = MIN (ret, tret);
}
}
else if (TREE_CODE (t) == COMPONENT_REF)
{
/* Set the field offset into T and gimplify it. */
if (TREE_OPERAND (t, 2) == NULL_TREE)
{
tree offset = unshare_expr (component_ref_field_offset (t));
tree field = TREE_OPERAND (t, 1);
tree factor
= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
/* Divide the offset by its alignment. */
offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
if (!is_gimple_min_invariant (offset))
{
TREE_OPERAND (t, 2) = offset;
tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
post_p, is_gimple_reg,
fb_rvalue);
ret = MIN (ret, tret);
}
}
else
{
tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
is_gimple_reg, fb_rvalue);
ret = MIN (ret, tret);
}
}
}
/* Step 2 is to gimplify the base expression. Make sure lvalue is set
so as to match the min_lval predicate. Failure to do so may result
in the creation of large aggregate temporaries. */
tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
fallback | fb_lvalue);
ret = MIN (ret, tret);
/* And finally, the indices and operands of ARRAY_REF. During this
loop we also remove any useless conversions. */
for (; expr_stack.length () > 0; )
{
tree t = expr_stack.pop ();
if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
{
/* Gimplify the dimension. */
if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
{
tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
is_gimple_val, fb_rvalue);
ret = MIN (ret, tret);
}
}
STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
/* The innermost expression P may have originally had
TREE_SIDE_EFFECTS set which would have caused all the outer
expressions in *EXPR_P leading to P to also have had
TREE_SIDE_EFFECTS set. */
recalculate_side_effects (t);
}
/* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
{
canonicalize_component_ref (expr_p);
}
expr_stack.release ();
gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
return ret;
}
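/* By way of illustration (hypothetical source, not from the compiler): for
an lvalue such as

x.a[i].b

the loop above pushes the COMPONENT_REF ".b", the ARRAY_REF "[i]" and the
COMPONENT_REF ".a" onto expr_stack, gimplifies any variable bounds, sizes
and offsets first, then the base "x", and finally the index "i", so that
side effects end up in PRE_P in source order. */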
/* Gimplify the self modifying expression pointed to by EXPR_P
(++, --, +=, -=).
PRE_P points to the list where side effects that must happen before
*EXPR_P should be stored.
POST_P points to the list where side effects that must happen after
*EXPR_P should be stored.
WANT_VALUE is nonzero iff we want to use the value of this expression
in another expression.
ARITH_TYPE is the type the computation should be performed in. */
enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
bool want_value, tree arith_type)
{
enum tree_code code;
tree lhs, lvalue, rhs, t1;
gimple_seq post = NULL, *orig_post_p = post_p;
bool postfix;
enum tree_code arith_code;
enum gimplify_status ret;
location_t loc = EXPR_LOCATION (*expr_p);
code = TREE_CODE (*expr_p);
gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
|| code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
/* Prefix or postfix? */
if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
/* Faster to treat as prefix if result is not used. */
postfix = want_value;
else
postfix = false;
/* For postfix, make sure the inner expression's post side effects
are executed after side effects from this expression. */
if (postfix)
post_p = &post;
/* Add or subtract? */
if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
arith_code = PLUS_EXPR;
else
arith_code = MINUS_EXPR;
/* Gimplify the LHS into a GIMPLE lvalue. */
lvalue = TREE_OPERAND (*expr_p, 0);
ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
if (ret == GS_ERROR)
return ret;
/* Extract the operands to the arithmetic operation. */
lhs = lvalue;
rhs = TREE_OPERAND (*expr_p, 1);
/* For postfix operator, we evaluate the LHS to an rvalue and then use
that as the result value and in the postqueue operation. */
if (postfix)
{
ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
if (ret == GS_ERROR)
return ret;
lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
}
/* For POINTERs increment, use POINTER_PLUS_EXPR. */
if (POINTER_TYPE_P (TREE_TYPE (lhs)))
{
rhs = convert_to_ptrofftype_loc (loc, rhs);
if (arith_code == MINUS_EXPR)
rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
}
else
t1 = fold_convert (TREE_TYPE (*expr_p),
fold_build2 (arith_code, arith_type,
fold_convert (arith_type, lhs),
fold_convert (arith_type, rhs)));
if (postfix)
{
gimplify_assign (lvalue, t1, pre_p);
gimplify_seq_add_seq (orig_post_p, post);
*expr_p = lhs;
return GS_ALL_DONE;
}
else
{
*expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
return GS_OK;
}
}
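/* Sketch of the resulting GIMPLE (temporary names are made up): for a used
postfix increment such as "y = x++;" the value of x is saved first,

x.tmp = x;
x = x.tmp + 1;
y = x.tmp;

while for a pointer, e.g. "p++", the adjustment is expressed with
POINTER_PLUS_EXPR and an offset of sizeof (*p) converted to the pointer
offset type (negated for decrements, as above). */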
/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
static void
maybe_with_size_expr (tree *expr_p)
{
tree expr = *expr_p;
tree type = TREE_TYPE (expr);
tree size;
/* If we've already wrapped this or the type is error_mark_node, we can't do
anything. */
if (TREE_CODE (expr) == WITH_SIZE_EXPR
|| type == error_mark_node)
return;
/* If the size isn't known or is a constant, we have nothing to do. */
size = TYPE_SIZE_UNIT (type);
if (!size || TREE_CODE (size) == INTEGER_CST)
return;
/* Otherwise, make a WITH_SIZE_EXPR. */
size = unshare_expr (size);
size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
*expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}
/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
Store any side-effects in PRE_P. CALL_LOCATION is the location of
the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
gimplified to an SSA name. */
enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
bool allow_ssa)