/* Tree lowering pass. This pass converts the GENERIC functions-as-trees
tree representation into the GIMPLE form.
Copyright (C) 2002-2022 Free Software Foundation, Inc.
Major work done by Sebastian Pop <s.pop@laposte.net>,
Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "rtl.h"
#include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-low.h"
#include "gomp-constants.h"
#include "gimple-walk.h"
#include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "omp-offload.h"
#include "context.h"
/* Hash set of poisoned variables in a bind expr. */
static hash_set<tree> *asan_poisoned_variables = NULL;
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
GOVD_EXPLICIT = 0x000002,
GOVD_SHARED = 0x000004,
GOVD_PRIVATE = 0x000008,
GOVD_FIRSTPRIVATE = 0x000010,
GOVD_LASTPRIVATE = 0x000020,
GOVD_REDUCTION = 0x000040,
GOVD_LOCAL = 0x00080,
GOVD_MAP = 0x000100,
GOVD_DEBUG_PRIVATE = 0x000200,
GOVD_PRIVATE_OUTER_REF = 0x000400,
GOVD_LINEAR = 0x000800,
GOVD_ALIGNED = 0x001000,
/* Flag for GOVD_MAP: don't copy back. */
GOVD_MAP_TO_ONLY = 0x002000,
/* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,
GOVD_MAP_0LEN_ARRAY = 0x008000,
/* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
GOVD_MAP_ALWAYS_TO = 0x010000,
/* Flag for shared vars that are or might be stored to in the region. */
GOVD_WRITTEN = 0x020000,
/* Flag for GOVD_MAP, if it is a forced mapping. */
GOVD_MAP_FORCE = 0x040000,
/* Flag for GOVD_MAP: must be present already. */
GOVD_MAP_FORCE_PRESENT = 0x080000,
/* Flag for GOVD_MAP: only allocate. */
GOVD_MAP_ALLOC_ONLY = 0x100000,
/* Flag for GOVD_MAP: only copy back. */
GOVD_MAP_FROM_ONLY = 0x200000,
GOVD_NONTEMPORAL = 0x400000,
/* Flag for GOVD_LASTPRIVATE: conditional modifier. */
GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,
GOVD_CONDTEMP = 0x1000000,
/* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
GOVD_REDUCTION_INSCAN = 0x2000000,
/* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT. */
GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,
GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			  | GOVD_LOCAL)
};
enum omp_region_type
{
ORT_WORKSHARE = 0x00,
ORT_TASKGROUP = 0x01,
ORT_SIMD = 0x04,
ORT_PARALLEL = 0x08,
ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
ORT_TASK = 0x10,
ORT_UNTIED_TASK = ORT_TASK | 1,
ORT_TASKLOOP = ORT_TASK | 2,
ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,
ORT_TEAMS = 0x20,
ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
ORT_HOST_TEAMS = ORT_TEAMS | 2,
ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,
/* Data region. */
ORT_TARGET_DATA = 0x40,
/* Data region with offloading. */
ORT_TARGET = 0x80,
ORT_COMBINED_TARGET = ORT_TARGET | 1,
ORT_IMPLICIT_TARGET = ORT_TARGET | 2,
/* OpenACC variants. */
ORT_ACC = 0x100, /* A generic OpenACC region. */
ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2, /* Kernels construct. */
ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4, /* Serial construct. */
ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data. */
  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
/* Gimplify hashtable helper. */
struct gimplify_hasher : free_ptr_hash <elt_t>
{
static inline hashval_t hash (const elt_t *);
static inline bool equal (const elt_t *, const elt_t *);
};
struct gimplify_ctx
{
struct gimplify_ctx *prev_context;
vec<gbind *> bind_expr_stack;
tree temps;
gimple_seq conditional_cleanups;
tree exit_label;
tree return_temp;
vec<tree> case_labels;
hash_set<tree> *live_switch_vars;
/* The formal temporary table. Should this be persistent? */
hash_table<gimplify_hasher> *temp_htab;
int conditions;
unsigned into_ssa : 1;
unsigned allow_rhs_cond_expr : 1;
unsigned in_cleanup_point_expr : 1;
unsigned keep_stack : 1;
unsigned save_stack : 1;
};
enum gimplify_defaultmap_kind
{
GDMK_SCALAR,
GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only. */
GDMK_AGGREGATE,
GDMK_ALLOCATABLE,
GDMK_POINTER
};
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
/* Iteration variables in an OMP_FOR. */
vec<tree> loop_iter_var;
location_t location;
enum omp_clause_default_kind default_kind;
enum omp_region_type region_type;
enum tree_code code;
bool target_firstprivatize_array_bases;
bool add_safelen1;
bool order_concurrent;
bool has_depend;
bool in_for_exprs;
int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
/* Forward declaration. */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);
/* Shorter alias name for the above function for use in gimplify.cc
only. */
static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
gimple_seq_add_stmt_without_update (seq_p, gs);
}
/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
NULL, a new sequence is allocated. This function is
similar to gimple_seq_add_seq, but does not scan the operands.
During gimplification, we need to manipulate statement sequences
before the def/use vectors have been constructed. */
static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
gimple_stmt_iterator si;
if (src == NULL)
return;
si = gsi_last (*dst_p);
gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
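/* A minimal usage sketch (illustrative only; `stmt' and `other_seq' are
   hypothetical objects built elsewhere):

     gimple_seq pre = NULL;
     gimplify_seq_add_stmt (&pre, stmt);      // append one tuple
     gimplify_seq_add_seq (&pre, other_seq);  // splice a whole sequence

   Both helpers deliberately skip the operand update that the generic
   gimple_seq_add_* routines perform, since def/use information does not
   exist yet at gimplification time.  */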
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
and popping gimplify contexts. */
static struct gimplify_ctx *ctx_pool = NULL;
/* Return a gimplify context struct from the pool. */
static inline struct gimplify_ctx *
ctx_alloc (void)
{
struct gimplify_ctx * c = ctx_pool;
if (c)
ctx_pool = c->prev_context;
else
c = XNEW (struct gimplify_ctx);
memset (c, '\0', sizeof (*c));
return c;
}
/* Put gimplify context C back into the pool. */
static inline void
ctx_free (struct gimplify_ctx *c)
{
c->prev_context = ctx_pool;
ctx_pool = c;
}
/* Free allocated ctx stack memory. */
void
free_gimplify_stack (void)
{
struct gimplify_ctx *c;
while ((c = ctx_pool))
{
ctx_pool = c->prev_context;
free (c);
}
}
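/* Illustrative life cycle of the pool above (the sequencing shown is the
   expected usage pattern, not an enforced API contract):

     push_gimplify_context ();     // ctx_alloc: pop a ctx off ctx_pool,
                                   // or XNEW a fresh one
     ... gimplify a function ...
     pop_gimplify_context (bind);  // ctx_free: push the ctx back on the pool
     free_gimplify_stack ();       // once at the end: free all pooled ctxs

   Recycling the structs avoids a malloc/free pair per gimplified
   function.  */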
/* Set up a context for the gimplifier. */
void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();
c->prev_context = gimplify_ctxp;
gimplify_ctxp = c;
gimplify_ctxp->into_ssa = in_ssa;
gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}
/* Tear down a context for the gimplifier. If BODY is non-null, then
put the temporaries into the outer BIND_EXPR. Otherwise, put them
in the local_decls.
BODY is not a sequence, but the first tuple in a sequence. */
void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
/* Push a GIMPLE_BIND tuple onto the stack of bindings. */
static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
/* Pop the first element off the stack of bindings. */
static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
/* Return the first element of the stack of bindings. */
gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
/* Return the stack of bindings created during gimplification. */
vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
/* Return true iff there is a COND_EXPR between us and the innermost
CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
static bool
gimple_conditional_context (void)
{
return gimplify_ctxp->conditions > 0;
}
/* Note that we've entered a COND_EXPR. */
static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
if (gimplify_ctxp->conditions == 0)
gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
++(gimplify_ctxp->conditions);
}
/* Note that we've left a COND_EXPR. If we're back at unconditional scope
now, add any conditional cleanups we've seen to the prequeue. */
static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
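/* Illustrative pairing of the two functions above while gimplifying one
   arm of a COND_EXPR (a sketch, not a verbatim caller; `pre' is a
   hypothetical prequeue):

     gimple_push_condition ();
     ... gimplify the arm, possibly queueing conditional cleanups ...
     gimple_pop_condition (&pre);  // flushes cleanups only at depth 0

   Nested COND_EXPRs just bump the counter, so the accumulated cleanups
   are emitted exactly once, at the outermost conditional boundary.  */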
/* A stable comparison routine for use with splay trees and DECLs. */
static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
tree a = (tree) xa;
tree b = (tree) xb;
return DECL_UID (a) - DECL_UID (b);
}
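/* Keying on DECL_UID rather than on pointer values keeps the splay
   tree's ordering deterministic across runs: e.g. decls with UIDs 5 and
   12 always compare as 5 - 12 = -7, independent of where the garbage
   collector happened to place the nodes.  */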
/* Create a new omp construct that deals with variable remapping. */
static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
struct gimplify_omp_ctx *c;
c = XCNEW (struct gimplify_omp_ctx);
c->outer_context = gimplify_omp_ctxp;
c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
c->privatized_types = new hash_set<tree>;
c->region_type = region_type;
if ((region_type & ORT_TASK) == 0)
c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
else
c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
c->defaultmap[GDMK_POINTER] = GOVD_MAP;
return c;
}
/* Destroy an omp construct that deals with variable remapping. */
static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
XDELETE (c);
}
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
/* Both gimplify the statement T and append it to *SEQ_P. This function
behaves exactly as gimplify_stmt, but you don't have to pass T as a
reference. */
void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
gimplify_stmt (&t, seq_p);
}
/* Gimplify statement T into sequence *SEQ_P, and return the first
tuple in the sequence of generated tuples for this statement.
Return NULL if gimplifying T produced no tuples. */
static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

gimplify_and_add (t, seq_p);
if (!gsi_end_p (last))
{
gsi_next (&last);
return gsi_stmt (last);
}
else
    return gimple_seq_first_stmt (*seq_p);
}
/* Returns true iff T is a valid RHS for an assignment to an un-renamed
LHS, or for a call argument. */
static bool
is_gimple_mem_rhs (tree t)
{
/* If we're dealing with a renamable type, either source or dest must be
a renamed variable. */
if (is_gimple_reg_type (TREE_TYPE (t)))
return is_gimple_val (t);
else
return is_gimple_val (t) || is_gimple_lvalue (t);
}
/* Return true if T is a CALL_EXPR or an expression that can be
assigned to a temporary. Note that this predicate should only be
used during gimplification. See the rationale for this in
gimplify_modify_expr. */
static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
|| TREE_CODE (t) == CALL_EXPR);
}
/* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
this predicate should only be used during gimplification. See the
rationale for this in gimplify_modify_expr. */
static bool
is_gimple_mem_rhs_or_call (tree t)
{
/* If we're dealing with a renamable type, either source or dest must be
a renamed variable. */
if (is_gimple_reg_type (TREE_TYPE (t)))
return is_gimple_val (t);
else
return (is_gimple_val (t)
|| is_gimple_lvalue (t)
|| TREE_CLOBBER_P (t)
|| TREE_CODE (t) == CALL_EXPR);
}
/* Create a temporary with a name derived from VAL. Subroutine of
lookup_tmp_var; nobody else should call this function. */
static inline tree
create_tmp_from_val (tree val)
{
/* Drop all qualifiers and address-space information from the value type. */
tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
tree var = create_tmp_var (type, get_name (val));
return var;
}
/* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
an existing expression temporary. If NOT_GIMPLE_REG, mark it as such. */
static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
tree ret;
/* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P. */
gcc_assert (!is_formal || !not_gimple_reg);
/* If not optimizing, never really reuse a temporary. local-alloc
won't allocate any variable that is used in more than one basic
block, which means it will go into memory, causing much extra
work in reload and final and poorer code generation, outweighing
the extra memory allocation here. */
if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
{
ret = create_tmp_from_val (val);
DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
}
else
{
elt_t elt, *elt_p;
elt_t **slot;
elt.val = val;
if (!gimplify_ctxp->temp_htab)
gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
if (*slot == NULL)
{
elt_p = XNEW (elt_t);
elt_p->val = val;
elt_p->temp = ret = create_tmp_from_val (val);
*slot = elt_p;
}
else
{
elt_p = *slot;
ret = elt_p->temp;
}
}
return ret;
}
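/* For example (illustrative; `expr' stands for some side-effect-free
   tree): when optimizing, two formal-temp lookups for equal values share
   a single variable,

     tree t1 = lookup_tmp_var (expr, true, false);
     tree t2 = lookup_tmp_var (expr, true, false);  // t2 == t1 via temp_htab

   while at -O0, or for values with TREE_SIDE_EFFECTS, every call creates
   a fresh temporary, for the reasons given in the comment above.  */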
/* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

/* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
fb_rvalue);
if (allow_ssa
&& gimplify_ctxp->into_ssa
&& is_gimple_reg_type (TREE_TYPE (val)))
{
t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
if (! gimple_in_ssa_p (cfun))
{
const char *name = get_name (val);
if (name)
SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
}
}
else
t = lookup_tmp_var (val, is_formal, not_gimple_reg);
mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
/* gimplify_modify_expr might want to reduce this further. */
gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
/* Return a formal temporary variable initialized with VAL. PRE_P is as
in gimplify_expr. Only use this function if:
1) The value of the unfactored expression represented by VAL will not
change between the initialization and use of the temporary, and
2) The temporary will not be otherwise modified.
For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
and #2 means it is inappropriate for && temps.
For other cases, use get_initialized_tmp_var instead. */
tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}
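/* Concretely, condition #1 above rules out something like
   SAVE_EXPR (i++): once the side effect has run, re-evaluating the
   unfactored expression would yield a different value, so a formal
   temporary keyed on that expression would be wrong.  */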
/* Return a temporary variable initialized with VAL. PRE_P and POST_P
are as in gimplify_expr. */
tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
gimple_seq *post_p /* = NULL */,
bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}
/* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
generate debug info for them; otherwise don't. */
void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
tree last = vars;
if (last)
{
tree temps, block;
gbind *scope = as_a <gbind *> (gs);
temps = nreverse (last);
block = gimple_bind_block (scope);
gcc_assert (!block || TREE_CODE (block) == BLOCK);
if (!block || !debug_info)
{
DECL_CHAIN (last) = gimple_bind_vars (scope);
gimple_bind_set_vars (scope, temps);
}
else
{
/* We need to attach the nodes both to the BIND_EXPR and to its
associated BLOCK for debugging purposes. The key point here
is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
if (BLOCK_VARS (block))
BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
else
{
gimple_bind_set_vars (scope,
chainon (gimple_bind_vars (scope), temps));
BLOCK_VARS (block) = temps;
}
}
    }
}
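/* Sketch of why both branches above keep the invariant: BLOCK_VARS of
   the BLOCK must remain a subchain of the BIND_EXPR's vars.  When
   BLOCK_VARS already exists it shares its tail with the bind vars, so
   chaining TEMPS onto it extends both lists at once; otherwise TEMPS is
   appended to the bind vars and BLOCK_VARS is pointed at that appended
   tail.  */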
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
no such upper bound can be obtained. */
static void
force_constant_size (tree var)
{
/* The only attempt we make is by querying the maximum size of objects
of the variable's type. */
HOST_WIDE_INT max_size;
gcc_assert (VAR_P (var));
max_size = max_int_size_in_bytes (TREE_TYPE (var));
gcc_assert (max_size >= 0);
DECL_SIZE_UNIT (var)
= build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
DECL_SIZE (var)
= build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
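/* For instance (illustrative), a variable whose nominal size expression
   is not constant but whose type caps objects at 64 bytes according to
   max_int_size_in_bytes ends up with DECL_SIZE_UNIT 64 and DECL_SIZE
   64 * BITS_PER_UNIT.  */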
/* Push the temporary variable TMP into the current binding. */
void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
/* Later processing assumes that the object size is constant, which might
not be true at this point. Force the use of a constant upper bound in
this case. */
if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
force_constant_size (tmp);
DECL_CONTEXT (tmp) = fn->decl;
DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
record_vars_into (tmp, fn->decl);
}
/* Push the temporary variable TMP into the current binding. */
void
gimple_add_tmp_var (tree tmp)
{
gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
/* Later processing assumes that the object size is constant, which might
not be true at this point. Force the use of a constant upper bound in
this case. */
if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
force_constant_size (tmp);
DECL_CONTEXT (tmp) = current_function_decl;
DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
if (gimplify_ctxp)
{
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;
/* Mark temporaries local within the nearest enclosing parallel. */
if (gimplify_omp_ctxp)
{
struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
int flag = GOVD_LOCAL | GOVD_SEEN;
while (ctx
&& (ctx->region_type == ORT_WORKSHARE
|| ctx->region_type == ORT_TASKGROUP
|| ctx->region_type == ORT_SIMD
|| ctx->region_type == ORT_ACC))
{
if (ctx->region_type == ORT_SIMD
&& TREE_ADDRESSABLE (tmp)
&& !TREE_STATIC (tmp))
{
if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
ctx->add_safelen1 = true;
else if (ctx->in_for_exprs)
flag = GOVD_PRIVATE;
else
flag = GOVD_PRIVATE | GOVD_SEEN;
break;
}
ctx = ctx->outer_context;
}
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
else if (cfun)
record_vars (tmp);
else
{
gimple_seq body_seq;
/* This case is for nested functions. We need to expose the locals
they create. */
body_seq = gimple_body (current_function_decl);
declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
}
}
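/* Typical use (a sketch that mirrors what create_tmp_var does
   internally; the "sketch" prefix is hypothetical):

     tree tmp = create_tmp_var_raw (type, "sketch");  // build the decl only
     gimple_add_tmp_var (tmp);                        // then register it

   Within an active gimplify context this chains TMP onto ctx->temps;
   inside SIMD or parallel regions it additionally records the
   appropriate data-sharing flag via omp_add_variable.  */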
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
nodes that are referenced more than once in GENERIC functions. This is
necessary because gimplification (translation into GIMPLE) is performed
by modifying tree nodes in-place, so gimplification of a shared node in a
first context could generate an invalid GIMPLE form in a second context.
This is achieved with a simple mark/copy/unmark algorithm that walks the
GENERIC representation top-down, marks nodes with TREE_VISITED the first
time it encounters them, duplicates them if they already have TREE_VISITED
set, and finally removes the TREE_VISITED marks it has set.
The algorithm works only at the function level, i.e. it generates a GENERIC
representation of a function with no nodes shared within the function when
passed a GENERIC function (except for nodes that are allowed to be shared).
At the global level, it is also necessary to unshare tree nodes that are
referenced in more than one function, for the same aforementioned reason.
This requires some cooperation from the front-end. There are 2 strategies:
1. Manual unsharing. The front-end needs to call unshare_expr on every
expression that might end up being shared across functions.
2. Deep unsharing. This is an extension of regular unsharing. Instead
of calling unshare_expr on expressions that might be shared across
functions, the front-end pre-marks them with TREE_VISITED. This will
ensure that they are unshared on the first reference within functions
when the regular unsharing algorithm runs. The counterpart is that
this algorithm must look deeper than for manual unsharing, which is
specified by LANG_HOOKS_DEEP_UNSHARING.
If there are only a few specific cases of node sharing across functions, it is
probably easier for a front-end to unshare the expressions manually. On the
contrary, if the expressions generated at the global level are as widespread
as expressions generated within functions, deep unsharing is very likely the
way to go. */
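/* A small illustration of the mark/copy/unmark scheme: suppose a front
   end reuses one PLUS_EXPR node in two assignments,

     a = <PLUS_EXPR x y>;
     b = <PLUS_EXPR x y>;   // the very same node, not a fresh one

   The first walk sets TREE_VISITED on the node; on the second encounter
   copy_if_shared_r sees the mark and duplicates the expression through
   mostly_copy_tree_r; a final walk clears TREE_VISITED so later passes
   start from clean flags.  (Decls, types and constants are among the
   nodes allowed to stay shared.)  */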
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
These nodes model computations that must be done once. If we were to
unshare something like SAVE_EXPR(i++), the gimplification process would
create wrong code. However, if DATA is non-null, it must hold a pointer
set that is used to unshare the subtrees of these nodes. */
static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
tree t = *tp;
enum tree_code code = TREE_CODE (t);
/* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
copy their subtrees if we can make sure to do it only once. */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);
return NULL_TREE;
}
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
If *TP has been visited already, then *TP is deeply copied by calling
mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
enum tree_code code = TREE_CODE (t);
/* Skip types, decls, and constants. But we do want to look at their
types and the bounds of types. Mark them as visited so we properly
unmark their subtrees on the unmark pass. If we've already seen them,
don't look down further. */
if (TREE_CODE_CLASS (code) == tcc_type
|| TREE_CODE_CLASS (code) == tcc_declaration
|| TREE_CODE_CLASS (code) == tcc_constant)
{
if (TREE_VISITED (t))
*walk_subtrees = 0;
else
TREE_VISITED (t) = 1;
}
/* If this node has been visited already, unshare it and don't look
any deeper. */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;
return NULL_TREE;
}
/* Unshare most of the shared trees rooted at *TP. DATA is passed to the
copy_if_shared_r callback unmodified. */
static void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
any nested functions. */
static void
unshare_body (tree fndecl)
{
struct cgraph_node *cgn = cgraph_node::get (fndecl);
/* If the language requires deep unsharing, we need a pointer set to make
sure we don't repeatedly unshare subtrees of unshareable nodes. */
hash_set<tree> *visited
= lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
Subtrees are walked until the first unvisited node is encountered. */
static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
tree t = *tp;
/* If this node has been visited, unmark it and keep looking. */
if (TREE_VISITED (t))
TREE_VISITED (t) = 0;
/* Otherwise, don't look any deeper. */
else
*walk_subtrees = 0;
return NULL_TREE;
}
/* Unmark the visited trees rooted at *TP.  */