| 1 | /* Lowering pass for OMP directives. Converts OMP directives into explicit |
| 2 | calls to the runtime library (libgomp), data marshalling to implement data |
| 3 | sharing and copying clauses, offloading to accelerators, and more. |
| 4 | |
| 5 | Contributed by Diego Novillo <dnovillo@redhat.com> |
| 6 | |
| 7 | Copyright (C) 2005-2026 Free Software Foundation, Inc. |
| 8 | |
| 9 | This file is part of GCC. |
| 10 | |
| 11 | GCC is free software; you can redistribute it and/or modify it under |
| 12 | the terms of the GNU General Public License as published by the Free |
| 13 | Software Foundation; either version 3, or (at your option) any later |
| 14 | version. |
| 15 | |
| 16 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| 17 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| 18 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| 19 | for more details. |
| 20 | |
| 21 | You should have received a copy of the GNU General Public License |
| 22 | along with GCC; see the file COPYING3. If not see |
| 23 | <http://www.gnu.org/licenses/>. */ |
| 24 | |
| 25 | #include "config.h" |
| 26 | #include "system.h" |
| 27 | #include "coretypes.h" |
| 28 | #include "backend.h" |
| 29 | #include "target.h" |
| 30 | #include "tree.h" |
| 31 | #include "gimple.h" |
| 32 | #include "tree-pass.h" |
| 33 | #include "ssa.h" |
| 34 | #include "cgraph.h" |
| 35 | #include "pretty-print.h" |
| 36 | #include "diagnostic-core.h" |
| 37 | #include "fold-const.h" |
| 38 | #include "stor-layout.h" |
| 39 | #include "internal-fn.h" |
| 40 | #include "gimple-iterator.h" |
| 41 | #include "gimple-fold.h" |
| 42 | #include "gimplify.h" |
| 43 | #include "gimplify-me.h" |
| 44 | #include "gimple-walk.h" |
| 45 | #include "tree-iterator.h" |
| 46 | #include "tree-inline.h" |
| 47 | #include "langhooks.h" |
| 48 | #include "tree-dfa.h" |
| 49 | #include "tree-ssa.h" |
| 50 | #include "splay-tree.h" |
| 51 | #include "omp-general.h" |
| 52 | #include "omp-low.h" |
| 53 | #include "gimple-low.h" |
| 54 | #include "alloc-pool.h" |
| 55 | #include "symbol-summary.h" |
| 56 | #include "tree-nested.h" |
| 57 | #include "context.h" |
| 58 | #include "gomp-constants.h" |
| 59 | #include "gimple-pretty-print.h" |
| 60 | #include "stringpool.h" |
| 61 | #include "attribs.h" |
| 62 | #include "omp-offload.h" |
| 63 | |
| 64 | /* Lowering of OMP parallel and workshare constructs proceeds in two |
| 65 | phases. The first phase scans the function looking for OMP statements |
| 66 | and then for variables that must be replaced to satisfy data sharing |
| 67 | clauses. The second phase expands code for the constructs, as well as |
| 68 | re-gimplifying things when variables have been replaced with complex |
| 69 | expressions. |
| 70 | |
| 71 | Final code generation is done by pass_expand_omp. The flowgraph is |
| 72 | scanned for regions which are then moved to a new |
| 73 | function, to be invoked by the thread library, or offloaded. */ |
| 74 | |
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.
     OUTER is the enclosing context (NULL at the outermost construct),
     STMT the GIMPLE statement of this construct.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
| 187 | |
/* All omp_contexts created for the current function.  */
static splay_tree all_contexts;

/* Nesting depth counters maintained while scanning; presumably the
   number of enclosing task regions resp. target regions -- confirm
   against the scan_omp_* routines.  */
static int taskreg_nesting_level;
static int target_nesting_level;

/* UIDs of decls that were not originally addressable but are made so by
   this pass (see use_pointer_for_field); all uses of such decls may need
   regimplification.  */
static bitmap make_addressable_vars;

/* UIDs of global decls first seen as non-addressable; those keep being
   treated as non-addressable for the rest of the pass even if made
   addressable later, see PR91216 note in use_pointer_for_field.  */
static bitmap global_nonaddressable_vars;

/* Task-region contexts and task statements collected during scanning.
   NOTE(review): producers/consumers are outside this chunk.  */
static vec<omp_context *> taskreg_contexts;
static vec<gomp_task *> task_cpyfns;

/* Forward declarations for the scanning phase.  */
static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
static bool omp_maybe_offloaded_ctx (omp_context *ctx);
| 199 | |
/* Common switch cases for gimple-walker callbacks in this file:
   statements that merely wrap other statements.  Clearing
   *HANDLED_OPS_P tells the generic walker to descend into their
   sub-statements instead of treating them as handled.  */
#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
  case GIMPLE_ASSUME: \
  case GIMPLE_TRANSACTION: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;
| 210 | |
| 211 | /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct. |
| 212 | (This doesn't include OpenACC 'kernels' decomposed parts.) */ |
| 213 | |
| 214 | static bool |
| 215 | is_oacc_parallel_or_serial (omp_context *ctx) |
| 216 | { |
| 217 | enum gimple_code outer_type = gimple_code (g: ctx->stmt); |
| 218 | return ((outer_type == GIMPLE_OMP_TARGET) |
| 219 | && ((gimple_omp_target_kind (g: ctx->stmt) |
| 220 | == GF_OMP_TARGET_KIND_OACC_PARALLEL) |
| 221 | || (gimple_omp_target_kind (g: ctx->stmt) |
| 222 | == GF_OMP_TARGET_KIND_OACC_SERIAL))); |
| 223 | } |
| 224 | |
| 225 | /* Return whether CTX represents an OpenACC 'kernels' construct. |
| 226 | (This doesn't include OpenACC 'kernels' decomposed parts.) */ |
| 227 | |
| 228 | static bool |
| 229 | is_oacc_kernels (omp_context *ctx) |
| 230 | { |
| 231 | enum gimple_code outer_type = gimple_code (g: ctx->stmt); |
| 232 | return ((outer_type == GIMPLE_OMP_TARGET) |
| 233 | && (gimple_omp_target_kind (g: ctx->stmt) |
| 234 | == GF_OMP_TARGET_KIND_OACC_KERNELS)); |
| 235 | } |
| 236 | |
| 237 | /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */ |
| 238 | |
| 239 | static bool |
| 240 | is_oacc_kernels_decomposed_part (omp_context *ctx) |
| 241 | { |
| 242 | enum gimple_code outer_type = gimple_code (g: ctx->stmt); |
| 243 | return ((outer_type == GIMPLE_OMP_TARGET) |
| 244 | && ((gimple_omp_target_kind (g: ctx->stmt) |
| 245 | == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED) |
| 246 | || (gimple_omp_target_kind (g: ctx->stmt) |
| 247 | == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE) |
| 248 | || (gimple_omp_target_kind (g: ctx->stmt) |
| 249 | == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS))); |
| 250 | } |
| 251 | |
| 252 | /* Return true if STMT corresponds to an OpenMP target region. */ |
| 253 | static bool |
| 254 | is_omp_target (gimple *stmt) |
| 255 | { |
| 256 | if (gimple_code (g: stmt) == GIMPLE_OMP_TARGET) |
| 257 | { |
| 258 | int kind = gimple_omp_target_kind (g: stmt); |
| 259 | return (kind == GF_OMP_TARGET_KIND_REGION |
| 260 | || kind == GF_OMP_TARGET_KIND_DATA |
| 261 | || kind == GF_OMP_TARGET_KIND_ENTER_DATA |
| 262 | || kind == GF_OMP_TARGET_KIND_EXIT_DATA); |
| 263 | } |
| 264 | return false; |
| 265 | } |
| 266 | |
| 267 | /* If DECL is the artificial dummy VAR_DECL created for non-static |
| 268 | data member privatization, return the underlying "this" parameter, |
| 269 | otherwise return NULL. */ |
| 270 | |
| 271 | tree |
| 272 | omp_member_access_dummy_var (tree decl) |
| 273 | { |
| 274 | if (!VAR_P (decl) |
| 275 | || !DECL_ARTIFICIAL (decl) |
| 276 | || !DECL_IGNORED_P (decl) |
| 277 | || !DECL_HAS_VALUE_EXPR_P (decl) |
| 278 | || !lang_hooks.decls.omp_disregard_value_expr (decl, false)) |
| 279 | return NULL_TREE; |
| 280 | |
| 281 | tree v = DECL_VALUE_EXPR (decl); |
| 282 | if (TREE_CODE (v) != COMPONENT_REF) |
| 283 | return NULL_TREE; |
| 284 | |
| 285 | while (1) |
| 286 | switch (TREE_CODE (v)) |
| 287 | { |
| 288 | case COMPONENT_REF: |
| 289 | case MEM_REF: |
| 290 | case INDIRECT_REF: |
| 291 | CASE_CONVERT: |
| 292 | case POINTER_PLUS_EXPR: |
| 293 | v = TREE_OPERAND (v, 0); |
| 294 | continue; |
| 295 | case PARM_DECL: |
| 296 | if (DECL_CONTEXT (v) == current_function_decl |
| 297 | && DECL_ARTIFICIAL (v) |
| 298 | && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE) |
| 299 | return v; |
| 300 | return NULL_TREE; |
| 301 | default: |
| 302 | return NULL_TREE; |
| 303 | } |
| 304 | } |
| 305 | |
| 306 | /* Helper for unshare_and_remap, called through walk_tree. */ |
| 307 | |
| 308 | static tree |
| 309 | unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data) |
| 310 | { |
| 311 | tree *pair = (tree *) data; |
| 312 | if (*tp == pair[0]) |
| 313 | { |
| 314 | *tp = unshare_expr (pair[1]); |
| 315 | *walk_subtrees = 0; |
| 316 | } |
| 317 | else if (IS_TYPE_OR_DECL_P (*tp)) |
| 318 | *walk_subtrees = 0; |
| 319 | return NULL_TREE; |
| 320 | } |
| 321 | |
| 322 | /* Return unshare_expr (X) with all occurrences of FROM |
| 323 | replaced with TO. */ |
| 324 | |
| 325 | static tree |
| 326 | unshare_and_remap (tree x, tree from, tree to) |
| 327 | { |
| 328 | tree pair[2] = { from, to }; |
| 329 | x = unshare_expr (x); |
| 330 | walk_tree (&x, unshare_and_remap_1, pair, NULL); |
| 331 | return x; |
| 332 | } |
| 333 | |
| 334 | /* Convenience function for calling scan_omp_1_op on tree operands. */ |
| 335 | |
| 336 | static inline tree |
| 337 | scan_omp_op (tree *tp, omp_context *ctx) |
| 338 | { |
| 339 | struct walk_stmt_info wi; |
| 340 | |
| 341 | memset (s: &wi, c: 0, n: sizeof (wi)); |
| 342 | wi.info = ctx; |
| 343 | wi.want_locations = true; |
| 344 | |
| 345 | return walk_tree (tp, scan_omp_1_op, &wi, NULL); |
| 346 | } |
| 347 | |
/* Forward declarations for the lowering phase and for context-chain
   variable lookup, defined later in this file.  */
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
| 351 | |
| 352 | /* Return true if CTX is for an omp parallel. */ |
| 353 | |
| 354 | static inline bool |
| 355 | is_parallel_ctx (omp_context *ctx) |
| 356 | { |
| 357 | return gimple_code (g: ctx->stmt) == GIMPLE_OMP_PARALLEL; |
| 358 | } |
| 359 | |
| 360 | |
| 361 | /* Return true if CTX is for an omp task. */ |
| 362 | |
| 363 | static inline bool |
| 364 | is_task_ctx (omp_context *ctx) |
| 365 | { |
| 366 | return gimple_code (g: ctx->stmt) == GIMPLE_OMP_TASK; |
| 367 | } |
| 368 | |
| 369 | |
| 370 | /* Return true if CTX is for an omp taskloop. */ |
| 371 | |
| 372 | static inline bool |
| 373 | is_taskloop_ctx (omp_context *ctx) |
| 374 | { |
| 375 | return gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR |
| 376 | && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP; |
| 377 | } |
| 378 | |
| 379 | |
| 380 | /* Return true if CTX is for a host omp teams. */ |
| 381 | |
| 382 | static inline bool |
| 383 | is_host_teams_ctx (omp_context *ctx) |
| 384 | { |
| 385 | return gimple_code (g: ctx->stmt) == GIMPLE_OMP_TEAMS |
| 386 | && gimple_omp_teams_host (omp_teams_stmt: as_a <gomp_teams *> (p: ctx->stmt)); |
| 387 | } |
| 388 | |
| 389 | /* Return true if CTX is for an omp parallel or omp task or host omp teams |
| 390 | (the last one is strictly not a task region in OpenMP speak, but we |
| 391 | need to treat it similarly). */ |
| 392 | |
| 393 | static inline bool |
| 394 | is_taskreg_ctx (omp_context *ctx) |
| 395 | { |
| 396 | return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx); |
| 397 | } |
| 398 | |
| 399 | /* Return true if EXPR is variable sized. */ |
| 400 | |
| 401 | static inline bool |
| 402 | is_variable_sized (const_tree expr) |
| 403 | { |
| 404 | return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr))); |
| 405 | } |
| 406 | |
| 407 | /* Lookup variables. The "maybe" form |
| 408 | allows for the variable form to not have been entered, otherwise we |
| 409 | assert that the variable must have been entered. */ |
| 410 | |
| 411 | static inline tree |
| 412 | lookup_decl (tree var, omp_context *ctx) |
| 413 | { |
| 414 | tree *n = ctx->cb.decl_map->get (k: var); |
| 415 | return *n; |
| 416 | } |
| 417 | |
| 418 | static inline tree |
| 419 | maybe_lookup_decl (const_tree var, omp_context *ctx) |
| 420 | { |
| 421 | tree *n = ctx->cb.decl_map->get (k: const_cast<tree> (var)); |
| 422 | return n ? *n : NULL_TREE; |
| 423 | } |
| 424 | |
| 425 | static inline tree |
| 426 | lookup_field (tree var, omp_context *ctx) |
| 427 | { |
| 428 | splay_tree_node n; |
| 429 | n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var); |
| 430 | return (tree) n->value; |
| 431 | } |
| 432 | |
| 433 | static inline tree |
| 434 | lookup_sfield (splay_tree_key key, omp_context *ctx) |
| 435 | { |
| 436 | splay_tree_node n; |
| 437 | n = splay_tree_lookup (ctx->sfield_map |
| 438 | ? ctx->sfield_map : ctx->field_map, key); |
| 439 | return (tree) n->value; |
| 440 | } |
| 441 | |
| 442 | static inline tree |
| 443 | lookup_sfield (tree var, omp_context *ctx) |
| 444 | { |
| 445 | return lookup_sfield (key: (splay_tree_key) var, ctx); |
| 446 | } |
| 447 | |
| 448 | static inline tree |
| 449 | maybe_lookup_field (splay_tree_key key, omp_context *ctx) |
| 450 | { |
| 451 | splay_tree_node n; |
| 452 | n = splay_tree_lookup (ctx->field_map, key); |
| 453 | return n ? (tree) n->value : NULL_TREE; |
| 454 | } |
| 455 | |
| 456 | static inline tree |
| 457 | maybe_lookup_field (tree var, omp_context *ctx) |
| 458 | { |
| 459 | return maybe_lookup_field (key: (splay_tree_key) var, ctx); |
| 460 | } |
| 461 | |
| 462 | /* Return true if DECL should be copied by pointer. SHARED_CTX is |
| 463 | the parallel context if DECL is to be shared. */ |
| 464 | |
| 465 | static bool |
| 466 | use_pointer_for_field (tree decl, omp_context *shared_ctx) |
| 467 | { |
| 468 | if (AGGREGATE_TYPE_P (TREE_TYPE (decl)) |
| 469 | || TYPE_ATOMIC (TREE_TYPE (decl)) |
| 470 | || POLY_INT_CST_P (DECL_SIZE (decl))) |
| 471 | return true; |
| 472 | |
| 473 | /* We can only use copy-in/copy-out semantics for shared variables |
| 474 | when we know the value is not accessible from an outer scope. */ |
| 475 | if (shared_ctx) |
| 476 | { |
| 477 | gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt)); |
| 478 | |
| 479 | /* ??? Trivially accessible from anywhere. But why would we even |
| 480 | be passing an address in this case? Should we simply assert |
| 481 | this to be false, or should we have a cleanup pass that removes |
| 482 | these from the list of mappings? */ |
| 483 | if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl, shared_ctx))) |
| 484 | return true; |
| 485 | |
| 486 | /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell |
| 487 | without analyzing the expression whether or not its location |
| 488 | is accessible to anyone else. In the case of nested parallel |
| 489 | regions it certainly may be. */ |
| 490 | if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl)) |
| 491 | return true; |
| 492 | |
| 493 | /* Do not use copy-in/copy-out for variables that have their |
| 494 | address taken. */ |
| 495 | if (is_global_var (t: decl)) |
| 496 | { |
| 497 | /* For file scope vars, track whether we've seen them as |
| 498 | non-addressable initially and in that case, keep the same |
| 499 | answer for the duration of the pass, even when they are made |
| 500 | addressable later on e.g. through reduction expansion. Global |
| 501 | variables which weren't addressable before the pass will not |
| 502 | have their privatized copies address taken. See PR91216. */ |
| 503 | if (!TREE_ADDRESSABLE (decl)) |
| 504 | { |
| 505 | if (!global_nonaddressable_vars) |
| 506 | global_nonaddressable_vars = BITMAP_ALLOC (NULL); |
| 507 | bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl)); |
| 508 | } |
| 509 | else if (!global_nonaddressable_vars |
| 510 | || !bitmap_bit_p (global_nonaddressable_vars, |
| 511 | DECL_UID (decl))) |
| 512 | return true; |
| 513 | } |
| 514 | else if (TREE_ADDRESSABLE (decl)) |
| 515 | return true; |
| 516 | |
| 517 | /* lower_send_shared_vars only uses copy-in, but not copy-out |
| 518 | for these. */ |
| 519 | if (TREE_READONLY (decl) |
| 520 | || ((TREE_CODE (decl) == RESULT_DECL |
| 521 | || TREE_CODE (decl) == PARM_DECL) |
| 522 | && DECL_BY_REFERENCE (decl))) |
| 523 | return false; |
| 524 | |
| 525 | /* Disallow copy-in/out in nested parallel if |
| 526 | decl is shared in outer parallel, otherwise |
| 527 | each thread could store the shared variable |
| 528 | in its own copy-in location, making the |
| 529 | variable no longer really shared. */ |
| 530 | if (shared_ctx->is_nested) |
| 531 | { |
| 532 | omp_context *up; |
| 533 | |
| 534 | for (up = shared_ctx->outer; up; up = up->outer) |
| 535 | if ((is_taskreg_ctx (ctx: up) |
| 536 | || (gimple_code (g: up->stmt) == GIMPLE_OMP_TARGET |
| 537 | && is_gimple_omp_offloaded (stmt: up->stmt))) |
| 538 | && maybe_lookup_decl (var: decl, ctx: up)) |
| 539 | break; |
| 540 | |
| 541 | if (up) |
| 542 | { |
| 543 | tree c; |
| 544 | |
| 545 | if (gimple_code (g: up->stmt) == GIMPLE_OMP_TARGET) |
| 546 | { |
| 547 | for (c = gimple_omp_target_clauses (gs: up->stmt); |
| 548 | c; c = OMP_CLAUSE_CHAIN (c)) |
| 549 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 550 | && OMP_CLAUSE_DECL (c) == decl) |
| 551 | break; |
| 552 | } |
| 553 | else |
| 554 | for (c = gimple_omp_taskreg_clauses (gs: up->stmt); |
| 555 | c; c = OMP_CLAUSE_CHAIN (c)) |
| 556 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
| 557 | && OMP_CLAUSE_DECL (c) == decl) |
| 558 | break; |
| 559 | |
| 560 | if (c) |
| 561 | goto maybe_mark_addressable_and_ret; |
| 562 | } |
| 563 | } |
| 564 | |
| 565 | /* For tasks avoid using copy-in/out. As tasks can be |
| 566 | deferred or executed in different thread, when GOMP_task |
| 567 | returns, the task hasn't necessarily terminated. */ |
| 568 | if (is_task_ctx (ctx: shared_ctx)) |
| 569 | { |
| 570 | tree outer; |
| 571 | maybe_mark_addressable_and_ret: |
| 572 | outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx); |
| 573 | if (is_gimple_reg (outer) && !omp_member_access_dummy_var (decl: outer)) |
| 574 | { |
| 575 | /* Taking address of OUTER in lower_send_shared_vars |
| 576 | might need regimplification of everything that uses the |
| 577 | variable. */ |
| 578 | if (!make_addressable_vars) |
| 579 | make_addressable_vars = BITMAP_ALLOC (NULL); |
| 580 | bitmap_set_bit (make_addressable_vars, DECL_UID (outer)); |
| 581 | TREE_ADDRESSABLE (outer) = 1; |
| 582 | } |
| 583 | return true; |
| 584 | } |
| 585 | } |
| 586 | |
| 587 | return false; |
| 588 | } |
| 589 | |
| 590 | /* Construct a new automatic decl similar to VAR. */ |
| 591 | |
| 592 | static tree |
| 593 | omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx) |
| 594 | { |
| 595 | tree copy = copy_var_decl (var, name, type); |
| 596 | |
| 597 | DECL_CONTEXT (copy) = current_function_decl; |
| 598 | |
| 599 | if (ctx) |
| 600 | { |
| 601 | DECL_CHAIN (copy) = ctx->block_vars; |
| 602 | ctx->block_vars = copy; |
| 603 | } |
| 604 | else |
| 605 | record_vars (copy); |
| 606 | |
| 607 | /* If VAR is listed in make_addressable_vars, it wasn't |
| 608 | originally addressable, but was only later made so. |
| 609 | We don't need to take address of privatizations |
| 610 | from that var. */ |
| 611 | if (TREE_ADDRESSABLE (var) |
| 612 | && ((make_addressable_vars |
| 613 | && bitmap_bit_p (make_addressable_vars, DECL_UID (var))) |
| 614 | || (global_nonaddressable_vars |
| 615 | && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var))))) |
| 616 | TREE_ADDRESSABLE (copy) = 0; |
| 617 | |
| 618 | return copy; |
| 619 | } |
| 620 | |
| 621 | static tree |
| 622 | omp_copy_decl_1 (tree var, omp_context *ctx) |
| 623 | { |
| 624 | return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx); |
| 625 | } |
| 626 | |
| 627 | /* Build tree nodes to access the field for VAR on the receiver side. */ |
| 628 | |
| 629 | static tree |
| 630 | build_receiver_ref (tree var, bool by_ref, omp_context *ctx) |
| 631 | { |
| 632 | tree x, field = lookup_field (var, ctx); |
| 633 | |
| 634 | /* If the receiver record type was remapped in the child function, |
| 635 | remap the field into the new record type. */ |
| 636 | x = maybe_lookup_field (var: field, ctx); |
| 637 | if (x != NULL) |
| 638 | field = x; |
| 639 | |
| 640 | x = build_simple_mem_ref (ctx->receiver_decl); |
| 641 | TREE_THIS_NOTRAP (x) = 1; |
| 642 | x = omp_build_component_ref (obj: x, field); |
| 643 | if (by_ref) |
| 644 | { |
| 645 | x = build_simple_mem_ref (x); |
| 646 | TREE_THIS_NOTRAP (x) = 1; |
| 647 | } |
| 648 | |
| 649 | return x; |
| 650 | } |
| 651 | |
| 652 | /* Build tree nodes to access VAR in the scope outer to CTX. In the case |
| 653 | of a parallel, this is a component reference; for workshare constructs |
| 654 | this is some variable. */ |
| 655 | |
| 656 | static tree |
| 657 | build_outer_var_ref (tree var, omp_context *ctx, |
| 658 | enum omp_clause_code code = OMP_CLAUSE_ERROR) |
| 659 | { |
| 660 | tree x; |
| 661 | omp_context *outer = ctx->outer; |
| 662 | for (; outer; outer = outer->outer) |
| 663 | { |
| 664 | if (gimple_code (g: outer->stmt) == GIMPLE_OMP_TASKGROUP) |
| 665 | continue; |
| 666 | if (gimple_code (g: outer->stmt) == GIMPLE_OMP_SCOPE |
| 667 | && !maybe_lookup_decl (var, ctx: outer)) |
| 668 | continue; |
| 669 | break; |
| 670 | } |
| 671 | |
| 672 | if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (var, ctx))) |
| 673 | x = var; |
| 674 | else if (is_variable_sized (expr: var)) |
| 675 | { |
| 676 | x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0); |
| 677 | x = build_outer_var_ref (var: x, ctx, code); |
| 678 | x = build_simple_mem_ref (x); |
| 679 | } |
| 680 | else if (is_taskreg_ctx (ctx)) |
| 681 | { |
| 682 | bool by_ref = use_pointer_for_field (decl: var, NULL); |
| 683 | x = build_receiver_ref (var, by_ref, ctx); |
| 684 | } |
| 685 | else if ((gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR |
| 686 | && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD) |
| 687 | || ctx->loop_p |
| 688 | || code == OMP_CLAUSE_ALLOCATE |
| 689 | || (code == OMP_CLAUSE_PRIVATE |
| 690 | && (gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR |
| 691 | || gimple_code (g: ctx->stmt) == GIMPLE_OMP_SECTIONS |
| 692 | || gimple_code (g: ctx->stmt) == GIMPLE_OMP_SINGLE))) |
| 693 | { |
| 694 | /* #pragma omp simd isn't a worksharing construct, and can reference |
| 695 | even private vars in its linear etc. clauses. |
| 696 | Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer |
| 697 | to private vars in all worksharing constructs. */ |
| 698 | x = NULL_TREE; |
| 699 | if (outer && is_taskreg_ctx (ctx: outer)) |
| 700 | x = lookup_decl (var, ctx: outer); |
| 701 | else if (outer) |
| 702 | x = maybe_lookup_decl_in_outer_ctx (var, ctx); |
| 703 | if (x == NULL_TREE) |
| 704 | x = var; |
| 705 | } |
| 706 | else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx)) |
| 707 | { |
| 708 | gcc_assert (outer); |
| 709 | splay_tree_node n |
| 710 | = splay_tree_lookup (outer->field_map, |
| 711 | (splay_tree_key) &DECL_UID (var)); |
| 712 | if (n == NULL) |
| 713 | { |
| 714 | if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (var, outer))) |
| 715 | x = var; |
| 716 | else |
| 717 | x = lookup_decl (var, ctx: outer); |
| 718 | } |
| 719 | else |
| 720 | { |
| 721 | tree field = (tree) n->value; |
| 722 | /* If the receiver record type was remapped in the child function, |
| 723 | remap the field into the new record type. */ |
| 724 | x = maybe_lookup_field (var: field, ctx: outer); |
| 725 | if (x != NULL) |
| 726 | field = x; |
| 727 | |
| 728 | x = build_simple_mem_ref (outer->receiver_decl); |
| 729 | x = omp_build_component_ref (obj: x, field); |
| 730 | if (use_pointer_for_field (decl: var, shared_ctx: outer)) |
| 731 | x = build_simple_mem_ref (x); |
| 732 | } |
| 733 | } |
| 734 | else if (outer) |
| 735 | x = lookup_decl (var, ctx: outer); |
| 736 | else if (omp_privatize_by_reference (decl: var)) |
| 737 | /* This can happen with orphaned constructs. If var is reference, it is |
| 738 | possible it is shared and as such valid. */ |
| 739 | x = var; |
| 740 | else if (omp_member_access_dummy_var (decl: var)) |
| 741 | x = var; |
| 742 | else |
| 743 | gcc_unreachable (); |
| 744 | |
| 745 | if (x == var) |
| 746 | { |
| 747 | tree t = omp_member_access_dummy_var (decl: var); |
| 748 | if (t) |
| 749 | { |
| 750 | x = DECL_VALUE_EXPR (var); |
| 751 | tree o = maybe_lookup_decl_in_outer_ctx (t, ctx); |
| 752 | if (o != t) |
| 753 | x = unshare_and_remap (x, from: t, to: o); |
| 754 | else |
| 755 | x = unshare_expr (x); |
| 756 | } |
| 757 | } |
| 758 | |
| 759 | if (omp_privatize_by_reference (decl: var)) |
| 760 | x = build_simple_mem_ref (x); |
| 761 | |
| 762 | return x; |
| 763 | } |
| 764 | |
| 765 | /* Build tree nodes to access the field for VAR on the sender side. */ |
| 766 | |
| 767 | static tree |
| 768 | build_sender_ref (splay_tree_key key, omp_context *ctx) |
| 769 | { |
| 770 | tree field = lookup_sfield (key, ctx); |
| 771 | tree tmp = ctx->sender_decl; |
| 772 | if (POINTER_TYPE_P (TREE_TYPE (tmp))) |
| 773 | tmp = build_fold_indirect_ref (tmp); |
| 774 | return omp_build_component_ref (obj: tmp, field); |
| 775 | } |
| 776 | |
| 777 | static tree |
| 778 | build_sender_ref (tree var, omp_context *ctx) |
| 779 | { |
| 780 | return build_sender_ref (key: (splay_tree_key) var, ctx); |
| 781 | } |
| 782 | |
| 783 | /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If |
| 784 | BASE_POINTERS_RESTRICT, declare the field with restrict. */ |
| 785 | |
| 786 | static void |
| 787 | install_var_field (tree var, bool by_ref, int mask, omp_context *ctx) |
| 788 | { |
| 789 | tree field, type, sfield = NULL_TREE; |
| 790 | splay_tree_key key = (splay_tree_key) var; |
| 791 | |
| 792 | if ((mask & 16) != 0) |
| 793 | { |
| 794 | key = (splay_tree_key) &DECL_NAME (var); |
| 795 | gcc_checking_assert (key != (splay_tree_key) var); |
| 796 | } |
| 797 | if ((mask & 8) != 0) |
| 798 | { |
| 799 | key = (splay_tree_key) &DECL_UID (var); |
| 800 | gcc_checking_assert (key != (splay_tree_key) var); |
| 801 | } |
| 802 | gcc_assert ((mask & 1) == 0 |
| 803 | || !splay_tree_lookup (ctx->field_map, key)); |
| 804 | gcc_assert ((mask & 2) == 0 || !ctx->sfield_map |
| 805 | || !splay_tree_lookup (ctx->sfield_map, key)); |
| 806 | gcc_assert ((mask & 3) == 3 |
| 807 | || !is_gimple_omp_oacc (ctx->stmt)); |
| 808 | |
| 809 | type = TREE_TYPE (var); |
| 810 | if ((mask & 16) != 0) |
| 811 | type = lang_hooks.decls.omp_array_data (var, true); |
| 812 | |
| 813 | /* Prevent redeclaring the var in the split-off function with a restrict |
| 814 | pointer type. Note that we only clear type itself, restrict qualifiers in |
| 815 | the pointed-to type will be ignored by points-to analysis. */ |
| 816 | if (POINTER_TYPE_P (type) |
| 817 | && TYPE_RESTRICT (type)) |
| 818 | type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT); |
| 819 | |
| 820 | if (mask & 4) |
| 821 | { |
| 822 | gcc_assert (TREE_CODE (type) == ARRAY_TYPE); |
| 823 | type = build_pointer_type (build_pointer_type (type)); |
| 824 | } |
| 825 | else if (by_ref) |
| 826 | type = build_pointer_type (type); |
| 827 | else if ((mask & (32 | 3)) == 1 |
| 828 | && omp_privatize_by_reference (decl: var)) |
| 829 | type = TREE_TYPE (type); |
| 830 | |
| 831 | field = build_decl (DECL_SOURCE_LOCATION (var), |
| 832 | FIELD_DECL, DECL_NAME (var), type); |
| 833 | |
| 834 | /* Remember what variable this field was created for. This does have a |
| 835 | side effect of making dwarf2out ignore this member, so for helpful |
| 836 | debugging we clear it later in delete_omp_context. */ |
| 837 | DECL_ABSTRACT_ORIGIN (field) = var; |
| 838 | if ((mask & 16) == 0 && type == TREE_TYPE (var)) |
| 839 | { |
| 840 | SET_DECL_ALIGN (field, DECL_ALIGN (var)); |
| 841 | DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var); |
| 842 | TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var); |
| 843 | } |
| 844 | else |
| 845 | SET_DECL_ALIGN (field, TYPE_ALIGN (type)); |
| 846 | |
| 847 | if ((mask & 3) == 3) |
| 848 | { |
| 849 | insert_field_into_struct (ctx->record_type, field); |
| 850 | if (ctx->srecord_type) |
| 851 | { |
| 852 | sfield = build_decl (DECL_SOURCE_LOCATION (var), |
| 853 | FIELD_DECL, DECL_NAME (var), type); |
| 854 | DECL_ABSTRACT_ORIGIN (sfield) = var; |
| 855 | SET_DECL_ALIGN (sfield, DECL_ALIGN (field)); |
| 856 | DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field); |
| 857 | TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field); |
| 858 | insert_field_into_struct (ctx->srecord_type, sfield); |
| 859 | } |
| 860 | } |
| 861 | else |
| 862 | { |
| 863 | if (ctx->srecord_type == NULL_TREE) |
| 864 | { |
| 865 | tree t; |
| 866 | |
| 867 | ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE); |
| 868 | ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0); |
| 869 | for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t)) |
| 870 | { |
| 871 | sfield = build_decl (DECL_SOURCE_LOCATION (t), |
| 872 | FIELD_DECL, DECL_NAME (t), TREE_TYPE (t)); |
| 873 | DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t); |
| 874 | insert_field_into_struct (ctx->srecord_type, sfield); |
| 875 | splay_tree_insert (ctx->sfield_map, |
| 876 | (splay_tree_key) DECL_ABSTRACT_ORIGIN (t), |
| 877 | (splay_tree_value) sfield); |
| 878 | } |
| 879 | } |
| 880 | sfield = field; |
| 881 | insert_field_into_struct ((mask & 1) ? ctx->record_type |
| 882 | : ctx->srecord_type, field); |
| 883 | } |
| 884 | |
| 885 | if (mask & 1) |
| 886 | splay_tree_insert (ctx->field_map, key, (splay_tree_value) field); |
| 887 | if ((mask & 2) && ctx->sfield_map) |
| 888 | splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield); |
| 889 | } |
| 890 | |
| 891 | static tree |
| 892 | install_var_local (tree var, omp_context *ctx) |
| 893 | { |
| 894 | tree new_var = omp_copy_decl_1 (var, ctx); |
| 895 | insert_decl_map (&ctx->cb, var, new_var); |
| 896 | return new_var; |
| 897 | } |
| 898 | |
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  PRIVATE_DEBUG
   forces the value expression to be copied even when the remapped decl
   has a constant size.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (var: decl, ctx);

  /* Re-express the type in terms of the remapped decls of CTX.  */
  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), id: &ctx->cb);

  /* If DECL has a value expression, rebuild it so that it refers to the
     remapped decls of this context.  */
  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  /* Non-constant sizes must themselves be remapped into the new
     context; fall back to the remapped type's size trees if the decl's
     size could not be remapped.  */
  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), id: &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), id: &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
| 933 | |
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is the cb field of an omp_context; recover the context.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Forced and non-local labels must stay as they are; any other
	 label gets a fresh artificial label in the current function.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward until we reach a context for which is_taskreg_ctx
     holds, returning any existing mapping found on the way.  Running
     out of contexts means VAR needs no replacement.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  /* Globals and decls belonging to some other function are used
     directly, without a copy.  */
  if (is_global_var (t: var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* No mapping exists and none of the defaults apply.  */
  return error_mark_node;
}
| 970 | |
/* Create a new context for STMT, with OUTER_CTX being the surrounding
   context (NULL for the outermost one).  The context is registered in
   ALL_CONTEXTS keyed by STMT.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  /* Make the context findable from its statement later on.  */
  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Nested context: inherit the copy-body data from the enclosing
	 context, but start with no current BLOCK.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: initialize the copy-body machinery from the
	 function currently being lowered.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (decl: current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  /* Each context owns its own decl map; freed in delete_omp_context.  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
| 1009 | |
| 1010 | static gimple_seq maybe_catch_exception (gimple_seq); |
| 1011 | |
/* Finalize task copyfn.  Gimplify the copy function recorded on
   TASK_STMT (if any), pass its body through maybe_catch_exception, and
   register the resulting function with the callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (gs: task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  /* Keep the child in sync with the parent's pass properties.  */
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (new_cfun: child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  /* If an exception wrapper was added, rebuild the body sequence around
     a fresh bind containing it.  */
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (fndecl: child_fn, lowered: false);
}
| 1047 | |
| 1048 | /* Destroy a omp_context data structures. Called through the splay tree |
| 1049 | value delete callback. */ |
| 1050 | |
| 1051 | static void |
| 1052 | delete_omp_context (splay_tree_value value) |
| 1053 | { |
| 1054 | omp_context *ctx = (omp_context *) value; |
| 1055 | |
| 1056 | delete ctx->cb.decl_map; |
| 1057 | |
| 1058 | if (ctx->field_map) |
| 1059 | splay_tree_delete (ctx->field_map); |
| 1060 | if (ctx->sfield_map) |
| 1061 | splay_tree_delete (ctx->sfield_map); |
| 1062 | |
| 1063 | /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before |
| 1064 | it produces corrupt debug information. */ |
| 1065 | if (ctx->record_type) |
| 1066 | { |
| 1067 | tree t; |
| 1068 | for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t)) |
| 1069 | DECL_ABSTRACT_ORIGIN (t) = NULL; |
| 1070 | } |
| 1071 | if (ctx->srecord_type) |
| 1072 | { |
| 1073 | tree t; |
| 1074 | for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t)) |
| 1075 | DECL_ABSTRACT_ORIGIN (t) = NULL; |
| 1076 | } |
| 1077 | |
| 1078 | if (ctx->task_reduction_map) |
| 1079 | { |
| 1080 | ctx->task_reductions.release (); |
| 1081 | delete ctx->task_reduction_map; |
| 1082 | } |
| 1083 | |
| 1084 | delete ctx->lastprivate_conditional_map; |
| 1085 | delete ctx->allocate_map; |
| 1086 | |
| 1087 | XDELETE (ctx); |
| 1088 | } |
| 1089 | |
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  Returns early when CTX has no receiver decl.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field needs remapping: build a fresh RECORD_TYPE
	 with the same name, remapping each field's type, size and
	 offset into the child context.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), id: &ctx->cb);
	  /* Fields are collected in reverse and nreverse'd below.  */
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (stmt: ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  /* Flexible array members cannot sit behind a REFERENCE_TYPE, so use a
     plain pointer for those record types.  */
  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (flexible_array_type_p (type)
			    ? build_pointer_type (type)
			    : build_reference_type (type), TYPE_QUAL_RESTRICT);
}
| 1149 | |
| 1150 | /* Instantiate decls as necessary in CTX to satisfy the data sharing |
| 1151 | specified by CLAUSES. */ |
| 1152 | |
| 1153 | static void |
| 1154 | scan_sharing_clauses (tree clauses, omp_context *ctx) |
| 1155 | { |
| 1156 | tree c, decl; |
| 1157 | bool scan_array_reductions = false; |
| 1158 | bool flex_array_ptr = false; |
| 1159 | |
| 1160 | for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
| 1161 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE |
| 1162 | && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE |
| 1163 | /* omp_default_mem_alloc is 1 */ |
| 1164 | || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) |
| 1165 | || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE)) |
| 1166 | { |
| 1167 | /* The allocate clauses that appear on a target construct or on |
| 1168 | constructs in a target region must specify an allocator expression |
| 1169 | unless a requires directive with the dynamic_allocators clause |
| 1170 | is present in the same compilation unit. */ |
| 1171 | if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE |
| 1172 | && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0) |
| 1173 | && omp_maybe_offloaded_ctx (ctx)) |
| 1174 | error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must" |
| 1175 | " specify an allocator here" ); |
| 1176 | if (ctx->allocate_map == NULL) |
| 1177 | ctx->allocate_map = new hash_map<tree, tree>; |
| 1178 | tree val = integer_zero_node; |
| 1179 | if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) |
| 1180 | val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c); |
| 1181 | if (OMP_CLAUSE_ALLOCATE_ALIGN (c)) |
| 1182 | val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c)); |
| 1183 | ctx->allocate_map->put (OMP_CLAUSE_DECL (c), v: val); |
| 1184 | } |
| 1185 | |
| 1186 | for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
| 1187 | { |
| 1188 | bool by_ref; |
| 1189 | |
| 1190 | switch (OMP_CLAUSE_CODE (c)) |
| 1191 | { |
| 1192 | case OMP_CLAUSE_PRIVATE: |
| 1193 | decl = OMP_CLAUSE_DECL (c); |
| 1194 | if (OMP_CLAUSE_PRIVATE_OUTER_REF (c)) |
| 1195 | goto do_private; |
| 1196 | else if (!is_variable_sized (expr: decl)) |
| 1197 | install_var_local (var: decl, ctx); |
| 1198 | break; |
| 1199 | |
| 1200 | case OMP_CLAUSE_SHARED: |
| 1201 | decl = OMP_CLAUSE_DECL (c); |
| 1202 | if (ctx->allocate_map && ctx->allocate_map->get (k: decl)) |
| 1203 | ctx->allocate_map->remove (k: decl); |
| 1204 | /* Ignore shared directives in teams construct inside of |
| 1205 | target construct. */ |
| 1206 | if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TEAMS |
| 1207 | && !is_host_teams_ctx (ctx)) |
| 1208 | { |
| 1209 | /* Global variables don't need to be copied, |
| 1210 | the receiver side will use them directly. */ |
| 1211 | tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx); |
| 1212 | if (is_global_var (t: odecl)) |
| 1213 | break; |
| 1214 | insert_decl_map (&ctx->cb, decl, odecl); |
| 1215 | break; |
| 1216 | } |
| 1217 | gcc_assert (is_taskreg_ctx (ctx)); |
| 1218 | gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl)) |
| 1219 | || !is_variable_sized (decl)); |
| 1220 | /* Global variables don't need to be copied, |
| 1221 | the receiver side will use them directly. */ |
| 1222 | if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl, ctx))) |
| 1223 | break; |
| 1224 | if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)) |
| 1225 | { |
| 1226 | use_pointer_for_field (decl, shared_ctx: ctx); |
| 1227 | break; |
| 1228 | } |
| 1229 | by_ref = use_pointer_for_field (decl, NULL); |
| 1230 | if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c)) |
| 1231 | || TREE_ADDRESSABLE (decl) |
| 1232 | || by_ref |
| 1233 | || omp_privatize_by_reference (decl)) |
| 1234 | { |
| 1235 | by_ref = use_pointer_for_field (decl, shared_ctx: ctx); |
| 1236 | install_var_field (var: decl, by_ref, mask: 3, ctx); |
| 1237 | install_var_local (var: decl, ctx); |
| 1238 | break; |
| 1239 | } |
| 1240 | /* We don't need to copy const scalar vars back. */ |
| 1241 | OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE); |
| 1242 | goto do_private; |
| 1243 | |
| 1244 | case OMP_CLAUSE_REDUCTION: |
| 1245 | /* Collect 'reduction' clauses on OpenACC compute construct. */ |
| 1246 | if (is_gimple_omp_oacc (stmt: ctx->stmt) |
| 1247 | && is_gimple_omp_offloaded (stmt: ctx->stmt)) |
| 1248 | { |
| 1249 | /* No 'reduction' clauses on OpenACC 'kernels'. */ |
| 1250 | gcc_checking_assert (!is_oacc_kernels (ctx)); |
| 1251 | /* Likewise, on OpenACC 'kernels' decomposed parts. */ |
| 1252 | gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx)); |
| 1253 | |
| 1254 | ctx->local_reduction_clauses |
| 1255 | = tree_cons (NULL, c, ctx->local_reduction_clauses); |
| 1256 | } |
| 1257 | /* FALLTHRU */ |
| 1258 | |
| 1259 | case OMP_CLAUSE_IN_REDUCTION: |
| 1260 | decl = OMP_CLAUSE_DECL (c); |
| 1261 | if (ctx->allocate_map |
| 1262 | && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
| 1263 | && (OMP_CLAUSE_REDUCTION_INSCAN (c) |
| 1264 | || OMP_CLAUSE_REDUCTION_TASK (c))) |
| 1265 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
| 1266 | || is_task_ctx (ctx))) |
| 1267 | { |
| 1268 | /* For now. */ |
| 1269 | if (ctx->allocate_map->get (k: decl)) |
| 1270 | ctx->allocate_map->remove (k: decl); |
| 1271 | } |
| 1272 | if (TREE_CODE (decl) == MEM_REF) |
| 1273 | { |
| 1274 | tree t = TREE_OPERAND (decl, 0); |
| 1275 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
| 1276 | t = TREE_OPERAND (t, 0); |
| 1277 | if (INDIRECT_REF_P (t) |
| 1278 | || TREE_CODE (t) == ADDR_EXPR) |
| 1279 | t = TREE_OPERAND (t, 0); |
| 1280 | if (is_omp_target (stmt: ctx->stmt)) |
| 1281 | { |
| 1282 | if (is_variable_sized (expr: t)) |
| 1283 | { |
| 1284 | gcc_assert (DECL_HAS_VALUE_EXPR_P (t)); |
| 1285 | t = DECL_VALUE_EXPR (t); |
| 1286 | gcc_assert (INDIRECT_REF_P (t)); |
| 1287 | t = TREE_OPERAND (t, 0); |
| 1288 | gcc_assert (DECL_P (t)); |
| 1289 | } |
| 1290 | tree at = t; |
| 1291 | if (ctx->outer) |
| 1292 | scan_omp_op (tp: &at, ctx: ctx->outer); |
| 1293 | tree nt = omp_copy_decl_1 (var: at, ctx: ctx->outer); |
| 1294 | splay_tree_insert (ctx->field_map, |
| 1295 | (splay_tree_key) &DECL_CONTEXT (t), |
| 1296 | (splay_tree_value) nt); |
| 1297 | if (at != t) |
| 1298 | splay_tree_insert (ctx->field_map, |
| 1299 | (splay_tree_key) &DECL_CONTEXT (at), |
| 1300 | (splay_tree_value) nt); |
| 1301 | break; |
| 1302 | } |
| 1303 | install_var_local (var: t, ctx); |
| 1304 | if (is_taskreg_ctx (ctx) |
| 1305 | && (!is_global_var (t: maybe_lookup_decl_in_outer_ctx (t, ctx)) |
| 1306 | || (is_task_ctx (ctx) |
| 1307 | && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE |
| 1308 | || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE |
| 1309 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) |
| 1310 | == POINTER_TYPE))))) |
| 1311 | && !is_variable_sized (expr: t) |
| 1312 | && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION |
| 1313 | || (!OMP_CLAUSE_REDUCTION_TASK (c) |
| 1314 | && !is_task_ctx (ctx)))) |
| 1315 | { |
| 1316 | by_ref = use_pointer_for_field (decl: t, NULL); |
| 1317 | if (is_task_ctx (ctx) |
| 1318 | && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE |
| 1319 | && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE) |
| 1320 | { |
| 1321 | install_var_field (var: t, by_ref: false, mask: 1, ctx); |
| 1322 | install_var_field (var: t, by_ref, mask: 2, ctx); |
| 1323 | } |
| 1324 | else |
| 1325 | install_var_field (var: t, by_ref, mask: 3, ctx); |
| 1326 | } |
| 1327 | break; |
| 1328 | } |
| 1329 | if (is_omp_target (stmt: ctx->stmt)) |
| 1330 | { |
| 1331 | tree at = decl; |
| 1332 | if (ctx->outer) |
| 1333 | scan_omp_op (tp: &at, ctx: ctx->outer); |
| 1334 | tree nt = omp_copy_decl_1 (var: at, ctx: ctx->outer); |
| 1335 | splay_tree_insert (ctx->field_map, |
| 1336 | (splay_tree_key) &DECL_CONTEXT (decl), |
| 1337 | (splay_tree_value) nt); |
| 1338 | if (at != decl) |
| 1339 | splay_tree_insert (ctx->field_map, |
| 1340 | (splay_tree_key) &DECL_CONTEXT (at), |
| 1341 | (splay_tree_value) nt); |
| 1342 | break; |
| 1343 | } |
| 1344 | if (is_task_ctx (ctx) |
| 1345 | || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
| 1346 | && OMP_CLAUSE_REDUCTION_TASK (c) |
| 1347 | && is_parallel_ctx (ctx))) |
| 1348 | { |
| 1349 | /* Global variables don't need to be copied, |
| 1350 | the receiver side will use them directly. */ |
| 1351 | if (!is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl, ctx))) |
| 1352 | { |
| 1353 | by_ref = use_pointer_for_field (decl, shared_ctx: ctx); |
| 1354 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION) |
| 1355 | install_var_field (var: decl, by_ref, mask: 3, ctx); |
| 1356 | } |
| 1357 | install_var_local (var: decl, ctx); |
| 1358 | break; |
| 1359 | } |
| 1360 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
| 1361 | && OMP_CLAUSE_REDUCTION_TASK (c)) |
| 1362 | { |
| 1363 | install_var_local (var: decl, ctx); |
| 1364 | break; |
| 1365 | } |
| 1366 | goto do_private; |
| 1367 | |
| 1368 | case OMP_CLAUSE_LASTPRIVATE: |
| 1369 | /* Let the corresponding firstprivate clause create |
| 1370 | the variable. */ |
| 1371 | if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) |
| 1372 | break; |
| 1373 | /* FALLTHRU */ |
| 1374 | |
| 1375 | case OMP_CLAUSE_FIRSTPRIVATE: |
| 1376 | case OMP_CLAUSE_LINEAR: |
| 1377 | decl = OMP_CLAUSE_DECL (c); |
| 1378 | do_private: |
| 1379 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
| 1380 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR |
| 1381 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 1382 | && is_gimple_omp_offloaded (stmt: ctx->stmt)) |
| 1383 | { |
| 1384 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
| 1385 | || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR |
| 1386 | && lang_hooks.decls.omp_array_data (decl, true))) |
| 1387 | { |
| 1388 | by_ref = !omp_privatize_by_reference (decl); |
| 1389 | install_var_field (var: decl, by_ref, mask: 3, ctx); |
| 1390 | } |
| 1391 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 1392 | { |
| 1393 | if (INDIRECT_REF_P (decl)) |
| 1394 | decl = TREE_OPERAND (decl, 0); |
| 1395 | install_var_field (var: decl, by_ref: true, mask: 3, ctx); |
| 1396 | } |
| 1397 | else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) |
| 1398 | install_var_field (var: decl, by_ref: true, mask: 3, ctx); |
| 1399 | else |
| 1400 | install_var_field (var: decl, by_ref: false, mask: 3, ctx); |
| 1401 | } |
| 1402 | if (is_variable_sized (expr: decl)) |
| 1403 | { |
| 1404 | if (is_task_ctx (ctx)) |
| 1405 | { |
| 1406 | if (ctx->allocate_map |
| 1407 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) |
| 1408 | { |
| 1409 | /* For now. */ |
| 1410 | if (ctx->allocate_map->get (k: decl)) |
| 1411 | ctx->allocate_map->remove (k: decl); |
| 1412 | } |
| 1413 | install_var_field (var: decl, by_ref: false, mask: 1, ctx); |
| 1414 | } |
| 1415 | break; |
| 1416 | } |
| 1417 | else if (is_taskreg_ctx (ctx)) |
| 1418 | { |
| 1419 | bool global |
| 1420 | = is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl, ctx)); |
| 1421 | by_ref = use_pointer_for_field (decl, NULL); |
| 1422 | |
| 1423 | if (is_task_ctx (ctx) |
| 1424 | && (global || by_ref || omp_privatize_by_reference (decl))) |
| 1425 | { |
| 1426 | if (ctx->allocate_map |
| 1427 | && ctx->allocate_map->get (k: decl)) |
| 1428 | install_var_field (var: decl, by_ref, mask: 32 | 1, ctx); |
| 1429 | else |
| 1430 | install_var_field (var: decl, by_ref: false, mask: 1, ctx); |
| 1431 | if (!global) |
| 1432 | install_var_field (var: decl, by_ref, mask: 2, ctx); |
| 1433 | } |
| 1434 | else if (!global) |
| 1435 | install_var_field (var: decl, by_ref, mask: 3, ctx); |
| 1436 | } |
| 1437 | install_var_local (var: decl, ctx); |
| 1438 | /* For descr arrays on target: firstprivatize data + attach ptr. */ |
| 1439 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
| 1440 | && is_gimple_omp_offloaded (stmt: ctx->stmt) |
| 1441 | && !is_gimple_omp_oacc (stmt: ctx->stmt) |
| 1442 | && lang_hooks.decls.omp_array_data (decl, true)) |
| 1443 | { |
| 1444 | install_var_field (var: decl, by_ref: false, mask: 16 | 3, ctx); |
| 1445 | install_var_field (var: decl, by_ref: true, mask: 8 | 3, ctx); |
| 1446 | } |
| 1447 | break; |
| 1448 | |
| 1449 | case OMP_CLAUSE_USE_DEVICE_PTR: |
| 1450 | case OMP_CLAUSE_USE_DEVICE_ADDR: |
| 1451 | decl = OMP_CLAUSE_DECL (c); |
| 1452 | |
| 1453 | /* Fortran array descriptors. */ |
| 1454 | if (lang_hooks.decls.omp_array_data (decl, true)) |
| 1455 | install_var_field (var: decl, by_ref: false, mask: 19, ctx); |
| 1456 | else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR |
| 1457 | && !omp_privatize_by_reference (decl) |
| 1458 | && !omp_is_allocatable_or_ptr (decl)) |
| 1459 | || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) |
| 1460 | install_var_field (var: decl, by_ref: true, mask: 11, ctx); |
| 1461 | else |
| 1462 | install_var_field (var: decl, by_ref: false, mask: 11, ctx); |
| 1463 | if (DECL_SIZE (decl) |
| 1464 | && !poly_int_tree_p (DECL_SIZE (decl))) |
| 1465 | { |
| 1466 | tree decl2 = DECL_VALUE_EXPR (decl); |
| 1467 | gcc_assert (INDIRECT_REF_P (decl2)); |
| 1468 | decl2 = TREE_OPERAND (decl2, 0); |
| 1469 | gcc_assert (DECL_P (decl2)); |
| 1470 | install_var_local (var: decl2, ctx); |
| 1471 | } |
| 1472 | install_var_local (var: decl, ctx); |
| 1473 | break; |
| 1474 | |
| 1475 | case OMP_CLAUSE_HAS_DEVICE_ADDR: |
| 1476 | decl = OMP_CLAUSE_DECL (c); |
| 1477 | while (INDIRECT_REF_P (decl) |
| 1478 | || TREE_CODE (decl) == ARRAY_REF) |
| 1479 | decl = TREE_OPERAND (decl, 0); |
| 1480 | goto do_private; |
| 1481 | |
| 1482 | case OMP_CLAUSE_IS_DEVICE_PTR: |
| 1483 | decl = OMP_CLAUSE_DECL (c); |
| 1484 | goto do_private; |
| 1485 | |
| 1486 | case OMP_CLAUSE__LOOPTEMP_: |
| 1487 | case OMP_CLAUSE__REDUCTEMP_: |
| 1488 | gcc_assert (is_taskreg_ctx (ctx)); |
| 1489 | decl = OMP_CLAUSE_DECL (c); |
| 1490 | install_var_field (var: decl, by_ref: false, mask: 3, ctx); |
| 1491 | install_var_local (var: decl, ctx); |
| 1492 | break; |
| 1493 | |
| 1494 | case OMP_CLAUSE_COPYPRIVATE: |
| 1495 | case OMP_CLAUSE_COPYIN: |
| 1496 | decl = OMP_CLAUSE_DECL (c); |
| 1497 | by_ref = use_pointer_for_field (decl, NULL); |
| 1498 | install_var_field (var: decl, by_ref, mask: 3, ctx); |
| 1499 | break; |
| 1500 | |
| 1501 | case OMP_CLAUSE_FINAL: |
| 1502 | case OMP_CLAUSE_IF: |
| 1503 | case OMP_CLAUSE_SELF: |
| 1504 | case OMP_CLAUSE_NUM_THREADS: |
| 1505 | case OMP_CLAUSE_NUM_TEAMS: |
| 1506 | case OMP_CLAUSE_THREAD_LIMIT: |
| 1507 | case OMP_CLAUSE_DEVICE: |
| 1508 | case OMP_CLAUSE_SCHEDULE: |
| 1509 | case OMP_CLAUSE_DIST_SCHEDULE: |
| 1510 | case OMP_CLAUSE_DEPEND: |
| 1511 | case OMP_CLAUSE_PRIORITY: |
| 1512 | case OMP_CLAUSE_GRAINSIZE: |
| 1513 | case OMP_CLAUSE_NUM_TASKS: |
| 1514 | case OMP_CLAUSE_NUM_GANGS: |
| 1515 | case OMP_CLAUSE_NUM_WORKERS: |
| 1516 | case OMP_CLAUSE_VECTOR_LENGTH: |
| 1517 | case OMP_CLAUSE_DETACH: |
| 1518 | case OMP_CLAUSE_FILTER: |
| 1519 | if (ctx->outer) |
| 1520 | scan_omp_op (tp: &OMP_CLAUSE_OPERAND (c, 0), ctx: ctx->outer); |
| 1521 | break; |
| 1522 | |
| 1523 | case OMP_CLAUSE_TO: |
| 1524 | case OMP_CLAUSE_FROM: |
| 1525 | case OMP_CLAUSE_MAP: |
| 1526 | if (ctx->outer) |
| 1527 | scan_omp_op (tp: &OMP_CLAUSE_SIZE (c), ctx: ctx->outer); |
| 1528 | decl = OMP_CLAUSE_DECL (c); |
| 1529 | /* If requested, make 'decl' addressable. */ |
| 1530 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 1531 | && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c)) |
| 1532 | { |
| 1533 | gcc_checking_assert (DECL_P (decl)); |
| 1534 | |
| 1535 | bool decl_addressable = TREE_ADDRESSABLE (decl); |
| 1536 | if (!decl_addressable) |
| 1537 | { |
| 1538 | if (!make_addressable_vars) |
| 1539 | make_addressable_vars = BITMAP_ALLOC (NULL); |
| 1540 | bitmap_set_bit (make_addressable_vars, DECL_UID (decl)); |
| 1541 | TREE_ADDRESSABLE (decl) = 1; |
| 1542 | } |
| 1543 | |
| 1544 | if (dump_enabled_p ()) |
| 1545 | { |
| 1546 | location_t loc = OMP_CLAUSE_LOCATION (c); |
| 1547 | const dump_user_location_t d_u_loc |
| 1548 | = dump_user_location_t::from_location_t (loc); |
| 1549 | /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */ |
| 1550 | #if __GNUC__ >= 10 |
| 1551 | # pragma GCC diagnostic push |
| 1552 | # pragma GCC diagnostic ignored "-Wformat" |
| 1553 | #endif |
| 1554 | if (!decl_addressable) |
| 1555 | dump_printf_loc (MSG_NOTE, d_u_loc, |
| 1556 | "variable %<%T%>" |
| 1557 | " made addressable\n" , |
| 1558 | decl); |
| 1559 | else |
| 1560 | dump_printf_loc (MSG_NOTE, d_u_loc, |
| 1561 | "variable %<%T%>" |
| 1562 | " already made addressable\n" , |
| 1563 | decl); |
| 1564 | #if __GNUC__ >= 10 |
| 1565 | # pragma GCC diagnostic pop |
| 1566 | #endif |
| 1567 | } |
| 1568 | |
| 1569 | /* Done. */ |
| 1570 | OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0; |
| 1571 | } |
| 1572 | /* Global variables with "omp declare target" attribute |
| 1573 | don't need to be copied, the receiver side will use them |
| 1574 | directly. However, global variables with "omp declare target link" |
| 1575 | attribute need to be copied. Or when ALWAYS modifier is used. */ |
| 1576 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 1577 | && DECL_P (decl) |
| 1578 | && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER |
| 1579 | && (OMP_CLAUSE_MAP_KIND (c) |
| 1580 | != GOMP_MAP_FIRSTPRIVATE_REFERENCE) |
| 1581 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH |
| 1582 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH) |
| 1583 | || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) |
| 1584 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO |
| 1585 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM |
| 1586 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM |
| 1587 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TO |
| 1588 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_FROM |
| 1589 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TOFROM |
| 1590 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET |
| 1591 | && is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl, ctx)) |
| 1592 | && varpool_node::get_create (decl)->offloadable |
| 1593 | && !lookup_attribute (attr_name: "omp declare target link" , |
| 1594 | DECL_ATTRIBUTES (decl))) |
| 1595 | break; |
| 1596 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 1597 | && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER) |
| 1598 | { |
| 1599 | /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are |
| 1600 | not offloaded; there is nothing to map for those. */ |
| 1601 | if (!is_gimple_omp_offloaded (stmt: ctx->stmt) |
| 1602 | && !POINTER_TYPE_P (TREE_TYPE (decl)) |
| 1603 | && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)) |
| 1604 | break; |
| 1605 | } |
| 1606 | if (!flex_array_ptr) |
| 1607 | flex_array_ptr = lang_hooks.decls.omp_deep_mapping_p (ctx->stmt, c); |
| 1608 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 1609 | && DECL_P (decl) |
| 1610 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
| 1611 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
| 1612 | && is_omp_target (stmt: ctx->stmt)) |
| 1613 | { |
| 1614 | /* If this is an offloaded region, an attach operation should |
| 1615 | only exist when the pointer variable is mapped in a prior |
| 1616 | clause. An exception is if we have a reference (to pointer): |
| 1617 | in that case we should have mapped "*decl" in a previous |
| 1618 | mapping instead of "decl". Skip the assertion in that case. |
| 1619 | If we had an error, we may not have attempted to sort clauses |
| 1620 | properly, so avoid the test. */ |
| 1621 | if (TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE |
| 1622 | && is_gimple_omp_offloaded (stmt: ctx->stmt) |
| 1623 | && !seen_error ()) |
| 1624 | gcc_assert |
| 1625 | (maybe_lookup_decl (decl, ctx) |
| 1626 | || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)) |
| 1627 | && lookup_attribute ("omp declare target" , |
| 1628 | DECL_ATTRIBUTES (decl)))); |
| 1629 | |
| 1630 | /* By itself, attach/detach is generated as part of pointer |
| 1631 | variable mapping and should not create new variables in the |
| 1632 | offloaded region, however sender refs for it must be created |
| 1633 | for its address to be passed to the runtime. */ |
| 1634 | tree field |
| 1635 | = build_decl (OMP_CLAUSE_LOCATION (c), |
| 1636 | FIELD_DECL, NULL_TREE, ptr_type_node); |
| 1637 | SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node)); |
| 1638 | insert_field_into_struct (ctx->record_type, field); |
| 1639 | /* To not clash with a map of the pointer variable itself, |
| 1640 | attach/detach maps have their field looked up by the *clause* |
| 1641 | tree expression, not the decl. */ |
| 1642 | gcc_assert (!splay_tree_lookup (ctx->field_map, |
| 1643 | (splay_tree_key) c)); |
| 1644 | splay_tree_insert (ctx->field_map, (splay_tree_key) c, |
| 1645 | (splay_tree_value) field); |
| 1646 | break; |
| 1647 | } |
| 1648 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 1649 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER |
| 1650 | || (OMP_CLAUSE_MAP_KIND (c) |
| 1651 | == GOMP_MAP_FIRSTPRIVATE_REFERENCE))) |
| 1652 | { |
| 1653 | if (TREE_CODE (decl) == COMPONENT_REF |
| 1654 | || (INDIRECT_REF_P (decl) |
| 1655 | && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF |
| 1656 | && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0))) |
| 1657 | == REFERENCE_TYPE) |
| 1658 | || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0))) |
| 1659 | == POINTER_TYPE))))) |
| 1660 | break; |
| 1661 | if (DECL_SIZE (decl) |
| 1662 | && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) |
| 1663 | { |
| 1664 | tree decl2 = DECL_VALUE_EXPR (decl); |
| 1665 | gcc_assert (INDIRECT_REF_P (decl2)); |
| 1666 | decl2 = TREE_OPERAND (decl2, 0); |
| 1667 | gcc_assert (DECL_P (decl2)); |
| 1668 | install_var_local (var: decl2, ctx); |
| 1669 | } |
| 1670 | install_var_local (var: decl, ctx); |
| 1671 | break; |
| 1672 | } |
| 1673 | if (DECL_P (decl)) |
| 1674 | { |
| 1675 | if (DECL_SIZE (decl) |
| 1676 | && !poly_int_tree_p (DECL_SIZE (decl))) |
| 1677 | { |
| 1678 | tree decl2 = DECL_VALUE_EXPR (decl); |
| 1679 | gcc_assert (INDIRECT_REF_P (decl2)); |
| 1680 | decl2 = TREE_OPERAND (decl2, 0); |
| 1681 | gcc_assert (DECL_P (decl2)); |
| 1682 | install_var_field (var: decl2, by_ref: true, mask: 3, ctx); |
| 1683 | install_var_local (var: decl2, ctx); |
| 1684 | install_var_local (var: decl, ctx); |
| 1685 | } |
| 1686 | else |
| 1687 | { |
| 1688 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 1689 | && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER |
| 1690 | && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) |
| 1691 | && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) |
| 1692 | install_var_field (var: decl, by_ref: true, mask: 7, ctx); |
| 1693 | else |
| 1694 | install_var_field (var: decl, by_ref: true, mask: 3, ctx); |
| 1695 | if (is_gimple_omp_offloaded (stmt: ctx->stmt) |
| 1696 | && !(is_gimple_omp_oacc (stmt: ctx->stmt) |
| 1697 | && OMP_CLAUSE_MAP_IN_REDUCTION (c))) |
| 1698 | install_var_local (var: decl, ctx); |
| 1699 | } |
| 1700 | } |
| 1701 | else |
| 1702 | { |
| 1703 | tree base = get_base_address (t: decl); |
| 1704 | tree nc = OMP_CLAUSE_CHAIN (c); |
| 1705 | if (DECL_P (base) |
| 1706 | && nc != NULL_TREE |
| 1707 | && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP |
| 1708 | && OMP_CLAUSE_DECL (nc) == base |
| 1709 | && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER |
| 1710 | && integer_zerop (OMP_CLAUSE_SIZE (nc))) |
| 1711 | { |
| 1712 | OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1; |
| 1713 | OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1; |
| 1714 | } |
| 1715 | else |
| 1716 | { |
| 1717 | if (ctx->outer) |
| 1718 | { |
| 1719 | scan_omp_op (tp: &OMP_CLAUSE_DECL (c), ctx: ctx->outer); |
| 1720 | decl = OMP_CLAUSE_DECL (c); |
| 1721 | } |
| 1722 | gcc_assert (!splay_tree_lookup (ctx->field_map, |
| 1723 | (splay_tree_key) decl)); |
| 1724 | tree field |
| 1725 | = build_decl (OMP_CLAUSE_LOCATION (c), |
| 1726 | FIELD_DECL, NULL_TREE, ptr_type_node); |
| 1727 | SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node)); |
| 1728 | insert_field_into_struct (ctx->record_type, field); |
| 1729 | splay_tree_insert (ctx->field_map, (splay_tree_key) decl, |
| 1730 | (splay_tree_value) field); |
| 1731 | } |
| 1732 | } |
| 1733 | break; |
| 1734 | |
| 1735 | case OMP_CLAUSE_ORDER: |
| 1736 | ctx->order_concurrent = true; |
| 1737 | break; |
| 1738 | |
| 1739 | case OMP_CLAUSE_BIND: |
| 1740 | ctx->loop_p = true; |
| 1741 | break; |
| 1742 | |
| 1743 | case OMP_CLAUSE_NOWAIT: |
| 1744 | case OMP_CLAUSE_ORDERED: |
| 1745 | case OMP_CLAUSE_COLLAPSE: |
| 1746 | case OMP_CLAUSE_UNTIED: |
| 1747 | case OMP_CLAUSE_MERGEABLE: |
| 1748 | case OMP_CLAUSE_PROC_BIND: |
| 1749 | case OMP_CLAUSE_SAFELEN: |
| 1750 | case OMP_CLAUSE_SIMDLEN: |
| 1751 | case OMP_CLAUSE_THREADS: |
| 1752 | case OMP_CLAUSE_SIMD: |
| 1753 | case OMP_CLAUSE_NOGROUP: |
| 1754 | case OMP_CLAUSE_DEFAULTMAP: |
| 1755 | case OMP_CLAUSE_ASYNC: |
| 1756 | case OMP_CLAUSE_WAIT: |
| 1757 | case OMP_CLAUSE_GANG: |
| 1758 | case OMP_CLAUSE_WORKER: |
| 1759 | case OMP_CLAUSE_VECTOR: |
| 1760 | case OMP_CLAUSE_INDEPENDENT: |
| 1761 | case OMP_CLAUSE_AUTO: |
| 1762 | case OMP_CLAUSE_SEQ: |
| 1763 | case OMP_CLAUSE_TILE: |
| 1764 | case OMP_CLAUSE__SIMT_: |
| 1765 | case OMP_CLAUSE_DEFAULT: |
| 1766 | case OMP_CLAUSE_NONTEMPORAL: |
| 1767 | case OMP_CLAUSE_IF_PRESENT: |
| 1768 | case OMP_CLAUSE_FINALIZE: |
| 1769 | case OMP_CLAUSE_TASK_REDUCTION: |
| 1770 | case OMP_CLAUSE_ALLOCATE: |
| 1771 | case OMP_CLAUSE_DEVICE_TYPE: |
| 1772 | break; |
| 1773 | |
| 1774 | case OMP_CLAUSE_ALIGNED: |
| 1775 | decl = OMP_CLAUSE_DECL (c); |
| 1776 | if (is_global_var (t: decl) |
| 1777 | && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) |
| 1778 | install_var_local (var: decl, ctx); |
| 1779 | break; |
| 1780 | |
| 1781 | case OMP_CLAUSE__CONDTEMP_: |
| 1782 | decl = OMP_CLAUSE_DECL (c); |
| 1783 | if (is_parallel_ctx (ctx)) |
| 1784 | { |
| 1785 | install_var_field (var: decl, by_ref: false, mask: 3, ctx); |
| 1786 | install_var_local (var: decl, ctx); |
| 1787 | } |
| 1788 | else if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR |
| 1789 | && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD |
| 1790 | && !OMP_CLAUSE__CONDTEMP__ITER (c)) |
| 1791 | install_var_local (var: decl, ctx); |
| 1792 | break; |
| 1793 | |
| 1794 | case OMP_CLAUSE_INIT: |
| 1795 | case OMP_CLAUSE_USE: |
| 1796 | case OMP_CLAUSE_DESTROY: |
| 1797 | break; |
| 1798 | |
| 1799 | case OMP_CLAUSE__CACHE_: |
| 1800 | case OMP_CLAUSE_NOHOST: |
| 1801 | default: |
| 1802 | gcc_unreachable (); |
| 1803 | } |
| 1804 | } |
| 1805 | |
| 1806 | for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
| 1807 | { |
| 1808 | switch (OMP_CLAUSE_CODE (c)) |
| 1809 | { |
| 1810 | case OMP_CLAUSE_LASTPRIVATE: |
| 1811 | /* Let the corresponding firstprivate clause create |
| 1812 | the variable. */ |
| 1813 | if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)) |
| 1814 | scan_array_reductions = true; |
| 1815 | if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) |
| 1816 | break; |
| 1817 | /* FALLTHRU */ |
| 1818 | |
| 1819 | case OMP_CLAUSE_FIRSTPRIVATE: |
| 1820 | case OMP_CLAUSE_PRIVATE: |
| 1821 | case OMP_CLAUSE_LINEAR: |
| 1822 | case OMP_CLAUSE_HAS_DEVICE_ADDR: |
| 1823 | case OMP_CLAUSE_IS_DEVICE_PTR: |
| 1824 | decl = OMP_CLAUSE_DECL (c); |
| 1825 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 1826 | { |
| 1827 | while (INDIRECT_REF_P (decl) |
| 1828 | || TREE_CODE (decl) == ARRAY_REF) |
| 1829 | decl = TREE_OPERAND (decl, 0); |
| 1830 | } |
| 1831 | |
| 1832 | if (is_variable_sized (expr: decl)) |
| 1833 | { |
| 1834 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
| 1835 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR |
| 1836 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 1837 | && is_gimple_omp_offloaded (stmt: ctx->stmt)) |
| 1838 | { |
| 1839 | tree decl2 = DECL_VALUE_EXPR (decl); |
| 1840 | gcc_assert (INDIRECT_REF_P (decl2)); |
| 1841 | decl2 = TREE_OPERAND (decl2, 0); |
| 1842 | gcc_assert (DECL_P (decl2)); |
| 1843 | install_var_local (var: decl2, ctx); |
| 1844 | fixup_remapped_decl (decl: decl2, ctx, private_debug: false); |
| 1845 | } |
| 1846 | install_var_local (var: decl, ctx); |
| 1847 | } |
| 1848 | fixup_remapped_decl (decl, ctx, |
| 1849 | OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
| 1850 | && OMP_CLAUSE_PRIVATE_DEBUG (c)); |
| 1851 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR |
| 1852 | && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)) |
| 1853 | scan_array_reductions = true; |
| 1854 | break; |
| 1855 | |
| 1856 | case OMP_CLAUSE_REDUCTION: |
| 1857 | case OMP_CLAUSE_IN_REDUCTION: |
| 1858 | decl = OMP_CLAUSE_DECL (c); |
| 1859 | if (TREE_CODE (decl) != MEM_REF && !is_omp_target (stmt: ctx->stmt)) |
| 1860 | { |
| 1861 | if (is_variable_sized (expr: decl)) |
| 1862 | install_var_local (var: decl, ctx); |
| 1863 | fixup_remapped_decl (decl, ctx, private_debug: false); |
| 1864 | } |
| 1865 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
| 1866 | scan_array_reductions = true; |
| 1867 | break; |
| 1868 | |
| 1869 | case OMP_CLAUSE_TASK_REDUCTION: |
| 1870 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
| 1871 | scan_array_reductions = true; |
| 1872 | break; |
| 1873 | |
| 1874 | case OMP_CLAUSE_SHARED: |
| 1875 | /* Ignore shared directives in teams construct inside of |
| 1876 | target construct. */ |
| 1877 | if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TEAMS |
| 1878 | && !is_host_teams_ctx (ctx)) |
| 1879 | break; |
| 1880 | decl = OMP_CLAUSE_DECL (c); |
| 1881 | if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl, ctx))) |
| 1882 | break; |
| 1883 | if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)) |
| 1884 | { |
| 1885 | if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl, |
| 1886 | ctx->outer))) |
| 1887 | break; |
| 1888 | bool by_ref = use_pointer_for_field (decl, shared_ctx: ctx); |
| 1889 | install_var_field (var: decl, by_ref, mask: 11, ctx); |
| 1890 | break; |
| 1891 | } |
| 1892 | fixup_remapped_decl (decl, ctx, private_debug: false); |
| 1893 | break; |
| 1894 | |
| 1895 | case OMP_CLAUSE_MAP: |
| 1896 | if (!is_gimple_omp_offloaded (stmt: ctx->stmt)) |
| 1897 | break; |
| 1898 | decl = OMP_CLAUSE_DECL (c); |
| 1899 | if (DECL_P (decl) |
| 1900 | && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER |
| 1901 | && (OMP_CLAUSE_MAP_KIND (c) |
| 1902 | != GOMP_MAP_FIRSTPRIVATE_REFERENCE)) |
| 1903 | || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) |
| 1904 | && is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl, ctx)) |
| 1905 | && varpool_node::get_create (decl)->offloadable) |
| 1906 | break; |
| 1907 | if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
| 1908 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
| 1909 | && is_omp_target (stmt: ctx->stmt) |
| 1910 | && !is_gimple_omp_offloaded (stmt: ctx->stmt)) |
| 1911 | break; |
| 1912 | if (DECL_P (decl)) |
| 1913 | { |
| 1914 | if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER |
| 1915 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER) |
| 1916 | && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE |
| 1917 | && !COMPLETE_TYPE_P (TREE_TYPE (decl))) |
| 1918 | { |
| 1919 | tree new_decl = lookup_decl (var: decl, ctx); |
| 1920 | TREE_TYPE (new_decl) |
| 1921 | = remap_type (TREE_TYPE (decl), id: &ctx->cb); |
| 1922 | } |
| 1923 | else if (DECL_SIZE (decl) |
| 1924 | && !poly_int_tree_p (DECL_SIZE (decl))) |
| 1925 | { |
| 1926 | tree decl2 = DECL_VALUE_EXPR (decl); |
| 1927 | gcc_assert (INDIRECT_REF_P (decl2)); |
| 1928 | decl2 = TREE_OPERAND (decl2, 0); |
| 1929 | gcc_assert (DECL_P (decl2)); |
| 1930 | fixup_remapped_decl (decl: decl2, ctx, private_debug: false); |
| 1931 | fixup_remapped_decl (decl, ctx, private_debug: true); |
| 1932 | } |
| 1933 | else |
| 1934 | fixup_remapped_decl (decl, ctx, private_debug: false); |
| 1935 | } |
| 1936 | break; |
| 1937 | |
| 1938 | case OMP_CLAUSE_COPYPRIVATE: |
| 1939 | case OMP_CLAUSE_COPYIN: |
| 1940 | case OMP_CLAUSE_DEFAULT: |
| 1941 | case OMP_CLAUSE_IF: |
| 1942 | case OMP_CLAUSE_SELF: |
| 1943 | case OMP_CLAUSE_NUM_THREADS: |
| 1944 | case OMP_CLAUSE_NUM_TEAMS: |
| 1945 | case OMP_CLAUSE_THREAD_LIMIT: |
| 1946 | case OMP_CLAUSE_DEVICE: |
| 1947 | case OMP_CLAUSE_SCHEDULE: |
| 1948 | case OMP_CLAUSE_DIST_SCHEDULE: |
| 1949 | case OMP_CLAUSE_NOWAIT: |
| 1950 | case OMP_CLAUSE_ORDERED: |
| 1951 | case OMP_CLAUSE_COLLAPSE: |
| 1952 | case OMP_CLAUSE_UNTIED: |
| 1953 | case OMP_CLAUSE_FINAL: |
| 1954 | case OMP_CLAUSE_MERGEABLE: |
| 1955 | case OMP_CLAUSE_PROC_BIND: |
| 1956 | case OMP_CLAUSE_SAFELEN: |
| 1957 | case OMP_CLAUSE_SIMDLEN: |
| 1958 | case OMP_CLAUSE_ALIGNED: |
| 1959 | case OMP_CLAUSE_DEPEND: |
| 1960 | case OMP_CLAUSE_DETACH: |
| 1961 | case OMP_CLAUSE_ALLOCATE: |
| 1962 | case OMP_CLAUSE__LOOPTEMP_: |
| 1963 | case OMP_CLAUSE__REDUCTEMP_: |
| 1964 | case OMP_CLAUSE_TO: |
| 1965 | case OMP_CLAUSE_FROM: |
| 1966 | case OMP_CLAUSE_PRIORITY: |
| 1967 | case OMP_CLAUSE_GRAINSIZE: |
| 1968 | case OMP_CLAUSE_NUM_TASKS: |
| 1969 | case OMP_CLAUSE_THREADS: |
| 1970 | case OMP_CLAUSE_SIMD: |
| 1971 | case OMP_CLAUSE_NOGROUP: |
| 1972 | case OMP_CLAUSE_DEFAULTMAP: |
| 1973 | case OMP_CLAUSE_ORDER: |
| 1974 | case OMP_CLAUSE_BIND: |
| 1975 | case OMP_CLAUSE_USE_DEVICE_PTR: |
| 1976 | case OMP_CLAUSE_USE_DEVICE_ADDR: |
| 1977 | case OMP_CLAUSE_NONTEMPORAL: |
| 1978 | case OMP_CLAUSE_ASYNC: |
| 1979 | case OMP_CLAUSE_WAIT: |
| 1980 | case OMP_CLAUSE_NUM_GANGS: |
| 1981 | case OMP_CLAUSE_NUM_WORKERS: |
| 1982 | case OMP_CLAUSE_VECTOR_LENGTH: |
| 1983 | case OMP_CLAUSE_GANG: |
| 1984 | case OMP_CLAUSE_WORKER: |
| 1985 | case OMP_CLAUSE_VECTOR: |
| 1986 | case OMP_CLAUSE_INDEPENDENT: |
| 1987 | case OMP_CLAUSE_AUTO: |
| 1988 | case OMP_CLAUSE_SEQ: |
| 1989 | case OMP_CLAUSE_TILE: |
| 1990 | case OMP_CLAUSE__SIMT_: |
| 1991 | case OMP_CLAUSE_IF_PRESENT: |
| 1992 | case OMP_CLAUSE_FINALIZE: |
| 1993 | case OMP_CLAUSE_FILTER: |
| 1994 | case OMP_CLAUSE__CONDTEMP_: |
| 1995 | case OMP_CLAUSE_INIT: |
| 1996 | case OMP_CLAUSE_USE: |
| 1997 | case OMP_CLAUSE_DESTROY: |
| 1998 | case OMP_CLAUSE_DEVICE_TYPE: |
| 1999 | break; |
| 2000 | |
| 2001 | case OMP_CLAUSE__CACHE_: |
| 2002 | case OMP_CLAUSE_NOHOST: |
| 2003 | default: |
| 2004 | gcc_unreachable (); |
| 2005 | } |
| 2006 | } |
| 2007 | |
| 2008 | gcc_checking_assert (!scan_array_reductions |
| 2009 | || !is_gimple_omp_oacc (ctx->stmt)); |
| 2010 | if (scan_array_reductions) |
| 2011 | { |
| 2012 | for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
| 2013 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
| 2014 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
| 2015 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
| 2016 | && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
| 2017 | { |
| 2018 | omp_context *rctx = ctx; |
| 2019 | if (is_omp_target (stmt: ctx->stmt)) |
| 2020 | rctx = ctx->outer; |
| 2021 | scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx); |
| 2022 | scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx); |
| 2023 | } |
| 2024 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
| 2025 | && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)) |
| 2026 | scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx); |
| 2027 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR |
| 2028 | && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)) |
| 2029 | scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx); |
| 2030 | } |
| 2031 | if (flex_array_ptr) |
| 2032 | { |
| 2033 | tree field = build_range_type (size_type_node, |
| 2034 | build_int_cstu (size_type_node, 0), |
| 2035 | NULL_TREE); |
| 2036 | field = build_array_type (ptr_type_node, field); |
| 2037 | field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, field); |
| 2038 | SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node)); |
| 2039 | DECL_CONTEXT (field) = ctx->record_type; |
| 2040 | DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type); |
| 2041 | TYPE_FIELDS (ctx->record_type) = field; |
| 2042 | } |
| 2043 | } |
| 2044 | |
| 2045 | /* Create a new name for omp child function. Returns an identifier. */ |
| 2046 | |
| 2047 | static tree |
| 2048 | create_omp_child_function_name (bool task_copy) |
| 2049 | { |
| 2050 | return clone_function_name_numbered (decl: current_function_decl, |
| 2051 | suffix: task_copy ? "_omp_cpyfn" : "_omp_fn" ); |
| 2052 | } |
| 2053 | |
| 2054 | /* Return true if CTX may belong to offloaded code: either if current function |
| 2055 | is offloaded, or any enclosing context corresponds to a target region. */ |
| 2056 | |
| 2057 | static bool |
| 2058 | omp_maybe_offloaded_ctx (omp_context *ctx) |
| 2059 | { |
| 2060 | if (cgraph_node::get (decl: current_function_decl)->offloadable) |
| 2061 | return true; |
| 2062 | for (; ctx; ctx = ctx->outer) |
| 2063 | if (is_gimple_omp_offloaded (stmt: ctx->stmt)) |
| 2064 | return true; |
| 2065 | return false; |
| 2066 | } |
| 2067 | |
| 2068 | /* Build a decl for the omp child function. It'll not contain a body |
| 2069 | yet, just the bare decl. */ |
| 2070 | |
static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* A task copy function takes two pointer arguments (destination and
     source data blocks); every other child function takes a single
     data block pointer.  Both return void.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (g: ctx->stmt), FUNCTION_DECL, name, type);

  /* OpenACC constructs never use a task copy function.  */
  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (gs: ctx->stmt, copy_fn: decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  /* The child function must stay a separate function the runtime can
     call; never inline it back.  */
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  /* Start from the attribute list of the containing function; note the
     list nodes are shared with it until copied below.  */
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute (attr_name: "omp declare simd" , DECL_ATTRIBUTES (decl)))
    {
      /* Find the tail following the last "omp declare simd" attribute;
	 that suffix contains none of them and can stay shared.  */
      while (tree a2 = lookup_attribute (attr_name: "omp declare simd" , TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      /* Rewrite the prefix before that tail: drop "omp declare simd"
	 entries and copy the remaining nodes, so the shared list on
	 current_function_decl is never mutated in place.  */
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p (attr_name: "omp declare simd" , ident: get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* Inherit optimization/target options and versioning state from the
     containing function.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  /* If this child function can end up in offloaded code, mark it so and
     record that offload machinery is needed at all.  */
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable)
    {
      /* The function for an offloaded region itself is a target
	 entrypoint; one merely nested in declare-target code is just
	 "omp declare target".  */
      const char *target_attr = (is_gimple_omp_offloaded (stmt: ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target" );
      if (lookup_attribute (attr_name: "omp declare target" ,
			    DECL_ATTRIBUTES (current_function_decl)))
	{
	  if (is_gimple_omp_offloaded (stmt: ctx->stmt))
	    DECL_ATTRIBUTES (decl)
	      = remove_attribute ("omp declare target" ,
				  copy_list (DECL_ATTRIBUTES (decl)));
	  else
	    target_attr = NULL;
	}
      /* Entrypoints must not be cloned; presumably their address is what
	 the offloading tables record -- keep "noclone" on them.  */
      if (target_attr
	  && is_gimple_omp_offloaded (stmt: ctx->stmt)
	  && lookup_attribute (attr_name: "noclone" , DECL_ATTRIBUTES (decl)) == NULL_TREE)
	DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone" ),
					    NULL_TREE, DECL_ATTRIBUTES (decl));
      if (target_attr)
	DECL_ATTRIBUTES (decl)
	  = tree_cons (get_identifier (target_attr),
		       NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* The child function returns void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* Build the incoming data block parameter ".omp_data_i".  */
  tree data_name = get_identifier (".omp_data_i" );
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  /* Context set to the current function; fixed up when the body moves.  */
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions get a second parameter ".omp_data_o",
	 prepended so it becomes the first argument.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o" ),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (fndecl: decl);
  cfun->function_end_locus = gimple_location (g: ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
| 2202 | |
| 2203 | /* Callback for walk_gimple_seq. Check if combined parallel |
| 2204 | contains gimple_omp_for_combined_into_p OMP_FOR. */ |
| 2205 | |
tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (i: *gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (g: stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      /* On entry, WI->INFO points at the gf_mask loop kind being searched
	 for.  On a match it is overwritten with the statement itself, and
	 returning non-NULL stops the walk.  */
      if (gimple_omp_for_combined_into_p (g: stmt)
	  && gimple_omp_for_kind (g: stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
| 2232 | |
| 2233 | /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */ |
| 2234 | |
static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Look for the combined-into inner GIMPLE_OMP_FOR of kind MSK; on
     success omp_find_combined_for replaces WI.INFO with that statement,
     so WI.INFO still pointing at MSK means nothing was found.  */
  memset (s: &wi, c: 0, n: sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (gs: stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> (p: (gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, fd: &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (clauses: gimple_omp_for_clauses (gs: for_stmt),
			       kind: OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (clauses: gimple_omp_parallel_clauses (gs: stmt),
				      kind: OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      /* Map the temporary to itself so remapping in the outer
		 context leaves it alone.  */
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (gs: stmt);
	      gimple_omp_taskreg_set_clauses (gs: stmt, clauses: c);
	    }
	  /* When two adjacent loops form the non-rectangular part of the
	     nest and the iterator type is signed, three further
	     temporaries in that iterator type are needed.  */
	  if (fd.non_rect
	      && fd.last_nonrect == fd.first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (gs: for_stmt, i: fd.last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		{
		  v = gimple_omp_for_index (gs: for_stmt, i: fd.first_nonrect);
		  tree type2 = TREE_TYPE (v);
		  count++;
		  for (i = 0; i < 3; i++)
		    {
		      tree temp = create_tmp_var (type2);
		      tree c = build_omp_clause (UNKNOWN_LOCATION,
						 OMP_CLAUSE__LOOPTEMP_);
		      insert_decl_map (&outer_ctx->cb, temp, temp);
		      OMP_CLAUSE_DECL (c) = temp;
		      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (gs: stmt);
		      gimple_omp_taskreg_set_clauses (gs: stmt, clauses: c);
		    }
		}
	}
      /* Now create the COUNT _LOOPTEMP_ clauses of the iteration type
	 computed above.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (gs: stmt);
	  gimple_omp_taskreg_set_clauses (gs: stmt, clauses: c);
	}
    }
  /* A taskloop with a reduction clause additionally needs one
     _REDUCTEMP_ pointer-sized temporary.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (clauses: gimple_omp_task_clauses (gs: stmt),
			  kind: OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (gs: stmt);
      gimple_omp_task_set_clauses (gs: stmt, clauses: c);
    }
}
| 2319 | |
| 2320 | /* Scan an OpenMP parallel directive. */ |
| 2321 | |
static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (p: gsi_stmt (i: *gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (gs: stmt))
      && omp_find_clause (clauses: gimple_omp_parallel_clauses (gs: stmt),
			  kind: OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* For a combined parallel, add the _LOOPTEMP_ clauses required by the
     inner worksharing loop.  */
  if (gimple_omp_parallel_combined_p (g: stmt))
    add_taskreg_looptemp_clauses (msk: GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause has the task modifier, prepend a single
     _REDUCTEMP_ clause; one temporary serves them all, so stop at the
     first.  */
  for (tree c = omp_find_clause (clauses: gimple_omp_parallel_clauses (gs: stmt),
				 kind: OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), kind: OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (gs: stmt);
	gimple_omp_parallel_set_clauses (omp_parallel_stmt: stmt, clauses: c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  /* Create the context, the receiver record type ".omp_data_s" that will
     hold the shared data, and the child function the runtime invokes.  */
  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (obj: ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s" );
  name = build_decl (gimple_location (g: stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, task_copy: false);
  gimple_omp_parallel_set_child_fn (omp_parallel_stmt: stmt, child_fn: ctx->cb.dst_fn);

  /* Scan the clauses first (they may add record fields), then the body.  */
  scan_sharing_clauses (clauses: gimple_omp_parallel_clauses (gs: stmt), ctx);
  scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);

  /* With nothing to communicate, pass no data block at all.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
| 2382 | |
| 2383 | /* Scan an OpenMP task directive. */ |
| 2384 | |
static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (p: gsi_stmt (i: *gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (gs: stmt)
      && empty_body_p (gimple_omp_body (gs: stmt))
      && !omp_find_clause (clauses: gimple_omp_task_clauses (gs: stmt), kind: OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* For a taskloop, add the _LOOPTEMP_/_REDUCTEMP_ clauses required by
     the inner GIMPLE_OMP_FOR.  */
  if (gimple_omp_task_taskloop_p (g: stmt))
    add_taskreg_looptemp_clauses (msk: GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A taskwait needs no child function or data record; just scan its
     clauses and stop.  */
  if (gimple_omp_task_taskwait_p (g: stmt))
    {
      scan_sharing_clauses (clauses: gimple_omp_task_clauses (gs: stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (obj: ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  /* Create the receiver record type ".omp_data_s" and the task body
     child function.  */
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s" );
  name = build_decl (gimple_location (g: stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, task_copy: false);
  gimple_omp_task_set_child_fn (gs: stmt, child_fn: ctx->cb.dst_fn);

  scan_sharing_clauses (clauses: gimple_omp_task_clauses (gs: stmt), ctx);

  /* If clause scanning created a sender record, name it ".omp_data_a"
     and build the task copy function that fills it in.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a" );
      name = build_decl (gimple_location (g: stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, task_copy: true);
    }

  scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);

  /* With no fields to pass, drop the record and tell the runtime the
     data block has size 0 and alignment 1.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (gs: stmt, arg_size: t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (gs: stmt, arg_align: t);
    }
}
| 2454 | |
| 2455 | /* Helper function for finish_taskreg_scan, called through walk_tree. |
| 2456 | If maybe_lookup_decl_in_outer_context returns non-NULL for some |
| 2457 | tree, replace it in the expression. */ |
| 2458 | |
| 2459 | static tree |
| 2460 | finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data) |
| 2461 | { |
| 2462 | if (VAR_P (*tp)) |
| 2463 | { |
| 2464 | omp_context *ctx = (omp_context *) data; |
| 2465 | tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx); |
| 2466 | if (t != *tp) |
| 2467 | { |
| 2468 | if (DECL_HAS_VALUE_EXPR_P (t)) |
| 2469 | t = unshare_expr (DECL_VALUE_EXPR (t)); |
| 2470 | *tp = t; |
| 2471 | } |
| 2472 | *walk_subtrees = 0; |
| 2473 | } |
| 2474 | else if (IS_TYPE_OR_DECL_P (*tp)) |
| 2475 | *walk_subtrees = 0; |
| 2476 | return NULL_TREE; |
| 2477 | } |
| 2478 | |
| 2479 | /* If any decls have been made addressable during scan_omp, |
| 2480 | adjust their fields if needed, and layout record types |
| 2481 | of parallel/task constructs. */ |
| 2482 | |
static void
finish_taskreg_scan (omp_context *ctx)
{
  /* Nothing to do unless this context created a data-sharing record.  */
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any make_addressable_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (make_addressable_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (gs: ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, shared_ctx: ctx))
	      continue;
	    tree field = lookup_field (var: decl, ctx);
	    /* Already a pointer to the variable's type: nothing to fix.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* The field must now hold the variable's address instead of
	       its value; retype and realign it accordingly.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		/* Keep the sender-side record's twin field in sync.  */
		tree sfield = lookup_sfield (var: decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (gs: ctx->stmt);
      tree c = omp_find_clause (clauses, kind: OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  /* Unlink F from wherever it currently sits ...  */
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  /* ... and relink it at the head of the field list.  */
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      /* Teams need no field reordering, just the final layout.  */
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* Otherwise a GIMPLE_OMP_TASK (possibly a taskloop, see below).  */
      location_t loc = gimple_location (g: ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      tree detach_clause
	= omp_find_clause (clauses: gimple_omp_task_clauses (gs: ctx->stmt),
			   kind: OMP_CLAUSE_DETACH);
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Unlink the variable-sized field and append it to the
	       VLA_FIELDS chain.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      /* Splice the collected VLA fields back after the fixed-size ones.  */
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (g: ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  These are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (gs: ctx->stmt);
	  tree c1 = omp_find_clause (clauses, kind: OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     kind: OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, kind: OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink F1/F2/F3 from their current positions ...  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  /* ... and relink them at the head in order F1, F2[, F3].  */
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Apply the same reordering to the sender-side record.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      if (detach_clause)
	{
	  tree c, field;

	  /* Look for a firstprivate clause with the detach event handle.  */
	  for (c = gimple_omp_taskreg_clauses (gs: ctx->stmt);
	       c; c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
		continue;
	      if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
		  == OMP_CLAUSE_DECL (detach_clause))
		break;
	    }

	  gcc_assert (c);
	  field = lookup_field (OMP_CLAUSE_DECL (c), ctx);

	  /* Move field corresponding to the detach clause first.
	     This is filled by GOMP_task and needs to be in a
	     specific position.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == field)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = field;
	  if (ctx->srecord_type)
	    {
	      /* Same move in the sender-side record.  */
	      field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == field)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = field;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the data block's size and alignment on the task stmt.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  /* Non-constant size (VLA fields): remap any referenced decls
	     to their outer-context counterparts / value expressions.  */
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (gs: ctx->stmt, arg_size: t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (gs: ctx->stmt, arg_align: t);
    }
}
| 2695 | |
| 2696 | /* Find the enclosing offload context. */ |
| 2697 | |
| 2698 | static omp_context * |
| 2699 | enclosing_target_ctx (omp_context *ctx) |
| 2700 | { |
| 2701 | for (; ctx; ctx = ctx->outer) |
| 2702 | if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TARGET) |
| 2703 | break; |
| 2704 | |
| 2705 | return ctx; |
| 2706 | } |
| 2707 | |
| 2708 | /* Return whether CTX's parent compute construct is an OpenACC 'kernels' |
| 2709 | construct. |
| 2710 | (This doesn't include OpenACC 'kernels' decomposed parts.) */ |
| 2711 | |
| 2712 | static bool |
| 2713 | ctx_in_oacc_kernels_region (omp_context *ctx) |
| 2714 | { |
| 2715 | for (;ctx != NULL; ctx = ctx->outer) |
| 2716 | { |
| 2717 | gimple *stmt = ctx->stmt; |
| 2718 | if (gimple_code (g: stmt) == GIMPLE_OMP_TARGET |
| 2719 | && gimple_omp_target_kind (g: stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS) |
| 2720 | return true; |
| 2721 | } |
| 2722 | |
| 2723 | return false; |
| 2724 | } |
| 2725 | |
| 2726 | /* Check the parallelism clauses inside a OpenACC 'kernels' region. |
| 2727 | (This doesn't include OpenACC 'kernels' decomposed parts.) |
| 2728 | Until kernels handling moves to use the same loop indirection |
| 2729 | scheme as parallel, we need to do this checking early. */ |
| 2730 | |
static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  /* Diagnostics are emitted only for the loop the caller asked about;
     recursive calls (STMT == NULL) merely collect the parallelism mask
     of enclosing loops.  */
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  /* Accumulate gang/worker/vector dimensions already claimed by
     enclosing contexts.  */
  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx: ctx->outer);
  if (!stmt)
    {
      checking = false;
      /* Non-loop contexts contribute nothing to the mask.  */
      if (gimple_code (g: ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (p: ctx->stmt);
    }

  /* Collect this loop's explicit parallelism specifiers.  */
  for (tree c = gimple_omp_for_clauses (gs: stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      /* seq/auto may not be combined with explicit parallelism, and a
	 dimension may be used at most once within a loop nest.  */
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (g: stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers" );
      else if (has_auto && this_mask)
	error_at (gimple_location (g: stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers" );

      if (this_mask & outer_mask)
	error_at (gimple_location (g: stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop" );
    }

  return outer_mask | this_mask;
}
| 2789 | |
| 2790 | /* Scan a GIMPLE_OMP_FOR. */ |
| 2791 | |
static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (gs: stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (ctx: outer_ctx);

      /* Outside of an OpenACC 'kernels' region, gang/worker/vector
	 clauses on this loop may not carry an argument expression;
	 diagnose any that do.  */
      if (!(tgt && is_oacc_kernels (ctx: tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		/* By construction, this is impossible for OpenACC 'kernels'
		   decomposed parts.  */
		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));

		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause" ,
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		if (tgt)
		  inform (gimple_location (g: tgt->stmt),
			  "enclosing parent compute construct" );
		else if (oacc_get_fn_attrib (fn: current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine" );
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (ctx: tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (gs: stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      /* Lazily populate the set of reductions visible from enclosing
	 contexts.  */
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      /* Same variable reduced with a different operation in an
		 enclosing loop is suspicious; warn.  */
	      if (outer_var == local_var && outer_op != local_op)
		{
		  if (warning_at (OMP_CLAUSE_LOCATION (local_clause),
				  OPT_Wopenmp, "conflicting reduction "
				  "operations for %qE" ,
				  local_var))
		    inform (OMP_CLAUSE_LOCATION (outer_clause),
			    "location of the previous reduction for %qE" ,
			    outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (g: curr_loop->stmt), OPT_Wopenmp,
				"nested loop in reduction needs "
				"reduction clause for %qE" ,
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      /* Publish this loop's reductions for checks in inner loops.  */
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (ctx: tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (gs: stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan the pre-body, then for each collapsed dimension the index,
     initial value, final value and increment, and finally the body.  */
  scan_omp (gimple_omp_for_pre_body_ptr (gs: stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (gs: stmt); i++)
    {
      scan_omp_op (tp: gimple_omp_for_index_ptr (gs: stmt, i), ctx);
      scan_omp_op (tp: gimple_omp_for_initial_ptr (gs: stmt, i), ctx);
      scan_omp_op (tp: gimple_omp_for_final_ptr (gs: stmt, i), ctx);
      scan_omp_op (tp: gimple_omp_for_incr_ptr (gs: stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);
  return ctx;
}
| 2965 | |
| 2966 | /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */ |
| 2967 | |
static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  /* Replace the simd loop at *GSI with a GIMPLE_BIND of the shape
	cond = IFN_GOMP_USE_SIMT ();
	if (cond != 0) goto lab1; else goto lab2;
	lab1: <copy of the loop, tagged with a _simt_ clause>; goto lab3;
	lab2: <the original loop>;
	lab3:
     and scan both variants.  */
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind_stmt: bind, vars: cond);
  gimple_call_set_lhs (gs: g, lhs: cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (label: lab1);
  gimple_seq_add_stmt (&seq, g);
  /* Deep-copy the loop for the SIMT variant and prepend an artificial
     _simt_ clause to its clause list.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (seq: stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (p: new_seq);
  tree clause = build_omp_clause (gimple_location (g: stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (gs: new_stmt);
  gimple_omp_for_set_clauses (gs: new_stmt, clauses: clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (dest: lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (label: lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (label: lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind_stmt: bind, seq);
  update_stmt (s: bind);
  /* Scan both copies; remember the SIMT twin on the original loop's
     context.  */
  scan_omp_for (stmt: new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
| 3007 | |
| 3008 | static tree omp_find_scan (gimple_stmt_iterator *, bool *, |
| 3009 | struct walk_stmt_info *); |
| 3010 | static omp_context *maybe_lookup_ctx (gimple *); |
| 3011 | |
| 3012 | /* Duplicate #pragma omp simd, one for the scan input phase loop and one |
| 3013 | for scan phase loop. */ |
| 3014 | |
static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  /* Wrap the original loop into an input-phase GIMPLE_OMP_SCAN and
     insert a second, scan-phase GIMPLE_OMP_SCAN right after it.  */
  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  /* Locate the separating GIMPLE_OMP_SCAN pair inside the original
     loop's body.  */
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (s: &wi, c: 0, n: sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (gs: stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (i: input1_gsi);
  gsi_next (i: &input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (i: input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  c = gimple_omp_scan_clauses (scan_stmt: as_a <gomp_scan *> (p: scan_stmt1));
  /* For an exclusive scan the roles of the two halves are swapped.  */
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (a&: input_stmt1, b&: scan_stmt1);

  /* Copy the loop for the scan phase while the input half's body is
     temporarily detached, so that the copy receives only the scan
     half.  */
  gimple_seq input_body1 = gimple_omp_body (gs: input_stmt1);
  gimple_omp_set_body (gs: input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (seq: stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (p: scan_body);

  /* Restore the original and drop the scan half from the original loop.  */
  gimple_omp_set_body (gs: input_stmt1, body: input_body1);
  gimple_omp_set_body (gs: scan_stmt1, NULL);

  /* Find the inner GIMPLE_OMP_SCAN pair in the copied loop as well.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (s: &wi, c: 0, n: sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (gs: new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (i: input2_gsi);
  gsi_next (i: &input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (i: input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (a&: input_stmt2, b&: scan_stmt2);

  /* The copy keeps only the scan half; drop its input half.  */
  gimple_omp_set_body (gs: input_stmt2, NULL);

  /* Attach the two loops to their respective phase constructs.  */
  gimple_omp_set_body (gs: input_stmt, body: input_body);
  gimple_omp_set_body (gs: scan_stmt, body: scan_body);

  /* Create contexts for and scan both phases.  */
  omp_context *ctx = new_omp_context (stmt: input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (gs: input_stmt), ctx);

  ctx = new_omp_context (stmt: scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (gs: scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
| 3086 | |
| 3087 | /* Scan an OpenMP sections directive. */ |
| 3088 | |
| 3089 | static void |
| 3090 | scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx) |
| 3091 | { |
| 3092 | omp_context *ctx; |
| 3093 | |
| 3094 | ctx = new_omp_context (stmt, outer_ctx); |
| 3095 | scan_sharing_clauses (clauses: gimple_omp_sections_clauses (gs: stmt), ctx); |
| 3096 | scan_omp (gimple_omp_body_ptr (gs: stmt), ctx); |
| 3097 | } |
| 3098 | |
| 3099 | /* Scan an OpenMP single directive. */ |
| 3100 | |
| 3101 | static void |
| 3102 | scan_omp_single (gomp_single *stmt, omp_context *outer_ctx) |
| 3103 | { |
| 3104 | omp_context *ctx; |
| 3105 | tree name; |
| 3106 | |
| 3107 | ctx = new_omp_context (stmt, outer_ctx); |
| 3108 | ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0); |
| 3109 | ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE); |
| 3110 | name = create_tmp_var_name (".omp_copy_s" ); |
| 3111 | name = build_decl (gimple_location (g: stmt), |
| 3112 | TYPE_DECL, name, ctx->record_type); |
| 3113 | TYPE_NAME (ctx->record_type) = name; |
| 3114 | |
| 3115 | scan_sharing_clauses (clauses: gimple_omp_single_clauses (gs: stmt), ctx); |
| 3116 | scan_omp (gimple_omp_body_ptr (gs: stmt), ctx); |
| 3117 | |
| 3118 | if (TYPE_FIELDS (ctx->record_type) == NULL) |
| 3119 | ctx->record_type = NULL; |
| 3120 | else |
| 3121 | layout_type (ctx->record_type); |
| 3122 | } |
| 3123 | |
| 3124 | /* Scan a GIMPLE_OMP_TARGET. */ |
| 3125 | |
static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  /* Only offloaded constructs get an outlined child function; other
     target directives (data handling) do not.  */
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (gs: stmt);

  /* Build the .omp_data_t record that will carry the mapped data.  */
  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t" );
  name = build_decl (gimple_location (g: stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, task_copy: false);
      gimple_omp_target_set_child_fn (omp_target_stmt: stmt, child_fn: ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* The field list was built in reverse; restore declaration order
	 before laying out the record.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All fields are expected to share one alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }

  if (ctx->teams_nested_p && ctx->nonteams_nested_p)
    {
      /* Diagnose the invalid mix and replace the body with an empty
	 bind to avoid cascading errors during lowering.  */
      error_at (gimple_location (g: stmt),
		"%<target%> construct with nested %<teams%> construct "
		"contains directives outside of the %<teams%> construct" );
      gimple_omp_set_body (gs: stmt, body: gimple_build_bind (NULL, NULL, NULL));
    }
}
| 3181 | |
| 3182 | /* Scan an OpenMP teams directive. */ |
| 3183 | |
static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  /* Non-host teams need no outlining here; just scan clauses and body.  */
  if (!gimple_omp_teams_host (omp_teams_stmt: stmt))
    {
      scan_sharing_clauses (clauses: gimple_omp_teams_clauses (gs: stmt), ctx);
      scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);
      return;
    }
  /* Host teams are outlined like parallel/task regions: build the
     .omp_data_s record and a child function, and queue the context so
     finish_taskreg_scan can finalize the record later.  */
  taskreg_contexts.safe_push (obj: ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s" );
  name = build_decl (gimple_location (g: stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, task_copy: false);
  gimple_omp_teams_set_child_fn (omp_teams_stmt: stmt, child_fn: ctx->cb.dst_fn);

  scan_sharing_clauses (clauses: gimple_omp_teams_clauses (gs: stmt), ctx);
  scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);

  /* Drop the record entirely if nothing needs to be passed.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
| 3215 | |
/* Check the nesting restrictions imposed by the OpenMP and OpenACC
   specifications on statement STMT, which appears inside context CTX
   (the innermost enclosing OMP context, or NULL when STMT is not nested
   in any OMP construct).  On a violation, emit a diagnostic and return
   false — the caller (scan_omp_1_stmt) then removes STMT from the IL.
   Return true if STMT is allowed here.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (gimple_code (g: stmt) == GIMPLE_OMP_ATOMIC_LOAD
      || gimple_code (g: stmt) == GIMPLE_OMP_ATOMIC_STORE)
    /* ..., except for the atomic codes that OpenACC shares with OpenMP.  */
    ;
  else if (!(is_gimple_omp (stmt)
	     && is_gimple_omp_oacc (stmt)))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (g: stmt),
		    "non-OpenACC construct inside of OpenACC routine" );
	  return false;
	}
      else
	/* Not in an OpenACC routine; reject if any enclosing context is
	   an OpenACC construct.  */
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (stmt: octx->stmt)
	      && is_gimple_omp_oacc (stmt: octx->stmt))
	    {
	      error_at (gimple_location (g: stmt),
			"non-OpenACC construct inside of OpenACC region" );
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      /* Checks that depend on the immediately enclosing construct.  */
      if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (g: ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
	{
	  c = omp_find_clause (clauses: gimple_omp_target_clauses (gs: ctx->stmt),
			       kind: OMP_CLAUSE_DEVICE);
	  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      error_at (gimple_location (g: stmt),
			"OpenMP constructs are not allowed in target region "
			"with %<ancestor%>" );
	      return false;
	    }

	  /* Record whether this target region has a first nested teams
	     construct, or any other (non-teams) nested construct.  */
	  if (gimple_code (g: stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
	    ctx->teams_nested_p = true;
	  else
	    ctx->nonteams_nested_p = true;
	}
      /* For a scan directly inside an OMP for, apply the checks below to
	 the enclosing loop context instead.  */
      if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (g: ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  /* Inside a simd region only a restricted set of constructs is
	     permitted.  */
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (g: stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (g: stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (g: stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (g: stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause" );
	      return false;
	    }
	  if (gimple_code (g: stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (ord_stmt: as_a <gomp_ordered *> (p: stmt));
	      if (omp_find_clause (clauses: c, kind: OMP_CLAUSE_SIMD))
		{
		  /* ordered simd threads requires a closely nested
		     combined for simd worksharing loop.  */
		  if (omp_find_clause (clauses: c, kind: OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (g: ctx->stmt)
			  || gimple_code (g: ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (g: ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (g: ctx->outer->stmt)))
		    {
		      error_at (gimple_location (g: stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region" ,
				lang_GNU_Fortran () ? "do" : "for" );
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (g: stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (g: stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (g: stmt) == GIMPLE_OMP_SCAN
		   || gimple_code (g: stmt) == GIMPLE_OMP_STRUCTURED_BLOCK)
	    return true;
	  else if (gimple_code (g: stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (g: stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region" );
	  return false;
	}
      else if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  /* Directly inside teams only distribute, parallel, or a loop
	     construct with a bind clause are allowed.  */
	  if ((gimple_code (g: stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (g: stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (clauses: gimple_omp_for_clauses (gs: stmt),
				       kind: OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (g: stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (g: stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region" );
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (g: stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (g: stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (g: stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (g: stmt) != GIMPLE_OMP_SCAN
	       && gimple_code (g: stmt) != GIMPLE_OMP_STRUCTURED_BLOCK)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (g: stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region" );
	  else
	    error_at (gimple_location (g: stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause" );
	  return false;
	}
    }
  /* Checks keyed on the code of STMT itself, walking outer contexts
     where needed.  */
  switch (gimple_code (g: stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (g: ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (g: stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct" );
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (clauses: gimple_omp_for_clauses (gs: stmt), kind: OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  /* OpenACC loop must appear inside an OpenACC compute construct
	     or an OpenACC routine.  */
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (g: ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (g: ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (g: ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
		    ok = true;
		    break;

		  default:
		    break;
		  }

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (fn: current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (g: stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region" );
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      if (is_gimple_call (gs: stmt)
	  && (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
	      == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point" ;
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (g: stmt), "orphaned %qs construct" ,
			construct);
	      return false;
	    }
	  /* The first builtin argument encodes the construct being
	     cancelled: 1 parallel, 2 for, 4 sections, 8 taskgroup.  */
	  switch (tree_fits_shwi_p (gimple_call_arg (gs: stmt, index: 0))
		  ? tree_to_shwi (gimple_call_arg (gs: stmt, index: 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (g: ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel" ;
	      else if (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (gs: stmt, index: 1)))
		ctx->cancellable = true;
	      kind = "parallel" ;
	      break;
	    case 2:
	      if (gimple_code (g: ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (g: ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for" ;
	      else if (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (gs: stmt, index: 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (clauses: gimple_omp_for_clauses (gs: ctx->stmt),
				       kind: OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (g: stmt), OPT_Wopenmp,
				"%<cancel for%> inside "
				"%<nowait%> for construct" );
		  if (omp_find_clause (clauses: gimple_omp_for_clauses (gs: ctx->stmt),
				       kind: OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (g: stmt), OPT_Wopenmp,
				"%<cancel for%> inside "
				"%<ordered%> for construct" );
		}
	      kind = "for" ;
	      break;
	    case 4:
	      if (gimple_code (g: ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (g: ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections" ;
	      else if (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (gs: stmt, index: 1)))
		{
		  if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (clauses: gimple_omp_sections_clauses
								(gs: ctx->stmt),
					   kind: OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (g: stmt), OPT_Wopenmp,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct" );
		    }
		  else
		    {
		      /* Inside a section; the enclosing sections region
			 carries the clauses and cancellable flag.  */
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (clauses: gimple_omp_sections_clauses
							(gs: ctx->outer->stmt),
					   kind: OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (g: stmt), OPT_Wopenmp,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct" );
		    }
		}
	      kind = "sections" ;
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx: ctx->outer)))
		bad = "task" ;
	      else
		{
		  /* Verify the cancelled taskgroup is not separated from
		     STMT by a parallel/teams/target boundary.  */
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (g: octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (g: octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (g: stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region" ,
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (g: octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (ctx: octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (gs: octx->outer->stmt);
			      if (!omp_find_clause (clauses, kind: OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup" ;
	      break;
	    default:
	      error_at (gimple_location (g: stmt), "invalid arguments" );
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (g: stmt),
			"%<%s %s%> construct not closely nested inside of %qs" ,
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      /* Worksharing constructs (and barrier builtins, via the fallthrough
	 above) may not be closely nested in another worksharing or
	 similar region; scan outward for the closest relevant one.  */
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (g: ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (g: ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (g: ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (gs: stmt))
	      {
		if (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (g: stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, %<masked%>, explicit "
			  "%<task%> or %<taskloop%> region" );
		return false;
	      }
	    error_at (gimple_location (g: stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, %<masked%>, explicit %<task%> or "
		      "%<taskloop%> region" );
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (g: ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (g: ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (g: ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (g: ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (g: stmt),
		      "%qs region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region" ,
		      gimple_code (g: stmt) == GIMPLE_OMP_MASTER
		      ? "master" : "masked" );
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (g: ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_SCOPE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (g: ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (g: ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (g: ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	    error_at (gimple_location (g: stmt),
		      "%<scope%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%>, "
		      "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
		      "or %<masked%> region" );
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (g: ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink) (doacross) clauses are only valid on
	 ordered, not on task.  */
      for (c = gimple_omp_task_clauses (gs: stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
	  {
	    enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<%s(%s)%> is only allowed in %<omp ordered%>" ,
		      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross" ,
		      kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink" );
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (ord_stmt: as_a <gomp_ordered *> (p: stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "invalid depend kind in omp %<ordered%> %<depend%>" );
		  return false;
		}
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }

	  tree oclause;
	  /* Look for containing ordered(N) loop.  */
	  if (ctx == NULL
	      || gimple_code (g: ctx->stmt) != GIMPLE_OMP_FOR
	      || (oclause
		  = omp_find_clause (clauses: gimple_omp_for_clauses (gs: ctx->stmt),
				     kind: OMP_CLAUSE_ORDERED)) == NULL_TREE)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<ordered%> construct with %<depend%> clause "
			"must be closely nested inside an %<ordered%> loop" );
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (ord_stmt: as_a <gomp_ordered *> (p: stmt));
      if (omp_find_clause (clauses: c, kind: OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (g: stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region" );
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (g: ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (g: stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region" );
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (clauses: gimple_omp_for_clauses (gs: ctx->stmt),
				 kind: OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (g: stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause" );
		return false;
	      }
	    if (!gimple_omp_ordered_standalone_p (g: stmt))
	      {
		if (OMP_CLAUSE_ORDERED_DOACROSS (o))
		  {
		    error_at (gimple_location (g: stmt),
			      "%<ordered%> construct without %<doacross%> or "
			      "%<depend%> clauses must not have the same "
			      "binding region as %<ordered%> construct with "
			      "those clauses" );
		    return false;
		  }
		else if (OMP_CLAUSE_ORDERED_EXPR (o))
		  {
		    /* The ordered(N) argument must match the loop's
		       collapse depth (1 when no collapse clause).  */
		    tree co
		      = omp_find_clause (clauses: gimple_omp_for_clauses (gs: ctx->stmt),
					 kind: OMP_CLAUSE_COLLAPSE);
		    HOST_WIDE_INT
		      o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
		    HOST_WIDE_INT c_n = 1;
		    if (co)
		      c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
		    if (o_n != c_n)
		      {
			error_at (gimple_location (g: stmt),
				  "%<ordered%> construct without %<doacross%> "
				  "or %<depend%> clauses binds to loop where "
				  "%<collapse%> argument %wd is different from "
				  "%<ordered%> argument %wd" , c_n, o_n);
			return false;
		      }
		  }
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (g: ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (g: stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause" );
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* A critical region must not be nested (at any depth) inside a
	   critical region with the same name.  */
	tree this_stmt_name
	  = gimple_omp_critical_name (crit_stmt: as_a <gomp_critical *> (p: stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	      = dyn_cast <gomp_critical *> (p: ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (crit_stmt: other_crit))
	      {
		error_at (gimple_location (g: stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name" );
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (g: ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (g: ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct.  */
	  error_at (gimple_location (g: stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct" );
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (gs: stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
	  {
	    enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>" ,
		      kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink" );
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (g: stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet" );
	  return false;
	}
      /* Walk enclosing target contexts, diagnosing invalid combinations
	 of OpenMP/OpenACC target-style regions.  */
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (g: ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (stmt: ctx->stmt))
		{
		  error_at (gimple_location (g: stmt),
			    "OpenACC construct inside of non-OpenACC region" );
		  return false;
		}
	      continue;
	    }

	  /* Map the target kinds of STMT and the enclosing context to
	     user-visible construct names for diagnostics.  */
	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (g: stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target" ; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data" ; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update" ; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data" ; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data" ; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel" ; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels" ; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial" ; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data" ; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update" ; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
	      stmt_name = "enter data" ; break;
	    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
	      stmt_name = "exit data" ; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare" ; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data" ;
	      break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      stmt_name = "kernels" ; break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (g: ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target" ; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data" ; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel" ; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels" ; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial" ; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data" ; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data" ; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      ctx_stmt_name = "kernels" ; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (stmt: ctx->stmt))
	    {
	      error_at (gimple_location (g: stmt),
			"%s %qs construct inside of %s %qs region" ,
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP" ), stmt_name,
			(is_gimple_omp_oacc (stmt: ctx->stmt)
			 ? "OpenACC" : "OpenMP" ), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (stmt: ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (stmt: ctx->stmt))
		{
		  error_at (gimple_location (g: stmt),
			    "%qs construct inside of %qs region" ,
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  /* target inside target is allowed with device(ancestor);
		     otherwise only warn about the nesting.  */
		  if ((gimple_omp_target_kind (g: ctx->stmt)
		       == GF_OMP_TARGET_KIND_REGION)
		      && (gimple_omp_target_kind (g: stmt)
			  == GF_OMP_TARGET_KIND_REGION))
		    {
		      c = omp_find_clause (clauses: gimple_omp_target_clauses (gs: stmt),
					   kind: OMP_CLAUSE_DEVICE);
		      if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
			break;
		    }
		  warning_at (gimple_location (g: stmt), OPT_Wopenmp,
			      "%qs construct inside of %qs region" ,
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
| 3980 | |
| 3981 | |
/* Helper function for scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  DATA is the walk_stmt_info whose
   info field holds the current omp_context, or NULL when not inside
   any OMP construct.  Decls and types encountered inside a context
   are remapped through the context's copy-body callbacks (ctx->cb).
   Always returns NULL_TREE so the walk continues.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;
  tree tmp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  tmp = NULL_TREE;
	  /* A variable with the "omp allocate var" attribute stands for
	     the decl recorded in the attribute's value; remap that
	     underlying decl and, if it was replaced, refer to it by
	     taking its address.  */
	  if (TREE_CODE (t) == VAR_DECL
	      && (tmp = lookup_attribute (attr_name: "omp allocate var" ,
					  DECL_ATTRIBUTES (t))) != NULL_TREE)
	    t = TREE_VALUE (TREE_VALUE (tmp));
	  tree repl = remap_decl (decl: t, id: &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  if (tmp != NULL_TREE && t != repl)
	    *tp = build_fold_addr_expr (repl);
	  else if (tmp == NULL_TREE)
	    *tp = repl;
	}
      break;

    case INDIRECT_REF:
    case MEM_REF:
      /* A dereference of an "omp allocate var" variable: remap the decl
	 recorded in the attribute and, when it changed, replace the
	 whole reference with the remapped decl.  */
      if (ctx
	  && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
	  && ((tmp = lookup_attribute (attr_name: "omp allocate var" ,
				       DECL_ATTRIBUTES (TREE_OPERAND (t, 0))))
	      != NULL_TREE))
	{
	  tmp = TREE_VALUE (TREE_VALUE (tmp));
	  tree repl = remap_decl (decl: tmp, id: &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  if (tmp != repl)
	    *tp = repl;
	  break;
	}
      /* Not an "omp allocate var" reference; treat like any other
	 expression.  */
      gcc_fallthrough ();

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (type: t, id: &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* If the expression's type was remapped, propagate the new
		 type.  Integer constants are rebuilt rather than mutated
		 in place — presumably because constant nodes can be
		 shared; TODO confirm.  */
	      tree tem = remap_type (TREE_TYPE (t), id: &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (type: tem, cst: wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
| 4057 | |
| 4058 | /* Return true if FNDECL is a setjmp or a longjmp. */ |
| 4059 | |
| 4060 | static bool |
| 4061 | setjmp_or_longjmp_p (const_tree fndecl) |
| 4062 | { |
| 4063 | if (fndecl_built_in_p (node: fndecl, name1: BUILT_IN_SETJMP, names: BUILT_IN_LONGJMP)) |
| 4064 | return true; |
| 4065 | |
| 4066 | tree declname = DECL_NAME (fndecl); |
| 4067 | if (!declname |
| 4068 | || (DECL_CONTEXT (fndecl) != NULL_TREE |
| 4069 | && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL) |
| 4070 | || !TREE_PUBLIC (fndecl)) |
| 4071 | return false; |
| 4072 | |
| 4073 | const char *name = IDENTIFIER_POINTER (declname); |
| 4074 | return !strcmp (s1: name, s2: "setjmp" ) || !strcmp (s1: name, s2: "longjmp" ); |
| 4075 | } |
| 4076 | |
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  Diagnoses invalid nesting (replacing
   offending statements with a GIMPLE_NOP) and dispatches each OMP
   construct to its construct-specific scanning routine, which builds
   the omp_context tree.  WI->info carries the enclosing omp_context
   (or NULL at the outermost level).  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (i: *gsi);
  omp_context *ctx = (omp_context *) wi->info;

  /* Make any diagnostics point at the statement being scanned.  */
  if (gimple_has_location (g: stmt))
    input_location = gimple_location (g: stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (gs: stmt))
    {
      tree fndecl = gimple_call_fndecl (gs: stmt);
      if (fndecl)
	{
	  /* Reject setjmp/longjmp calls inside a simd region, unless
	     the context is marked loop_p.  */
	  if (ctx
	      && gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (g: stmt),
			"setjmp/longjmp inside %<simd%> construct" );
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    /* These GOMP builtins stand for OpenMP directives and are
	       therefore subject to the same nesting restrictions.  */
	    switch (DECL_FUNCTION_CODE (decl: fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
	        remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      /* Diagnose OpenMP runtime API calls in regions where the
		 standard disallows (or restricts) them.  */
	      omp_context *octx = ctx;
	      /* For a scan construct, the restrictions of the enclosing
		 region apply.  */
	      if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (g: stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause" , fndecl);
		}
	      /* Inside teams only omp_get_num_teams and omp_get_team_num
		 may be called (compared here by name).  */
	      if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TEAMS
		  && omp_runtime_api_call (fndecl)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen (s: "omp_get_num_teams" ))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 s2: "omp_get_num_teams" ) != 0)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen (s: "omp_get_team_num" ))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 s2: "omp_get_team_num" ) != 0))
		{
		  remove = true;
		  error_at (gimple_location (g: stmt),
			    "OpenMP runtime API call %qD strictly nested in a "
			    "%<teams%> region" , fndecl);
		}
	      /* Runtime API calls in a target region with device(ancestor)
		 are diagnosed but the statement is kept.  */
	      if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (g: ctx->stmt)
		      == GF_OMP_TARGET_KIND_REGION)
		  && omp_runtime_api_call (fndecl))
		{
		  tree tgt_clauses = gimple_omp_target_clauses (gs: ctx->stmt);
		  tree c = omp_find_clause (clauses: tgt_clauses, kind: OMP_CLAUSE_DEVICE);
		  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
		    error_at (gimple_location (g: stmt),
			      "OpenMP runtime API call %qD in a region with "
			      "%<device(ancestor)%> clause" , fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      /* The error has been emitted; neutralize the offending statement
	 so the rest of lowering can proceed.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  /* By default the cases below walk sub-statements themselves; only the
     GIMPLE_BIND and default cases hand control back to the walker.  */
  *handled_ops_p = true;

  switch (gimple_code (g: stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, outer_ctx: ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, outer_ctx: ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A combined simd with an inscan reduction gets dedicated
	 scanning, unless errors were already seen.  */
      if ((gimple_omp_for_kind (g: as_a <gomp_for *> (p: stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (g: stmt)
	  && gimple_code (g: ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (gs: as_a <gomp_for *> (p: stmt));
	  tree c = omp_find_clause (clauses, kind: OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, stmt: as_a <gomp_for *> (p: stmt), outer_ctx: ctx);
	      break;
	    }
	}
      /* Non-collapsed simd loops that may be offloaded to a SIMT target
	 are scanned specially.  */
      if ((gimple_omp_for_kind (g: as_a <gomp_for *> (p: stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (gs: stmt) == 1)
	scan_omp_simd (gsi, stmt: as_a <gomp_for *> (p: stmt), outer_ctx: ctx);
      else
	scan_omp_for (stmt: as_a <gomp_for *> (p: stmt), outer_ctx: ctx);
      break;

    case GIMPLE_OMP_SCOPE:
      ctx = new_omp_context (stmt, outer_ctx: ctx);
      scan_sharing_clauses (clauses: gimple_omp_scope_clauses (gs: stmt), ctx);
      scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);
      break;

    case GIMPLE_OMP_DISPATCH:
      ctx = new_omp_context (stmt, outer_ctx: ctx);
      scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);
      break;

    case GIMPLE_OMP_INTEROP:
      /* interop has no body to scan.  */
      ctx = new_omp_context (stmt, outer_ctx: ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (stmt: as_a <gomp_sections *> (p: stmt), outer_ctx: ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (stmt: as_a <gomp_single *> (p: stmt), outer_ctx: ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record on the context whether this is an inclusive or exclusive
	 scan before the shared scanning code below runs.  */
      if (tree clauses = gimple_omp_scan_clauses (scan_stmt: as_a <gomp_scan *> (p: stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_STRUCTURED_BLOCK:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, outer_ctx: ctx);
      scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);
      break;

    case GIMPLE_OMP_MASKED:
      ctx = new_omp_context (stmt, outer_ctx: ctx);
      scan_sharing_clauses (clauses: gimple_omp_masked_clauses (gs: stmt), ctx);
      scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, outer_ctx: ctx);
      scan_sharing_clauses (clauses: gimple_omp_taskgroup_clauses (gs: stmt), ctx);
      scan_omp (gimple_omp_body_ptr (gs: stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      /* Offloaded target regions behave like outlined parallel/task
	 bodies for the purpose of taskreg nesting tracking.  */
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (stmt: as_a <gomp_target *> (p: stmt), outer_ctx: ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (stmt: as_a <gomp_target *> (p: stmt), outer_ctx: ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Likewise for host teams regions.  */
      if (gimple_omp_teams_host (omp_teams_stmt: as_a <gomp_teams *> (p: stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (stmt: as_a <gomp_teams *> (p: stmt), outer_ctx: ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (stmt: as_a <gomp_teams *> (p: stmt), outer_ctx: ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Let the walker descend into the bind body; just record an
	   identity mapping for each bind-local variable so later
	   remapping leaves them alone.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (bind_stmt: as_a <gbind *> (p: stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
| 4309 | |
| 4310 | |
| 4311 | /* Scan all the statements starting at the current statement. CTX |
| 4312 | contains context information about the OMP directives and |
| 4313 | clauses found during the scan. */ |
| 4314 | |
| 4315 | static void |
| 4316 | scan_omp (gimple_seq *body_p, omp_context *ctx) |
| 4317 | { |
| 4318 | location_t saved_location; |
| 4319 | struct walk_stmt_info wi; |
| 4320 | |
| 4321 | memset (s: &wi, c: 0, n: sizeof (wi)); |
| 4322 | wi.info = ctx; |
| 4323 | wi.want_locations = true; |
| 4324 | |
| 4325 | saved_location = input_location; |
| 4326 | walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi); |
| 4327 | input_location = saved_location; |
| 4328 | } |
| 4329 | |
| 4330 | /* Re-gimplification and code generation routines. */ |
| 4331 | |
| 4332 | /* Remove omp_member_access_dummy_var variables from gimple_bind_vars |
| 4333 | of BIND if in a method. */ |
| 4334 | |
| 4335 | static void |
| 4336 | maybe_remove_omp_member_access_dummy_vars (gbind *bind) |
| 4337 | { |
| 4338 | if (DECL_ARGUMENTS (current_function_decl) |
| 4339 | && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl)) |
| 4340 | && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl))) |
| 4341 | == POINTER_TYPE)) |
| 4342 | { |
| 4343 | tree vars = gimple_bind_vars (bind_stmt: bind); |
| 4344 | for (tree *pvar = &vars; *pvar; ) |
| 4345 | if (omp_member_access_dummy_var (decl: *pvar)) |
| 4346 | *pvar = DECL_CHAIN (*pvar); |
| 4347 | else |
| 4348 | pvar = &DECL_CHAIN (*pvar); |
| 4349 | gimple_bind_set_vars (bind_stmt: bind, vars); |
| 4350 | } |
| 4351 | } |
| 4352 | |
| 4353 | /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of |
| 4354 | block and its subblocks. */ |
| 4355 | |
| 4356 | static void |
| 4357 | remove_member_access_dummy_vars (tree block) |
| 4358 | { |
| 4359 | for (tree *pvar = &BLOCK_VARS (block); *pvar; ) |
| 4360 | if (omp_member_access_dummy_var (decl: *pvar)) |
| 4361 | *pvar = DECL_CHAIN (*pvar); |
| 4362 | else |
| 4363 | pvar = &DECL_CHAIN (*pvar); |
| 4364 | |
| 4365 | for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block)) |
| 4366 | remove_member_access_dummy_vars (block); |
| 4367 | } |
| 4368 | |
| 4369 | /* If a context was created for STMT when it was scanned, return it. */ |
| 4370 | |
| 4371 | static omp_context * |
| 4372 | maybe_lookup_ctx (gimple *stmt) |
| 4373 | { |
| 4374 | splay_tree_node n; |
| 4375 | n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt); |
| 4376 | return n ? (omp_context *) n->value : NULL; |
| 4377 | } |
| 4378 | |
| 4379 | |
| 4380 | /* Find the mapping for DECL in CTX or the immediately enclosing |
| 4381 | context that has a mapping for DECL. |
| 4382 | |
| 4383 | If CTX is a nested parallel directive, we may have to use the decl |
| 4384 | mappings created in CTX's parent context. Suppose that we have the |
| 4385 | following parallel nesting (variable UIDs showed for clarity): |
| 4386 | |
| 4387 | iD.1562 = 0; |
| 4388 | #omp parallel shared(iD.1562) -> outer parallel |
| 4389 | iD.1562 = iD.1562 + 1; |
| 4390 | |
| 4391 | #omp parallel shared (iD.1562) -> inner parallel |
| 4392 | iD.1562 = iD.1562 - 1; |
| 4393 | |
| 4394 | Each parallel structure will create a distinct .omp_data_s structure |
| 4395 | for copying iD.1562 in/out of the directive: |
| 4396 | |
| 4397 | outer parallel .omp_data_s.1.i -> iD.1562 |
| 4398 | inner parallel .omp_data_s.2.i -> iD.1562 |
| 4399 | |
| 4400 | A shared variable mapping will produce a copy-out operation before |
| 4401 | the parallel directive and a copy-in operation after it. So, in |
| 4402 | this case we would have: |
| 4403 | |
| 4404 | iD.1562 = 0; |
| 4405 | .omp_data_o.1.i = iD.1562; |
| 4406 | #omp parallel shared(iD.1562) -> outer parallel |
| 4407 | .omp_data_i.1 = &.omp_data_o.1 |
| 4408 | .omp_data_i.1->i = .omp_data_i.1->i + 1; |
| 4409 | |
| 4410 | .omp_data_o.2.i = iD.1562; -> ** |
| 4411 | #omp parallel shared(iD.1562) -> inner parallel |
| 4412 | .omp_data_i.2 = &.omp_data_o.2 |
| 4413 | .omp_data_i.2->i = .omp_data_i.2->i - 1; |
| 4414 | |
| 4415 | |
| 4416 | ** This is a problem. The symbol iD.1562 cannot be referenced |
| 4417 | inside the body of the outer parallel region. But since we are |
| 4418 | emitting this copy operation while expanding the inner parallel |
| 4419 | directive, we need to access the CTX structure of the outer |
| 4420 | parallel directive to get the correct mapping: |
| 4421 | |
| 4422 | .omp_data_o.2.i = .omp_data_i.1->i |
| 4423 | |
| 4424 | Since there may be other workshare or parallel directives enclosing |
| 4425 | the parallel directive, it may be necessary to walk up the context |
| 4426 | parent chain. This is not a problem in general because nested |
| 4427 | parallelism happens only rarely. */ |
| 4428 | |
| 4429 | static tree |
| 4430 | lookup_decl_in_outer_ctx (tree decl, omp_context *ctx) |
| 4431 | { |
| 4432 | tree t; |
| 4433 | omp_context *up; |
| 4434 | |
| 4435 | for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer) |
| 4436 | t = maybe_lookup_decl (var: decl, ctx: up); |
| 4437 | |
| 4438 | gcc_assert (!ctx->is_nested || t || is_global_var (decl)); |
| 4439 | |
| 4440 | return t ? t : decl; |
| 4441 | } |
| 4442 | |
| 4443 | |
| 4444 | /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found |
| 4445 | in outer contexts. */ |
| 4446 | |
| 4447 | static tree |
| 4448 | maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx) |
| 4449 | { |
| 4450 | tree t = NULL; |
| 4451 | omp_context *up; |
| 4452 | |
| 4453 | for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer) |
| 4454 | t = maybe_lookup_decl (var: decl, ctx: up); |
| 4455 | |
| 4456 | return t ? t : decl; |
| 4457 | } |
| 4458 | |
| 4459 | |
/* Construct the initialization value for reduction operation OP
   applied to TYPE, at location LOC: the identity (neutral) element
   of the operation.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      /* Zero is the neutral element for these operations.  */
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      /* One is the neutral element for these operations.  */
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      /* All bits set (-1) is the neutral element for bitwise AND.  */
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* For MAX the neutral element is the smallest value: -Inf for
	 floats honoring infinities, otherwise the most negative
	 representable value.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE min;
	  if (HONOR_INFINITIES (type))
	    real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, cst: min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* Likewise, for MIN the neutral element is the largest value.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    max = dconstinf;
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, cst: max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
| 4534 | |
| 4535 | /* Construct the initialization value for reduction CLAUSE. */ |
| 4536 | |
| 4537 | tree |
| 4538 | omp_reduction_init (tree clause, tree type) |
| 4539 | { |
| 4540 | return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause), |
| 4541 | OMP_CLAUSE_REDUCTION_CODE (clause), type); |
| 4542 | } |
| 4543 | |
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  If the clause carries no explicit alignment,
   return an implementation-defined default: the largest alignment of
   any vector type the target would use for SIMD.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  /* Scalar mode classes paired with the vector mode class they are
     expected to vectorize into.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes. */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Prefer the widest related autovectorization mode, if any.  */
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (mode: &alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	/* Skip modes for which no matching frontend type exists.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
| 4585 | |
| 4586 | |
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  /* All members start zeroed; the class is a plain aggregate of trees,
     sequences and flags.  */
  omplow_simd_context () { memset (s: this, c: 0, n: sizeof (*this)); }
  /* Per-iteration index variable used to subscript "omp simd array"
     temporaries.  */
  tree idx;
  /* Lane variable used to subscript the per-lane copy of a privatized
     variable.  */
  tree lane;
  /* Index of the last lane, used for the reduced-value array of inscan
     reductions.  */
  tree lastlane;
  /* Extra arguments (addresses of SIMT-privatized variables) collected
     for the SIMT case.  */
  vec<tree, va_heap> simt_eargs;
  /* Statement list clobbering SIMT-privatized variables at the end.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 means not yet computed, 1 disables
     this privatization scheme.  */
  poly_uint64 max_vf;
  /* True when lowering for SIMT rather than plain SIMD.  */
  bool is_simt;
};
| 4601 | |
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  Return true if NEW_VAR is privatized via per-lane
   storage (an "omp simd array", or directly for SIMT); in that case
   IVAR is set to the per-iteration reference and LVAR to the per-lane
   reference.  For inscan reductions *RVAR is set to a reference into
   the reduced-value array, and for exclusive scan *RVAR2 to a reference
   into the scan-phase array.  Return false when max_vf is 1 and no such
   privatization is performed.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* Compute max_vf lazily on the first call.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = (sctx->is_simt ? omp_max_simt_vf ()
		      : omp_max_vf (omp_maybe_offloaded_ctx (ctx)));
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* An explicit safelen caps the vectorization factor.  */
	  tree c = omp_find_clause (clauses: gimple_omp_for_clauses (gs: ctx->stmt),
				    kind: OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), value: &safe_len)
		  || maybe_lt (a: safe_len, b: 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (a: sctx->max_vf, b: safe_len);
	    }
	}
      if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
	{
	  for (tree c = gimple_omp_for_clauses (gs: ctx->stmt); c;
	       c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		continue;

	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* UDR reductions are not supported yet for SIMT, disable
		     SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}

	      if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
		  && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
		{
		  /* Doing boolean operations on non-integral types is
		     for conformance only, it's not worth supporting this
		     for SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* Shared index/lane variables used by all privatized vars.  */
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, registers are naturally per-lane; only addressable
	 variables need the "omp simt private" treatment.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private" ),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (obj: build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* For SIMD, privatize via an array with one element per lane.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array" ), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array" ), NULL,
			 tree_cons (get_identifier ("omp simd inscan" ), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (k: avar, v: iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array" ), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive" ), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (k: iavar, v: savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      /* The per-iteration reference indexes by idx, the per-lane one
	 by lane.  */
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  if (DECL_P (new_var))
    {
      /* Redirect uses of NEW_VAR to the per-lane storage.  */
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
| 4745 | |
| 4746 | /* Helper function of lower_rec_input_clauses. For a reference |
| 4747 | in simd reduction, add an underlying variable it will reference. */ |
| 4748 | |
| 4749 | static void |
| 4750 | handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist) |
| 4751 | { |
| 4752 | tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard))); |
| 4753 | if (TREE_CONSTANT (z)) |
| 4754 | { |
| 4755 | z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)), |
| 4756 | get_name (new_vard)); |
| 4757 | gimple_add_tmp_var (z); |
| 4758 | TREE_ADDRESSABLE (z) = 1; |
| 4759 | z = build_fold_addr_expr_loc (loc, z); |
| 4760 | gimplify_assign (new_vard, z, ilist); |
| 4761 | } |
| 4762 | } |
| 4763 | |
| 4764 | /* Helper function for lower_rec_input_clauses. Emit into ilist sequence |
| 4765 | code to emit (type) (tskred_temp[idx]). */ |
| 4766 | |
| 4767 | static tree |
| 4768 | task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type, |
| 4769 | unsigned idx) |
| 4770 | { |
| 4771 | unsigned HOST_WIDE_INT sz |
| 4772 | = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node)); |
| 4773 | tree r = build2 (MEM_REF, pointer_sized_int_node, |
| 4774 | tskred_temp, build_int_cst (TREE_TYPE (tskred_temp), |
| 4775 | idx * sz)); |
| 4776 | tree v = create_tmp_var (pointer_sized_int_node); |
| 4777 | gimple *g = gimple_build_assign (v, r); |
| 4778 | gimple_seq_add_stmt (ilist, g); |
| 4779 | if (!useless_type_conversion_p (type, pointer_sized_int_node)) |
| 4780 | { |
| 4781 | v = create_tmp_var (type); |
| 4782 | g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (gs: g)); |
| 4783 | gimple_seq_add_stmt (ilist, g); |
| 4784 | } |
| 4785 | return v; |
| 4786 | } |
| 4787 | |
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  VAR is the original
   variable; IS_REF says whether it is privatized by reference; SIZE,
   if non-NULL, overrides the allocation size.  On success, set
   ALLOCATOR and ALLOCATE_PTR, emit a GOMP_alloc call into ILIST and
   return true; otherwise leave both NULL and return false.  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  /* Look up the allocator recorded for VAR by the allocate clause.  */
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (k: var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  if (!is_ref && omp_privatize_by_reference (decl: var))
    {
      allocator = NULL_TREE;
      return false;
    }

  /* A TREE_LIST allocator encodes (value: alignment, purpose: allocator).  */
  unsigned HOST_WIDE_INT ialign = 0;
  if (TREE_CODE (allocator) == TREE_LIST)
    {
      ialign = tree_to_uhwi (TREE_VALUE (allocator));
      allocator = TREE_PURPOSE (allocator);
    }
  /* A non-constant allocator is a variable that may itself need
     remapping to the outer context, and is then loaded into a
     temporary.  */
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (var: allocator, ctx, code: OMP_CLAUSE_ALLOCATE);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  /* Determine the pointer type, alignment and size of the allocation
     depending on whether NEW_VAR is a type, a reference or a plain
     decl.  */
  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  align = build_int_cst (size_type_node, ialign);
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  /* Emit allocate_ptr = GOMP_alloc (align, sz, allocator).  */
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (gs: g, lhs: allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  if (!is_ref)
    {
      /* Redirect uses of NEW_VAR to the freshly allocated storage.  */
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
| 4866 | |
| 4867 | /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN, |
| 4868 | from the receiver (aka child) side and initializers for REFERENCE_TYPE |
| 4869 | private variables. Initialization statements go in ILIST, while calls |
| 4870 | to destructors go in DLIST. */ |
| 4871 | |
| 4872 | static void |
| 4873 | lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist, |
| 4874 | omp_context *ctx, struct omp_for_data *fd) |
| 4875 | { |
| 4876 | tree c, copyin_seq, x, ptr; |
| 4877 | bool copyin_by_ref = false; |
| 4878 | bool lastprivate_firstprivate = false; |
| 4879 | bool reduction_omp_orig_ref = false; |
| 4880 | int pass; |
| 4881 | bool is_simd = (gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR |
| 4882 | && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD); |
| 4883 | omplow_simd_context sctx = omplow_simd_context (); |
| 4884 | tree simt_lane = NULL_TREE, simtrec = NULL_TREE; |
| 4885 | tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE; |
| 4886 | gimple_seq llist[4] = { }; |
| 4887 | tree nonconst_simd_if = NULL_TREE; |
| 4888 | |
| 4889 | copyin_seq = NULL; |
| 4890 | sctx.is_simt = is_simd && omp_find_clause (clauses, kind: OMP_CLAUSE__SIMT_); |
| 4891 | |
| 4892 | /* Set max_vf=1 (which will later enforce safelen=1) in simd loops |
| 4893 | with data sharing clauses referencing variable sized vars. That |
| 4894 | is unnecessarily hard to support and very unlikely to result in |
| 4895 | vectorized code anyway. */ |
| 4896 | if (is_simd) |
| 4897 | for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) |
| 4898 | switch (OMP_CLAUSE_CODE (c)) |
| 4899 | { |
| 4900 | case OMP_CLAUSE_LINEAR: |
| 4901 | if (OMP_CLAUSE_LINEAR_ARRAY (c)) |
| 4902 | sctx.max_vf = 1; |
| 4903 | /* FALLTHRU */ |
| 4904 | case OMP_CLAUSE_PRIVATE: |
| 4905 | case OMP_CLAUSE_FIRSTPRIVATE: |
| 4906 | case OMP_CLAUSE_LASTPRIVATE: |
| 4907 | if (is_variable_sized (OMP_CLAUSE_DECL (c))) |
| 4908 | sctx.max_vf = 1; |
| 4909 | else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c))) |
| 4910 | { |
| 4911 | tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c))); |
| 4912 | if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype))) |
| 4913 | sctx.max_vf = 1; |
| 4914 | } |
| 4915 | break; |
| 4916 | case OMP_CLAUSE_REDUCTION: |
| 4917 | case OMP_CLAUSE_IN_REDUCTION: |
| 4918 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF |
| 4919 | || is_variable_sized (OMP_CLAUSE_DECL (c))) |
| 4920 | sctx.max_vf = 1; |
| 4921 | else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c))) |
| 4922 | { |
| 4923 | tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c))); |
| 4924 | if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype))) |
| 4925 | sctx.max_vf = 1; |
| 4926 | } |
| 4927 | break; |
| 4928 | case OMP_CLAUSE_IF: |
| 4929 | if (integer_zerop (OMP_CLAUSE_IF_EXPR (c))) |
| 4930 | sctx.max_vf = 1; |
| 4931 | else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST) |
| 4932 | nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c); |
| 4933 | break; |
| 4934 | case OMP_CLAUSE_SIMDLEN: |
| 4935 | if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c))) |
| 4936 | sctx.max_vf = 1; |
| 4937 | break; |
| 4938 | case OMP_CLAUSE__CONDTEMP_: |
| 4939 | /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */ |
| 4940 | if (sctx.is_simt) |
| 4941 | sctx.max_vf = 1; |
| 4942 | break; |
| 4943 | default: |
| 4944 | continue; |
| 4945 | } |
| 4946 | |
| 4947 | /* Add a placeholder for simduid. */ |
| 4948 | if (sctx.is_simt && maybe_ne (a: sctx.max_vf, b: 1U)) |
| 4949 | sctx.simt_eargs.safe_push (NULL_TREE); |
| 4950 | |
| 4951 | unsigned task_reduction_cnt = 0; |
| 4952 | unsigned task_reduction_cntorig = 0; |
| 4953 | unsigned task_reduction_cnt_full = 0; |
| 4954 | unsigned task_reduction_cntorig_full = 0; |
| 4955 | unsigned task_reduction_other_cnt = 0; |
| 4956 | tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE; |
| 4957 | tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE; |
| 4958 | /* Do all the fixed sized types in the first pass, and the variable sized |
| 4959 | types in the second pass. This makes sure that the scalar arguments to |
| 4960 | the variable sized types are processed before we use them in the |
| 4961 | variable sized operations. For task reductions we use 4 passes, in the |
| 4962 | first two we ignore them, in the third one gather arguments for |
| 4963 | GOMP_task_reduction_remap call and in the last pass actually handle |
| 4964 | the task reductions. */ |
| 4965 | for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt) |
| 4966 | ? 4 : 2); ++pass) |
| 4967 | { |
| 4968 | if (pass == 2 && task_reduction_cnt) |
| 4969 | { |
| 4970 | tskred_atype |
| 4971 | = build_array_type_nelts (ptr_type_node, task_reduction_cnt |
| 4972 | + task_reduction_cntorig); |
| 4973 | tskred_avar = create_tmp_var_raw (tskred_atype); |
| 4974 | gimple_add_tmp_var (tskred_avar); |
| 4975 | TREE_ADDRESSABLE (tskred_avar) = 1; |
| 4976 | task_reduction_cnt_full = task_reduction_cnt; |
| 4977 | task_reduction_cntorig_full = task_reduction_cntorig; |
| 4978 | } |
| 4979 | else if (pass == 3 && task_reduction_cnt) |
| 4980 | { |
| 4981 | x = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TASK_REDUCTION_REMAP); |
| 4982 | gimple *g |
| 4983 | = gimple_build_call (x, 3, size_int (task_reduction_cnt), |
| 4984 | size_int (task_reduction_cntorig), |
| 4985 | build_fold_addr_expr (tskred_avar)); |
| 4986 | gimple_seq_add_stmt (ilist, g); |
| 4987 | } |
| 4988 | if (pass == 3 && task_reduction_other_cnt) |
| 4989 | { |
| 4990 | /* For reduction clauses, build |
| 4991 | tskred_base = (void *) tskred_temp[2] |
| 4992 | + omp_get_thread_num () * tskred_temp[1] |
| 4993 | or if tskred_temp[1] is known to be constant, that constant |
| 4994 | directly. This is the start of the private reduction copy block |
| 4995 | for the current thread. */ |
| 4996 | tree v = create_tmp_var (integer_type_node); |
| 4997 | x = builtin_decl_explicit (fncode: BUILT_IN_OMP_GET_THREAD_NUM); |
| 4998 | gimple *g = gimple_build_call (x, 0); |
| 4999 | gimple_call_set_lhs (gs: g, lhs: v); |
| 5000 | gimple_seq_add_stmt (ilist, g); |
| 5001 | c = omp_find_clause (clauses, kind: OMP_CLAUSE__REDUCTEMP_); |
| 5002 | tskred_temp = OMP_CLAUSE_DECL (c); |
| 5003 | if (is_taskreg_ctx (ctx)) |
| 5004 | tskred_temp = lookup_decl (var: tskred_temp, ctx); |
| 5005 | tree v2 = create_tmp_var (sizetype); |
| 5006 | g = gimple_build_assign (v2, NOP_EXPR, v); |
| 5007 | gimple_seq_add_stmt (ilist, g); |
| 5008 | if (ctx->task_reductions[0]) |
| 5009 | v = fold_convert (sizetype, ctx->task_reductions[0]); |
| 5010 | else |
| 5011 | v = task_reduction_read (ilist, tskred_temp, sizetype, idx: 1); |
| 5012 | tree v3 = create_tmp_var (sizetype); |
| 5013 | g = gimple_build_assign (v3, MULT_EXPR, v2, v); |
| 5014 | gimple_seq_add_stmt (ilist, g); |
| 5015 | v = task_reduction_read (ilist, tskred_temp, ptr_type_node, idx: 2); |
| 5016 | tskred_base = create_tmp_var (ptr_type_node); |
| 5017 | g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3); |
| 5018 | gimple_seq_add_stmt (ilist, g); |
| 5019 | } |
| 5020 | task_reduction_cnt = 0; |
| 5021 | task_reduction_cntorig = 0; |
| 5022 | task_reduction_other_cnt = 0; |
| 5023 | for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) |
| 5024 | { |
| 5025 | enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c); |
| 5026 | tree var, new_var; |
| 5027 | bool by_ref; |
| 5028 | location_t clause_loc = OMP_CLAUSE_LOCATION (c); |
| 5029 | bool task_reduction_p = false; |
| 5030 | bool task_reduction_needs_orig_p = false; |
| 5031 | tree cond = NULL_TREE; |
| 5032 | tree allocator, allocate_ptr; |
| 5033 | |
| 5034 | switch (c_kind) |
| 5035 | { |
| 5036 | case OMP_CLAUSE_PRIVATE: |
| 5037 | if (OMP_CLAUSE_PRIVATE_DEBUG (c)) |
| 5038 | continue; |
| 5039 | break; |
| 5040 | case OMP_CLAUSE_SHARED: |
| 5041 | /* Ignore shared directives in teams construct inside |
| 5042 | of target construct. */ |
| 5043 | if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TEAMS |
| 5044 | && !is_host_teams_ctx (ctx)) |
| 5045 | continue; |
| 5046 | if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL) |
| 5047 | { |
| 5048 | gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) |
| 5049 | || is_global_var (OMP_CLAUSE_DECL (c))); |
| 5050 | continue; |
| 5051 | } |
| 5052 | case OMP_CLAUSE_FIRSTPRIVATE: |
| 5053 | case OMP_CLAUSE_COPYIN: |
| 5054 | break; |
| 5055 | case OMP_CLAUSE_LINEAR: |
| 5056 | if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c) |
| 5057 | && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) |
| 5058 | lastprivate_firstprivate = true; |
| 5059 | break; |
| 5060 | case OMP_CLAUSE_REDUCTION: |
| 5061 | case OMP_CLAUSE_IN_REDUCTION: |
| 5062 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
| 5063 | || is_task_ctx (ctx) |
| 5064 | || OMP_CLAUSE_REDUCTION_TASK (c)) |
| 5065 | { |
| 5066 | task_reduction_p = true; |
| 5067 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION) |
| 5068 | { |
| 5069 | task_reduction_other_cnt++; |
| 5070 | if (pass == 2) |
| 5071 | continue; |
| 5072 | } |
| 5073 | else |
| 5074 | task_reduction_cnt++; |
| 5075 | if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) |
| 5076 | { |
| 5077 | var = OMP_CLAUSE_DECL (c); |
| 5078 | /* If var is a global variable that isn't privatized |
| 5079 | in outer contexts, we don't need to look up the |
| 5080 | original address, it is always the address of the |
| 5081 | global variable itself. */ |
| 5082 | if (!DECL_P (var) |
| 5083 | || omp_privatize_by_reference (decl: var) |
| 5084 | || !is_global_var |
| 5085 | (t: maybe_lookup_decl_in_outer_ctx (decl: var, ctx))) |
| 5086 | { |
| 5087 | task_reduction_needs_orig_p = true; |
| 5088 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) |
| 5089 | task_reduction_cntorig++; |
| 5090 | } |
| 5091 | } |
| 5092 | } |
| 5093 | else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) |
| 5094 | reduction_omp_orig_ref = true; |
| 5095 | break; |
| 5096 | case OMP_CLAUSE__REDUCTEMP_: |
| 5097 | if (!is_taskreg_ctx (ctx)) |
| 5098 | continue; |
| 5099 | /* FALLTHRU */ |
| 5100 | case OMP_CLAUSE__LOOPTEMP_: |
| 5101 | /* Handle _looptemp_/_reductemp_ clauses only on |
| 5102 | parallel/task. */ |
| 5103 | if (fd) |
| 5104 | continue; |
| 5105 | break; |
| 5106 | case OMP_CLAUSE_LASTPRIVATE: |
| 5107 | if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) |
| 5108 | { |
| 5109 | lastprivate_firstprivate = true; |
| 5110 | if (pass != 0 || is_taskloop_ctx (ctx)) |
| 5111 | continue; |
| 5112 | } |
| 5113 | /* Even without corresponding firstprivate, if |
| 5114 | decl is Fortran allocatable, it needs outer var |
| 5115 | reference. */ |
| 5116 | else if (pass == 0 |
| 5117 | && lang_hooks.decls.omp_private_outer_ref |
| 5118 | (OMP_CLAUSE_DECL (c))) |
| 5119 | lastprivate_firstprivate = true; |
| 5120 | break; |
| 5121 | case OMP_CLAUSE_ALIGNED: |
| 5122 | if (pass != 1) |
| 5123 | continue; |
| 5124 | var = OMP_CLAUSE_DECL (c); |
| 5125 | if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE |
| 5126 | && !is_global_var (t: var)) |
| 5127 | { |
| 5128 | new_var = maybe_lookup_decl (var, ctx); |
| 5129 | if (new_var == NULL_TREE) |
| 5130 | new_var = maybe_lookup_decl_in_outer_ctx (decl: var, ctx); |
| 5131 | x = builtin_decl_explicit (fncode: BUILT_IN_ASSUME_ALIGNED); |
| 5132 | tree alarg = omp_clause_aligned_alignment (clause: c); |
| 5133 | alarg = fold_convert_loc (clause_loc, size_type_node, alarg); |
| 5134 | x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg); |
| 5135 | x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x); |
| 5136 | x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x); |
| 5137 | gimplify_and_add (x, ilist); |
| 5138 | } |
| 5139 | else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE |
| 5140 | && is_global_var (t: var)) |
| 5141 | { |
| 5142 | tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2; |
| 5143 | new_var = lookup_decl (var, ctx); |
| 5144 | t = maybe_lookup_decl_in_outer_ctx (decl: var, ctx); |
| 5145 | t = build_fold_addr_expr_loc (clause_loc, t); |
| 5146 | t2 = builtin_decl_explicit (fncode: BUILT_IN_ASSUME_ALIGNED); |
| 5147 | tree alarg = omp_clause_aligned_alignment (clause: c); |
| 5148 | alarg = fold_convert_loc (clause_loc, size_type_node, alarg); |
| 5149 | t = build_call_expr_loc (clause_loc, t2, 2, t, alarg); |
| 5150 | t = fold_convert_loc (clause_loc, ptype, t); |
| 5151 | x = create_tmp_var (ptype); |
| 5152 | t = build2 (MODIFY_EXPR, ptype, x, t); |
| 5153 | gimplify_and_add (t, ilist); |
| 5154 | t = build_simple_mem_ref_loc (clause_loc, x); |
| 5155 | SET_DECL_VALUE_EXPR (new_var, t); |
| 5156 | DECL_HAS_VALUE_EXPR_P (new_var) = 1; |
| 5157 | } |
| 5158 | continue; |
| 5159 | case OMP_CLAUSE__CONDTEMP_: |
| 5160 | if (is_parallel_ctx (ctx) |
| 5161 | || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))) |
| 5162 | break; |
| 5163 | continue; |
| 5164 | default: |
| 5165 | continue; |
| 5166 | } |
| 5167 | |
| 5168 | if (task_reduction_p != (pass >= 2)) |
| 5169 | continue; |
| 5170 | |
| 5171 | allocator = NULL_TREE; |
| 5172 | allocate_ptr = NULL_TREE; |
| 5173 | new_var = var = OMP_CLAUSE_DECL (c); |
| 5174 | if ((c_kind == OMP_CLAUSE_REDUCTION |
| 5175 | || c_kind == OMP_CLAUSE_IN_REDUCTION) |
| 5176 | && TREE_CODE (var) == MEM_REF) |
| 5177 | { |
| 5178 | var = TREE_OPERAND (var, 0); |
| 5179 | if (TREE_CODE (var) == POINTER_PLUS_EXPR) |
| 5180 | var = TREE_OPERAND (var, 0); |
| 5181 | if (TREE_CODE (var) == INDIRECT_REF |
| 5182 | || TREE_CODE (var) == ADDR_EXPR) |
| 5183 | var = TREE_OPERAND (var, 0); |
| 5184 | if (is_variable_sized (expr: var)) |
| 5185 | { |
| 5186 | gcc_assert (DECL_HAS_VALUE_EXPR_P (var)); |
| 5187 | var = DECL_VALUE_EXPR (var); |
| 5188 | gcc_assert (TREE_CODE (var) == INDIRECT_REF); |
| 5189 | var = TREE_OPERAND (var, 0); |
| 5190 | gcc_assert (DECL_P (var)); |
| 5191 | } |
| 5192 | new_var = var; |
| 5193 | } |
| 5194 | if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (stmt: ctx->stmt)) |
| 5195 | { |
| 5196 | splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var); |
| 5197 | new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value; |
| 5198 | } |
| 5199 | else if (c_kind != OMP_CLAUSE_COPYIN) |
| 5200 | new_var = lookup_decl (var, ctx); |
| 5201 | |
| 5202 | if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN) |
| 5203 | { |
| 5204 | if (pass != 0) |
| 5205 | continue; |
| 5206 | } |
| 5207 | /* C/C++ array section reductions. */ |
| 5208 | else if ((c_kind == OMP_CLAUSE_REDUCTION |
| 5209 | || c_kind == OMP_CLAUSE_IN_REDUCTION) |
| 5210 | && var != OMP_CLAUSE_DECL (c)) |
| 5211 | { |
| 5212 | if (pass == 0) |
| 5213 | continue; |
| 5214 | |
| 5215 | tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1); |
| 5216 | tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0); |
| 5217 | |
| 5218 | if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR) |
| 5219 | { |
| 5220 | tree b = TREE_OPERAND (orig_var, 1); |
| 5221 | if (is_omp_target (stmt: ctx->stmt)) |
| 5222 | b = NULL_TREE; |
| 5223 | else |
| 5224 | b = maybe_lookup_decl (var: b, ctx); |
| 5225 | if (b == NULL) |
| 5226 | { |
| 5227 | b = TREE_OPERAND (orig_var, 1); |
| 5228 | b = maybe_lookup_decl_in_outer_ctx (decl: b, ctx); |
| 5229 | } |
| 5230 | if (integer_zerop (bias)) |
| 5231 | bias = b; |
| 5232 | else |
| 5233 | { |
| 5234 | bias = fold_convert_loc (clause_loc, |
| 5235 | TREE_TYPE (b), bias); |
| 5236 | bias = fold_build2_loc (clause_loc, PLUS_EXPR, |
| 5237 | TREE_TYPE (b), b, bias); |
| 5238 | } |
| 5239 | orig_var = TREE_OPERAND (orig_var, 0); |
| 5240 | } |
| 5241 | if (pass == 2) |
| 5242 | { |
| 5243 | tree out = maybe_lookup_decl_in_outer_ctx (decl: var, ctx); |
| 5244 | if (is_global_var (t: out) |
| 5245 | && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE |
| 5246 | && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE |
| 5247 | || (TREE_CODE (TREE_TYPE (TREE_TYPE (out))) |
| 5248 | != POINTER_TYPE))) |
| 5249 | x = var; |
| 5250 | else if (is_omp_target (stmt: ctx->stmt)) |
| 5251 | x = out; |
| 5252 | else |
| 5253 | { |
| 5254 | bool by_ref = use_pointer_for_field (decl: var, NULL); |
| 5255 | x = build_receiver_ref (var, by_ref, ctx); |
| 5256 | if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE |
| 5257 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (var))) |
| 5258 | == POINTER_TYPE)) |
| 5259 | x = build_fold_addr_expr (x); |
| 5260 | } |
| 5261 | if (TREE_CODE (orig_var) == INDIRECT_REF) |
| 5262 | x = build_simple_mem_ref (x); |
| 5263 | else if (TREE_CODE (orig_var) == ADDR_EXPR) |
| 5264 | { |
| 5265 | if (var == TREE_OPERAND (orig_var, 0)) |
| 5266 | x = build_fold_addr_expr (x); |
| 5267 | } |
| 5268 | bias = fold_convert (sizetype, bias); |
| 5269 | x = fold_convert (ptr_type_node, x); |
| 5270 | x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR, |
| 5271 | TREE_TYPE (x), x, bias); |
| 5272 | unsigned cnt = task_reduction_cnt - 1; |
| 5273 | if (!task_reduction_needs_orig_p) |
| 5274 | cnt += (task_reduction_cntorig_full |
| 5275 | - task_reduction_cntorig); |
| 5276 | else |
| 5277 | cnt = task_reduction_cntorig - 1; |
| 5278 | tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar, |
| 5279 | size_int (cnt), NULL_TREE, NULL_TREE); |
| 5280 | gimplify_assign (r, x, ilist); |
| 5281 | continue; |
| 5282 | } |
| 5283 | |
| 5284 | if (TREE_CODE (orig_var) == INDIRECT_REF |
| 5285 | || TREE_CODE (orig_var) == ADDR_EXPR) |
| 5286 | orig_var = TREE_OPERAND (orig_var, 0); |
| 5287 | tree d = OMP_CLAUSE_DECL (c); |
| 5288 | tree type = TREE_TYPE (d); |
| 5289 | gcc_assert (TREE_CODE (type) == ARRAY_TYPE); |
| 5290 | tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); |
| 5291 | tree sz = v; |
| 5292 | const char *name = get_name (orig_var); |
| 5293 | if (pass != 3 && !TREE_CONSTANT (v)) |
| 5294 | { |
| 5295 | tree t; |
| 5296 | if (is_omp_target (stmt: ctx->stmt)) |
| 5297 | t = NULL_TREE; |
| 5298 | else |
| 5299 | t = maybe_lookup_decl (var: v, ctx); |
| 5300 | if (t) |
| 5301 | v = t; |
| 5302 | else |
| 5303 | v = maybe_lookup_decl_in_outer_ctx (decl: v, ctx); |
| 5304 | gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue); |
| 5305 | t = fold_build2_loc (clause_loc, PLUS_EXPR, |
| 5306 | TREE_TYPE (v), v, |
| 5307 | build_int_cst (TREE_TYPE (v), 1)); |
| 5308 | sz = fold_build2_loc (clause_loc, MULT_EXPR, |
| 5309 | TREE_TYPE (v), t, |
| 5310 | TYPE_SIZE_UNIT (TREE_TYPE (type))); |
| 5311 | } |
| 5312 | if (pass == 3) |
| 5313 | { |
| 5314 | tree xv = create_tmp_var (ptr_type_node); |
| 5315 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) |
| 5316 | { |
| 5317 | unsigned cnt = task_reduction_cnt - 1; |
| 5318 | if (!task_reduction_needs_orig_p) |
| 5319 | cnt += (task_reduction_cntorig_full |
| 5320 | - task_reduction_cntorig); |
| 5321 | else |
| 5322 | cnt = task_reduction_cntorig - 1; |
| 5323 | x = build4 (ARRAY_REF, ptr_type_node, tskred_avar, |
| 5324 | size_int (cnt), NULL_TREE, NULL_TREE); |
| 5325 | |
| 5326 | gimple *g = gimple_build_assign (xv, x); |
| 5327 | gimple_seq_add_stmt (ilist, g); |
| 5328 | } |
| 5329 | else |
| 5330 | { |
| 5331 | unsigned int idx = *ctx->task_reduction_map->get (k: c); |
| 5332 | tree off; |
| 5333 | if (ctx->task_reductions[1 + idx]) |
| 5334 | off = fold_convert (sizetype, |
| 5335 | ctx->task_reductions[1 + idx]); |
| 5336 | else |
| 5337 | off = task_reduction_read (ilist, tskred_temp, sizetype, |
| 5338 | idx: 7 + 3 * idx + 1); |
| 5339 | gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR, |
| 5340 | tskred_base, off); |
| 5341 | gimple_seq_add_stmt (ilist, g); |
| 5342 | } |
| 5343 | x = fold_convert (build_pointer_type (boolean_type_node), |
| 5344 | xv); |
| 5345 | if (TREE_CONSTANT (v)) |
| 5346 | x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, |
| 5347 | TYPE_SIZE_UNIT (type)); |
| 5348 | else |
| 5349 | { |
| 5350 | tree t; |
| 5351 | if (is_omp_target (stmt: ctx->stmt)) |
| 5352 | t = NULL_TREE; |
| 5353 | else |
| 5354 | t = maybe_lookup_decl (var: v, ctx); |
| 5355 | if (t) |
| 5356 | v = t; |
| 5357 | else |
| 5358 | v = maybe_lookup_decl_in_outer_ctx (decl: v, ctx); |
| 5359 | gimplify_expr (&v, ilist, NULL, is_gimple_val, |
| 5360 | fb_rvalue); |
| 5361 | t = fold_build2_loc (clause_loc, PLUS_EXPR, |
| 5362 | TREE_TYPE (v), v, |
| 5363 | build_int_cst (TREE_TYPE (v), 1)); |
| 5364 | t = fold_build2_loc (clause_loc, MULT_EXPR, |
| 5365 | TREE_TYPE (v), t, |
| 5366 | TYPE_SIZE_UNIT (TREE_TYPE (type))); |
| 5367 | x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t); |
| 5368 | } |
| 5369 | cond = create_tmp_var (TREE_TYPE (x)); |
| 5370 | gimplify_assign (cond, x, ilist); |
| 5371 | x = xv; |
| 5372 | } |
| 5373 | else if (lower_private_allocate (var, new_var: type, allocator, |
| 5374 | allocate_ptr, ilist, ctx, |
| 5375 | is_ref: true, |
| 5376 | TREE_CONSTANT (v) |
| 5377 | ? TYPE_SIZE_UNIT (type) |
| 5378 | : sz)) |
| 5379 | x = allocate_ptr; |
| 5380 | else if (TREE_CONSTANT (v)) |
| 5381 | { |
| 5382 | x = create_tmp_var_raw (type, name); |
| 5383 | gimple_add_tmp_var (x); |
| 5384 | TREE_ADDRESSABLE (x) = 1; |
| 5385 | x = build_fold_addr_expr_loc (clause_loc, x); |
| 5386 | } |
| 5387 | else |
| 5388 | { |
| 5389 | tree atmp |
| 5390 | = builtin_decl_explicit (fncode: BUILT_IN_ALLOCA_WITH_ALIGN); |
| 5391 | tree al = size_int (TYPE_ALIGN (TREE_TYPE (type))); |
| 5392 | x = build_call_expr_loc (clause_loc, atmp, 2, sz, al); |
| 5393 | } |
| 5394 | |
| 5395 | tree ptype = build_pointer_type (TREE_TYPE (type)); |
| 5396 | x = fold_convert_loc (clause_loc, ptype, x); |
| 5397 | tree y = create_tmp_var (ptype, name); |
| 5398 | gimplify_assign (y, x, ilist); |
| 5399 | x = y; |
| 5400 | tree yb = y; |
| 5401 | |
| 5402 | if (!integer_zerop (bias)) |
| 5403 | { |
| 5404 | bias = fold_convert_loc (clause_loc, pointer_sized_int_node, |
| 5405 | bias); |
| 5406 | yb = fold_convert_loc (clause_loc, pointer_sized_int_node, |
| 5407 | x); |
| 5408 | yb = fold_build2_loc (clause_loc, MINUS_EXPR, |
| 5409 | pointer_sized_int_node, yb, bias); |
| 5410 | x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb); |
| 5411 | yb = create_tmp_var (ptype, name); |
| 5412 | gimplify_assign (yb, x, ilist); |
| 5413 | x = yb; |
| 5414 | } |
| 5415 | |
| 5416 | d = TREE_OPERAND (d, 0); |
| 5417 | if (TREE_CODE (d) == POINTER_PLUS_EXPR) |
| 5418 | d = TREE_OPERAND (d, 0); |
| 5419 | if (TREE_CODE (d) == ADDR_EXPR) |
| 5420 | { |
| 5421 | if (orig_var != var) |
| 5422 | { |
| 5423 | gcc_assert (is_variable_sized (orig_var)); |
| 5424 | x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), |
| 5425 | x); |
| 5426 | gimplify_assign (new_var, x, ilist); |
| 5427 | tree new_orig_var = lookup_decl (var: orig_var, ctx); |
| 5428 | tree t = build_fold_indirect_ref (new_var); |
| 5429 | DECL_IGNORED_P (new_var) = 0; |
| 5430 | TREE_THIS_NOTRAP (t) = 1; |
| 5431 | SET_DECL_VALUE_EXPR (new_orig_var, t); |
| 5432 | DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1; |
| 5433 | } |
| 5434 | else |
| 5435 | { |
| 5436 | x = build2 (MEM_REF, TREE_TYPE (new_var), x, |
| 5437 | build_int_cst (ptype, 0)); |
| 5438 | SET_DECL_VALUE_EXPR (new_var, x); |
| 5439 | DECL_HAS_VALUE_EXPR_P (new_var) = 1; |
| 5440 | } |
| 5441 | } |
| 5442 | else |
| 5443 | { |
| 5444 | gcc_assert (orig_var == var); |
| 5445 | if (TREE_CODE (d) == INDIRECT_REF) |
| 5446 | { |
| 5447 | x = create_tmp_var (ptype, name); |
| 5448 | TREE_ADDRESSABLE (x) = 1; |
| 5449 | gimplify_assign (x, yb, ilist); |
| 5450 | x = build_fold_addr_expr_loc (clause_loc, x); |
| 5451 | } |
| 5452 | x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x); |
| 5453 | gimplify_assign (new_var, x, ilist); |
| 5454 | } |
| 5455 | /* GOMP_taskgroup_reduction_register memsets the whole |
| 5456 | array to zero. If the initializer is zero, we don't |
| 5457 | need to initialize it again, just mark it as ever |
| 5458 | used unconditionally, i.e. cond = true. */ |
| 5459 | if (cond |
| 5460 | && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE |
| 5461 | && initializer_zerop (omp_reduction_init (clause: c, |
| 5462 | TREE_TYPE (type)))) |
| 5463 | { |
| 5464 | gimple *g = gimple_build_assign (build_simple_mem_ref (cond), |
| 5465 | boolean_true_node); |
| 5466 | gimple_seq_add_stmt (ilist, g); |
| 5467 | continue; |
| 5468 | } |
| 5469 | tree end = create_artificial_label (UNKNOWN_LOCATION); |
| 5470 | if (cond) |
| 5471 | { |
| 5472 | gimple *g; |
| 5473 | if (!is_parallel_ctx (ctx)) |
| 5474 | { |
| 5475 | tree condv = create_tmp_var (boolean_type_node); |
| 5476 | g = gimple_build_assign (condv, |
| 5477 | build_simple_mem_ref (cond)); |
| 5478 | gimple_seq_add_stmt (ilist, g); |
| 5479 | tree lab1 = create_artificial_label (UNKNOWN_LOCATION); |
| 5480 | g = gimple_build_cond (NE_EXPR, condv, |
| 5481 | boolean_false_node, end, lab1); |
| 5482 | gimple_seq_add_stmt (ilist, g); |
| 5483 | gimple_seq_add_stmt (ilist, gimple_build_label (label: lab1)); |
| 5484 | } |
| 5485 | g = gimple_build_assign (build_simple_mem_ref (cond), |
| 5486 | boolean_true_node); |
| 5487 | gimple_seq_add_stmt (ilist, g); |
| 5488 | } |
| 5489 | |
| 5490 | tree y1 = create_tmp_var (ptype); |
| 5491 | gimplify_assign (y1, y, ilist); |
| 5492 | tree i2 = NULL_TREE, y2 = NULL_TREE; |
| 5493 | tree body2 = NULL_TREE, end2 = NULL_TREE; |
| 5494 | tree y3 = NULL_TREE, y4 = NULL_TREE; |
| 5495 | if (task_reduction_needs_orig_p) |
| 5496 | { |
| 5497 | y3 = create_tmp_var (ptype); |
| 5498 | tree ref; |
| 5499 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) |
| 5500 | ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar, |
| 5501 | size_int (task_reduction_cnt_full |
| 5502 | + task_reduction_cntorig - 1), |
| 5503 | NULL_TREE, NULL_TREE); |
| 5504 | else |
| 5505 | { |
| 5506 | unsigned int idx = *ctx->task_reduction_map->get (k: c); |
| 5507 | ref = task_reduction_read (ilist, tskred_temp, type: ptype, |
| 5508 | idx: 7 + 3 * idx); |
| 5509 | } |
| 5510 | gimplify_assign (y3, ref, ilist); |
| 5511 | } |
| 5512 | else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd) |
| 5513 | { |
| 5514 | if (pass != 3) |
| 5515 | { |
| 5516 | y2 = create_tmp_var (ptype); |
| 5517 | gimplify_assign (y2, y, ilist); |
| 5518 | } |
| 5519 | if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) |
| 5520 | { |
| 5521 | tree ref = build_outer_var_ref (var, ctx); |
| 5522 | /* For ref build_outer_var_ref already performs this. */ |
| 5523 | if (TREE_CODE (d) == INDIRECT_REF) |
| 5524 | gcc_assert (omp_privatize_by_reference (var)); |
| 5525 | else if (TREE_CODE (d) == ADDR_EXPR) |
| 5526 | ref = build_fold_addr_expr (ref); |
| 5527 | else if (omp_privatize_by_reference (decl: var)) |
| 5528 | ref = build_fold_addr_expr (ref); |
| 5529 | ref = fold_convert_loc (clause_loc, ptype, ref); |
| 5530 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) |
| 5531 | && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) |
| 5532 | { |
| 5533 | y3 = create_tmp_var (ptype); |
| 5534 | gimplify_assign (y3, unshare_expr (ref), ilist); |
| 5535 | } |
| 5536 | if (is_simd) |
| 5537 | { |
| 5538 | y4 = create_tmp_var (ptype); |
| 5539 | gimplify_assign (y4, ref, dlist); |
| 5540 | } |
| 5541 | } |
| 5542 | } |
| 5543 | tree i = create_tmp_var (TREE_TYPE (v)); |
| 5544 | gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist); |
| 5545 | tree body = create_artificial_label (UNKNOWN_LOCATION); |
| 5546 | gimple_seq_add_stmt (ilist, gimple_build_label (label: body)); |
| 5547 | if (y2) |
| 5548 | { |
| 5549 | i2 = create_tmp_var (TREE_TYPE (v)); |
| 5550 | gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist); |
| 5551 | body2 = create_artificial_label (UNKNOWN_LOCATION); |
| 5552 | end2 = create_artificial_label (UNKNOWN_LOCATION); |
| 5553 | gimple_seq_add_stmt (dlist, gimple_build_label (label: body2)); |
| 5554 | } |
| 5555 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
| 5556 | { |
| 5557 | tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); |
| 5558 | tree decl_placeholder |
| 5559 | = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c); |
| 5560 | SET_DECL_VALUE_EXPR (decl_placeholder, |
| 5561 | build_simple_mem_ref (y1)); |
| 5562 | DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1; |
| 5563 | SET_DECL_VALUE_EXPR (placeholder, |
| 5564 | y3 ? build_simple_mem_ref (y3) |
| 5565 | : error_mark_node); |
| 5566 | DECL_HAS_VALUE_EXPR_P (placeholder) = 1; |
| 5567 | x = lang_hooks.decls.omp_clause_default_ctor |
| 5568 | (c, build_simple_mem_ref (y1), |
| 5569 | y3 ? build_simple_mem_ref (y3) : NULL_TREE); |
| 5570 | if (x) |
| 5571 | gimplify_and_add (x, ilist); |
| 5572 | if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) |
| 5573 | { |
| 5574 | gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); |
| 5575 | lower_omp (&tseq, ctx); |
| 5576 | gimple_seq_add_seq (ilist, tseq); |
| 5577 | } |
| 5578 | OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; |
| 5579 | if (is_simd) |
| 5580 | { |
| 5581 | SET_DECL_VALUE_EXPR (decl_placeholder, |
| 5582 | build_simple_mem_ref (y2)); |
| 5583 | SET_DECL_VALUE_EXPR (placeholder, |
| 5584 | build_simple_mem_ref (y4)); |
| 5585 | gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); |
| 5586 | lower_omp (&tseq, ctx); |
| 5587 | gimple_seq_add_seq (dlist, tseq); |
| 5588 | OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; |
| 5589 | } |
| 5590 | DECL_HAS_VALUE_EXPR_P (placeholder) = 0; |
| 5591 | DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0; |
| 5592 | if (y2) |
| 5593 | { |
| 5594 | x = lang_hooks.decls.omp_clause_dtor |
| 5595 | (c, build_simple_mem_ref (y2)); |
| 5596 | if (x) |
| 5597 | gimplify_and_add (x, dlist); |
| 5598 | } |
| 5599 | } |
| 5600 | else |
| 5601 | { |
| 5602 | x = omp_reduction_init (clause: c, TREE_TYPE (type)); |
| 5603 | enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c); |
| 5604 | |
| 5605 | /* reduction(-:var) sums up the partial results, so it |
| 5606 | acts identically to reduction(+:var). */ |
| 5607 | if (code == MINUS_EXPR) |
| 5608 | code = PLUS_EXPR; |
| 5609 | |
| 5610 | gimplify_assign (build_simple_mem_ref (y1), x, ilist); |
| 5611 | if (is_simd) |
| 5612 | { |
| 5613 | x = build2 (code, TREE_TYPE (type), |
| 5614 | build_simple_mem_ref (y4), |
| 5615 | build_simple_mem_ref (y2)); |
| 5616 | gimplify_assign (build_simple_mem_ref (y4), x, dlist); |
| 5617 | } |
| 5618 | } |
| 5619 | gimple *g |
| 5620 | = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1, |
| 5621 | TYPE_SIZE_UNIT (TREE_TYPE (type))); |
| 5622 | gimple_seq_add_stmt (ilist, g); |
| 5623 | if (y3) |
| 5624 | { |
| 5625 | g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3, |
| 5626 | TYPE_SIZE_UNIT (TREE_TYPE (type))); |
| 5627 | gimple_seq_add_stmt (ilist, g); |
| 5628 | } |
| 5629 | g = gimple_build_assign (i, PLUS_EXPR, i, |
| 5630 | build_int_cst (TREE_TYPE (i), 1)); |
| 5631 | gimple_seq_add_stmt (ilist, g); |
| 5632 | g = gimple_build_cond (LE_EXPR, i, v, body, end); |
| 5633 | gimple_seq_add_stmt (ilist, g); |
| 5634 | gimple_seq_add_stmt (ilist, gimple_build_label (label: end)); |
| 5635 | if (y2) |
| 5636 | { |
| 5637 | g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2, |
| 5638 | TYPE_SIZE_UNIT (TREE_TYPE (type))); |
| 5639 | gimple_seq_add_stmt (dlist, g); |
| 5640 | if (y4) |
| 5641 | { |
| 5642 | g = gimple_build_assign |
| 5643 | (y4, POINTER_PLUS_EXPR, y4, |
| 5644 | TYPE_SIZE_UNIT (TREE_TYPE (type))); |
| 5645 | gimple_seq_add_stmt (dlist, g); |
| 5646 | } |
| 5647 | g = gimple_build_assign (i2, PLUS_EXPR, i2, |
| 5648 | build_int_cst (TREE_TYPE (i2), 1)); |
| 5649 | gimple_seq_add_stmt (dlist, g); |
| 5650 | g = gimple_build_cond (LE_EXPR, i2, v, body2, end2); |
| 5651 | gimple_seq_add_stmt (dlist, g); |
| 5652 | gimple_seq_add_stmt (dlist, gimple_build_label (label: end2)); |
| 5653 | } |
| 5654 | if (allocator) |
| 5655 | { |
| 5656 | tree f = builtin_decl_explicit (fncode: BUILT_IN_GOMP_FREE); |
| 5657 | g = gimple_build_call (f, 2, allocate_ptr, allocator); |
| 5658 | gimple_seq_add_stmt (dlist, g); |
| 5659 | } |
| 5660 | continue; |
| 5661 | } |
| 5662 | else if (pass == 2) |
| 5663 | { |
| 5664 | tree out = maybe_lookup_decl_in_outer_ctx (decl: var, ctx); |
| 5665 | if (is_global_var (t: out)) |
| 5666 | x = var; |
| 5667 | else if (is_omp_target (stmt: ctx->stmt)) |
| 5668 | x = out; |
| 5669 | else |
| 5670 | { |
| 5671 | bool by_ref = use_pointer_for_field (decl: var, shared_ctx: ctx); |
| 5672 | x = build_receiver_ref (var, by_ref, ctx); |
| 5673 | } |
| 5674 | if (!omp_privatize_by_reference (decl: var)) |
| 5675 | x = build_fold_addr_expr (x); |
| 5676 | x = fold_convert (ptr_type_node, x); |
| 5677 | unsigned cnt = task_reduction_cnt - 1; |
| 5678 | if (!task_reduction_needs_orig_p) |
| 5679 | cnt += task_reduction_cntorig_full - task_reduction_cntorig; |
| 5680 | else |
| 5681 | cnt = task_reduction_cntorig - 1; |
| 5682 | tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar, |
| 5683 | size_int (cnt), NULL_TREE, NULL_TREE); |
| 5684 | gimplify_assign (r, x, ilist); |
| 5685 | continue; |
| 5686 | } |
| 5687 | else if (pass == 3) |
| 5688 | { |
| 5689 | tree type = TREE_TYPE (new_var); |
| 5690 | if (!omp_privatize_by_reference (decl: var)) |
| 5691 | type = build_pointer_type (type); |
| 5692 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) |
| 5693 | { |
| 5694 | unsigned cnt = task_reduction_cnt - 1; |
| 5695 | if (!task_reduction_needs_orig_p) |
| 5696 | cnt += (task_reduction_cntorig_full |
| 5697 | - task_reduction_cntorig); |
| 5698 | else |
| 5699 | cnt = task_reduction_cntorig - 1; |
| 5700 | x = build4 (ARRAY_REF, ptr_type_node, tskred_avar, |
| 5701 | size_int (cnt), NULL_TREE, NULL_TREE); |
| 5702 | } |
| 5703 | else |
| 5704 | { |
| 5705 | unsigned int idx = *ctx->task_reduction_map->get (k: c); |
| 5706 | tree off; |
| 5707 | if (ctx->task_reductions[1 + idx]) |
| 5708 | off = fold_convert (sizetype, |
| 5709 | ctx->task_reductions[1 + idx]); |
| 5710 | else |
| 5711 | off = task_reduction_read (ilist, tskred_temp, sizetype, |
| 5712 | idx: 7 + 3 * idx + 1); |
| 5713 | x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, |
| 5714 | tskred_base, off); |
| 5715 | } |
| 5716 | x = fold_convert (type, x); |
| 5717 | tree t; |
| 5718 | if (omp_privatize_by_reference (decl: var)) |
| 5719 | { |
| 5720 | gimplify_assign (new_var, x, ilist); |
| 5721 | t = new_var; |
| 5722 | new_var = build_simple_mem_ref (new_var); |
| 5723 | } |
| 5724 | else |
| 5725 | { |
| 5726 | t = create_tmp_var (type); |
| 5727 | gimplify_assign (t, x, ilist); |
| 5728 | SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t)); |
| 5729 | DECL_HAS_VALUE_EXPR_P (new_var) = 1; |
| 5730 | } |
| 5731 | t = fold_convert (build_pointer_type (boolean_type_node), t); |
| 5732 | t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, |
| 5733 | TYPE_SIZE_UNIT (TREE_TYPE (type))); |
| 5734 | cond = create_tmp_var (TREE_TYPE (t)); |
| 5735 | gimplify_assign (cond, t, ilist); |
| 5736 | } |
| 5737 | else if (is_variable_sized (expr: var)) |
| 5738 | { |
| 5739 | /* For variable sized types, we need to allocate the |
| 5740 | actual storage here. Call alloca and store the |
| 5741 | result in the pointer decl that we created elsewhere. */ |
| 5742 | if (pass == 0) |
| 5743 | continue; |
| 5744 | |
| 5745 | if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx)) |
| 5746 | { |
| 5747 | tree tmp; |
| 5748 | |
| 5749 | ptr = DECL_VALUE_EXPR (new_var); |
| 5750 | gcc_assert (TREE_CODE (ptr) == INDIRECT_REF); |
| 5751 | ptr = TREE_OPERAND (ptr, 0); |
| 5752 | gcc_assert (DECL_P (ptr)); |
| 5753 | x = TYPE_SIZE_UNIT (TREE_TYPE (new_var)); |
| 5754 | |
| 5755 | if (lower_private_allocate (var, new_var, allocator, |
| 5756 | allocate_ptr, ilist, ctx, |
| 5757 | is_ref: false, size: x)) |
| 5758 | tmp = allocate_ptr; |
| 5759 | else |
| 5760 | { |
| 5761 | /* void *tmp = __builtin_alloca */ |
| 5762 | tree atmp |
| 5763 | = builtin_decl_explicit (fncode: BUILT_IN_ALLOCA_WITH_ALIGN); |
| 5764 | gcall *stmt |
| 5765 | = gimple_build_call (atmp, 2, x, |
| 5766 | size_int (DECL_ALIGN (var))); |
| 5767 | cfun->calls_alloca = 1; |
| 5768 | tmp = create_tmp_var_raw (ptr_type_node); |
| 5769 | gimple_add_tmp_var (tmp); |
| 5770 | gimple_call_set_lhs (gs: stmt, lhs: tmp); |
| 5771 | |
| 5772 | gimple_seq_add_stmt (ilist, stmt); |
| 5773 | } |
| 5774 | |
| 5775 | x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp); |
| 5776 | gimplify_assign (ptr, x, ilist); |
| 5777 | } |
| 5778 | } |
| 5779 | else if (omp_privatize_by_reference (decl: var) |
| 5780 | && (c_kind != OMP_CLAUSE_FIRSTPRIVATE |
| 5781 | || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))) |
| 5782 | { |
| 5783 | /* For references that are being privatized for Fortran, |
| 5784 | allocate new backing storage for the new pointer |
| 5785 | variable. This allows us to avoid changing all the |
| 5786 | code that expects a pointer to something that expects |
| 5787 | a direct variable. */ |
| 5788 | if (pass == 0) |
| 5789 | continue; |
| 5790 | |
| 5791 | x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var))); |
| 5792 | if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx)) |
| 5793 | { |
| 5794 | x = build_receiver_ref (var, by_ref: false, ctx); |
| 5795 | if (ctx->allocate_map) |
| 5796 | if (tree *allocatep = ctx->allocate_map->get (k: var)) |
| 5797 | { |
| 5798 | allocator = *allocatep; |
| 5799 | if (TREE_CODE (allocator) == TREE_LIST) |
| 5800 | allocator = TREE_PURPOSE (allocator); |
| 5801 | if (TREE_CODE (allocator) != INTEGER_CST) |
| 5802 | allocator = build_outer_var_ref (var: allocator, ctx); |
| 5803 | allocator = fold_convert (pointer_sized_int_node, |
| 5804 | allocator); |
| 5805 | allocate_ptr = unshare_expr (x); |
| 5806 | } |
| 5807 | if (allocator == NULL_TREE) |
| 5808 | x = build_fold_addr_expr_loc (clause_loc, x); |
| 5809 | } |
| 5810 | else if (lower_private_allocate (var, new_var, allocator, |
| 5811 | allocate_ptr, |
| 5812 | ilist, ctx, is_ref: true, size: x)) |
| 5813 | x = allocate_ptr; |
| 5814 | else if (TREE_CONSTANT (x)) |
| 5815 | { |
		  /* For reduction in SIMD loop, defer adding the
		     initialization of the reference, because if we decide
		     to use SIMD array for it, the initialization could cause
		     expansion ICE.  Ditto for other privatization clauses. */
| 5820 | if (is_simd) |
| 5821 | x = NULL_TREE; |
| 5822 | else |
| 5823 | { |
| 5824 | x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)), |
| 5825 | get_name (var)); |
| 5826 | gimple_add_tmp_var (x); |
| 5827 | TREE_ADDRESSABLE (x) = 1; |
| 5828 | x = build_fold_addr_expr_loc (clause_loc, x); |
| 5829 | } |
| 5830 | } |
| 5831 | else |
| 5832 | { |
| 5833 | tree atmp |
| 5834 | = builtin_decl_explicit (fncode: BUILT_IN_ALLOCA_WITH_ALIGN); |
| 5835 | tree rtype = TREE_TYPE (TREE_TYPE (new_var)); |
| 5836 | tree al = size_int (TYPE_ALIGN (rtype)); |
| 5837 | x = build_call_expr_loc (clause_loc, atmp, 2, x, al); |
| 5838 | } |
| 5839 | |
| 5840 | if (x) |
| 5841 | { |
| 5842 | x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x); |
| 5843 | gimplify_assign (new_var, x, ilist); |
| 5844 | } |
| 5845 | |
| 5846 | new_var = build_simple_mem_ref_loc (clause_loc, new_var); |
| 5847 | } |
| 5848 | else if ((c_kind == OMP_CLAUSE_REDUCTION |
| 5849 | || c_kind == OMP_CLAUSE_IN_REDUCTION) |
| 5850 | && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
| 5851 | { |
| 5852 | if (pass == 0) |
| 5853 | continue; |
| 5854 | } |
| 5855 | else if (pass != 0) |
| 5856 | continue; |
| 5857 | |
| 5858 | switch (OMP_CLAUSE_CODE (c)) |
| 5859 | { |
| 5860 | case OMP_CLAUSE_SHARED: |
| 5861 | /* Ignore shared directives in teams construct inside |
| 5862 | target construct. */ |
| 5863 | if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TEAMS |
| 5864 | && !is_host_teams_ctx (ctx)) |
| 5865 | continue; |
| 5866 | /* Shared global vars are just accessed directly. */ |
| 5867 | if (is_global_var (t: new_var)) |
| 5868 | break; |
| 5869 | /* For taskloop firstprivate/lastprivate, represented |
| 5870 | as firstprivate and shared clause on the task, new_var |
| 5871 | is the firstprivate var. */ |
| 5872 | if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)) |
| 5873 | break; |
| 5874 | /* Set up the DECL_VALUE_EXPR for shared variables now. This |
| 5875 | needs to be delayed until after fixup_child_record_type so |
| 5876 | that we get the correct type during the dereference. */ |
| 5877 | by_ref = use_pointer_for_field (decl: var, shared_ctx: ctx); |
| 5878 | x = build_receiver_ref (var, by_ref, ctx); |
| 5879 | SET_DECL_VALUE_EXPR (new_var, x); |
| 5880 | DECL_HAS_VALUE_EXPR_P (new_var) = 1; |
| 5881 | |
| 5882 | /* ??? If VAR is not passed by reference, and the variable |
| 5883 | hasn't been initialized yet, then we'll get a warning for |
| 5884 | the store into the omp_data_s structure. Ideally, we'd be |
| 5885 | able to notice this and not store anything at all, but |
| 5886 | we're generating code too early. Suppress the warning. */ |
| 5887 | if (!by_ref) |
| 5888 | suppress_warning (var, OPT_Wuninitialized); |
| 5889 | break; |
| 5890 | |
| 5891 | case OMP_CLAUSE__CONDTEMP_: |
| 5892 | if (is_parallel_ctx (ctx)) |
| 5893 | { |
| 5894 | x = build_receiver_ref (var, by_ref: false, ctx); |
| 5895 | SET_DECL_VALUE_EXPR (new_var, x); |
| 5896 | DECL_HAS_VALUE_EXPR_P (new_var) = 1; |
| 5897 | } |
| 5898 | else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)) |
| 5899 | { |
| 5900 | x = build_zero_cst (TREE_TYPE (var)); |
| 5901 | goto do_private; |
| 5902 | } |
| 5903 | break; |
| 5904 | |
| 5905 | case OMP_CLAUSE_LASTPRIVATE: |
| 5906 | if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) |
| 5907 | break; |
| 5908 | /* FALLTHRU */ |
| 5909 | |
| 5910 | case OMP_CLAUSE_PRIVATE: |
| 5911 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE) |
| 5912 | x = build_outer_var_ref (var, ctx); |
| 5913 | else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c)) |
| 5914 | { |
| 5915 | if (is_task_ctx (ctx)) |
| 5916 | x = build_receiver_ref (var, by_ref: false, ctx); |
| 5917 | else |
| 5918 | x = build_outer_var_ref (var, ctx, code: OMP_CLAUSE_PRIVATE); |
| 5919 | } |
| 5920 | else |
| 5921 | x = NULL; |
| 5922 | do_private: |
| 5923 | tree nx; |
| 5924 | bool copy_ctor; |
| 5925 | copy_ctor = false; |
| 5926 | lower_private_allocate (var, new_var, allocator, allocate_ptr, |
| 5927 | ilist, ctx, is_ref: false, NULL_TREE); |
| 5928 | nx = unshare_expr (new_var); |
| 5929 | if (is_simd |
| 5930 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
| 5931 | && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)) |
| 5932 | copy_ctor = true; |
| 5933 | if (copy_ctor) |
| 5934 | nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x); |
| 5935 | else |
| 5936 | nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x); |
| 5937 | if (is_simd) |
| 5938 | { |
| 5939 | tree y = lang_hooks.decls.omp_clause_dtor (c, new_var); |
| 5940 | if ((TREE_ADDRESSABLE (new_var) || nx || y |
| 5941 | || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
| 5942 | && (gimple_omp_for_collapse (gs: ctx->stmt) != 1 |
| 5943 | || (gimple_omp_for_index (gs: ctx->stmt, i: 0) |
| 5944 | != new_var))) |
| 5945 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_ |
| 5946 | || omp_privatize_by_reference (decl: var)) |
| 5947 | && lower_rec_simd_input_clauses (new_var, ctx, sctx: &sctx, |
| 5948 | ivar, lvar)) |
| 5949 | { |
| 5950 | if (omp_privatize_by_reference (decl: var)) |
| 5951 | { |
| 5952 | gcc_assert (TREE_CODE (new_var) == MEM_REF); |
| 5953 | tree new_vard = TREE_OPERAND (new_var, 0); |
| 5954 | gcc_assert (DECL_P (new_vard)); |
| 5955 | SET_DECL_VALUE_EXPR (new_vard, |
| 5956 | build_fold_addr_expr (lvar)); |
| 5957 | DECL_HAS_VALUE_EXPR_P (new_vard) = 1; |
| 5958 | } |
| 5959 | |
| 5960 | if (nx) |
| 5961 | { |
| 5962 | tree iv = unshare_expr (ivar); |
| 5963 | if (copy_ctor) |
| 5964 | x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, |
| 5965 | x); |
| 5966 | else |
| 5967 | x = lang_hooks.decls.omp_clause_default_ctor (c, |
| 5968 | iv, |
| 5969 | x); |
| 5970 | } |
| 5971 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_) |
| 5972 | { |
| 5973 | x = build2 (MODIFY_EXPR, TREE_TYPE (ivar), |
| 5974 | unshare_expr (ivar), x); |
| 5975 | nx = x; |
| 5976 | } |
| 5977 | if (nx && x) |
| 5978 | gimplify_and_add (x, &llist[0]); |
| 5979 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
| 5980 | && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)) |
| 5981 | { |
| 5982 | tree v = new_var; |
| 5983 | if (!DECL_P (v)) |
| 5984 | { |
| 5985 | gcc_assert (TREE_CODE (v) == MEM_REF); |
| 5986 | v = TREE_OPERAND (v, 0); |
| 5987 | gcc_assert (DECL_P (v)); |
| 5988 | } |
| 5989 | v = *ctx->lastprivate_conditional_map->get (k: v); |
| 5990 | tree t = create_tmp_var (TREE_TYPE (v)); |
| 5991 | tree z = build_zero_cst (TREE_TYPE (v)); |
| 5992 | tree orig_v |
| 5993 | = build_outer_var_ref (var, ctx, |
| 5994 | code: OMP_CLAUSE_LASTPRIVATE); |
| 5995 | gimple_seq_add_stmt (dlist, |
| 5996 | gimple_build_assign (t, z)); |
| 5997 | gcc_assert (DECL_HAS_VALUE_EXPR_P (v)); |
| 5998 | tree civar = DECL_VALUE_EXPR (v); |
| 5999 | gcc_assert (TREE_CODE (civar) == ARRAY_REF); |
| 6000 | civar = unshare_expr (civar); |
| 6001 | TREE_OPERAND (civar, 1) = sctx.idx; |
| 6002 | x = build2 (MODIFY_EXPR, TREE_TYPE (t), t, |
| 6003 | unshare_expr (civar)); |
| 6004 | x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x, |
| 6005 | build2 (MODIFY_EXPR, TREE_TYPE (orig_v), |
| 6006 | orig_v, unshare_expr (ivar))); |
| 6007 | tree cond = build2 (LT_EXPR, boolean_type_node, t, |
| 6008 | civar); |
| 6009 | x = build3 (COND_EXPR, void_type_node, cond, x, |
| 6010 | void_node); |
| 6011 | gimple_seq tseq = NULL; |
| 6012 | gimplify_and_add (x, &tseq); |
| 6013 | if (ctx->outer) |
| 6014 | lower_omp (&tseq, ctx->outer); |
| 6015 | gimple_seq_add_seq (&llist[1], tseq); |
| 6016 | } |
| 6017 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
| 6018 | && ctx->for_simd_scan_phase) |
| 6019 | { |
| 6020 | x = unshare_expr (ivar); |
| 6021 | tree orig_v |
| 6022 | = build_outer_var_ref (var, ctx, |
| 6023 | code: OMP_CLAUSE_LASTPRIVATE); |
| 6024 | x = lang_hooks.decls.omp_clause_assign_op (c, x, |
| 6025 | orig_v); |
| 6026 | gimplify_and_add (x, &llist[0]); |
| 6027 | } |
| 6028 | if (y) |
| 6029 | { |
| 6030 | y = lang_hooks.decls.omp_clause_dtor (c, ivar); |
| 6031 | if (y) |
| 6032 | gimplify_and_add (y, &llist[1]); |
| 6033 | } |
| 6034 | break; |
| 6035 | } |
| 6036 | if (omp_privatize_by_reference (decl: var)) |
| 6037 | { |
| 6038 | gcc_assert (TREE_CODE (new_var) == MEM_REF); |
| 6039 | tree new_vard = TREE_OPERAND (new_var, 0); |
| 6040 | gcc_assert (DECL_P (new_vard)); |
| 6041 | tree type = TREE_TYPE (TREE_TYPE (new_vard)); |
| 6042 | x = TYPE_SIZE_UNIT (type); |
| 6043 | if (TREE_CONSTANT (x)) |
| 6044 | { |
| 6045 | x = create_tmp_var_raw (type, get_name (var)); |
| 6046 | gimple_add_tmp_var (x); |
| 6047 | TREE_ADDRESSABLE (x) = 1; |
| 6048 | x = build_fold_addr_expr_loc (clause_loc, x); |
| 6049 | x = fold_convert_loc (clause_loc, |
| 6050 | TREE_TYPE (new_vard), x); |
| 6051 | gimplify_assign (new_vard, x, ilist); |
| 6052 | } |
| 6053 | } |
| 6054 | } |
| 6055 | if (nx) |
| 6056 | gimplify_and_add (nx, ilist); |
| 6057 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
| 6058 | && is_simd |
| 6059 | && ctx->for_simd_scan_phase) |
| 6060 | { |
| 6061 | tree orig_v = build_outer_var_ref (var, ctx, |
| 6062 | code: OMP_CLAUSE_LASTPRIVATE); |
| 6063 | x = lang_hooks.decls.omp_clause_assign_op (c, new_var, |
| 6064 | orig_v); |
| 6065 | gimplify_and_add (x, ilist); |
| 6066 | } |
| 6067 | /* FALLTHRU */ |
| 6068 | |
| 6069 | do_dtor: |
| 6070 | x = lang_hooks.decls.omp_clause_dtor (c, new_var); |
| 6071 | if (x) |
| 6072 | gimplify_and_add (x, dlist); |
| 6073 | if (allocator) |
| 6074 | { |
| 6075 | if (!is_gimple_val (allocator)) |
| 6076 | { |
| 6077 | tree avar = create_tmp_var (TREE_TYPE (allocator)); |
| 6078 | gimplify_assign (avar, allocator, dlist); |
| 6079 | allocator = avar; |
| 6080 | } |
| 6081 | if (!is_gimple_val (allocate_ptr)) |
| 6082 | { |
| 6083 | tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr)); |
| 6084 | gimplify_assign (apvar, allocate_ptr, dlist); |
| 6085 | allocate_ptr = apvar; |
| 6086 | } |
| 6087 | tree f = builtin_decl_explicit (fncode: BUILT_IN_GOMP_FREE); |
| 6088 | gimple *g |
| 6089 | = gimple_build_call (f, 2, allocate_ptr, allocator); |
| 6090 | gimple_seq_add_stmt (dlist, g); |
| 6091 | } |
| 6092 | break; |
| 6093 | |
| 6094 | case OMP_CLAUSE_LINEAR: |
| 6095 | if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)) |
| 6096 | goto do_firstprivate; |
| 6097 | if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) |
| 6098 | x = NULL; |
| 6099 | else |
| 6100 | x = build_outer_var_ref (var, ctx); |
| 6101 | goto do_private; |
| 6102 | |
| 6103 | case OMP_CLAUSE_FIRSTPRIVATE: |
| 6104 | if (is_task_ctx (ctx)) |
| 6105 | { |
| 6106 | if ((omp_privatize_by_reference (decl: var) |
| 6107 | && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)) |
| 6108 | || is_variable_sized (expr: var)) |
| 6109 | goto do_dtor; |
| 6110 | else if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl: var, |
| 6111 | ctx)) |
| 6112 | || use_pointer_for_field (decl: var, NULL)) |
| 6113 | { |
| 6114 | x = build_receiver_ref (var, by_ref: false, ctx); |
| 6115 | if (ctx->allocate_map) |
| 6116 | if (tree *allocatep = ctx->allocate_map->get (k: var)) |
| 6117 | { |
| 6118 | allocator = *allocatep; |
| 6119 | if (TREE_CODE (allocator) == TREE_LIST) |
| 6120 | allocator = TREE_PURPOSE (allocator); |
| 6121 | if (TREE_CODE (allocator) != INTEGER_CST) |
| 6122 | allocator = build_outer_var_ref (var: allocator, ctx); |
| 6123 | allocator = fold_convert (pointer_sized_int_node, |
| 6124 | allocator); |
| 6125 | allocate_ptr = unshare_expr (x); |
| 6126 | x = build_simple_mem_ref (x); |
| 6127 | TREE_THIS_NOTRAP (x) = 1; |
| 6128 | } |
| 6129 | SET_DECL_VALUE_EXPR (new_var, x); |
| 6130 | DECL_HAS_VALUE_EXPR_P (new_var) = 1; |
| 6131 | goto do_dtor; |
| 6132 | } |
| 6133 | } |
| 6134 | if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) |
| 6135 | && omp_privatize_by_reference (decl: var)) |
| 6136 | { |
| 6137 | x = build_outer_var_ref (var, ctx); |
| 6138 | gcc_assert (TREE_CODE (x) == MEM_REF |
| 6139 | && integer_zerop (TREE_OPERAND (x, 1))); |
| 6140 | x = TREE_OPERAND (x, 0); |
| 6141 | x = lang_hooks.decls.omp_clause_copy_ctor |
| 6142 | (c, unshare_expr (new_var), x); |
| 6143 | gimplify_and_add (x, ilist); |
| 6144 | goto do_dtor; |
| 6145 | } |
| 6146 | do_firstprivate: |
| 6147 | lower_private_allocate (var, new_var, allocator, allocate_ptr, |
| 6148 | ilist, ctx, is_ref: false, NULL_TREE); |
| 6149 | x = build_outer_var_ref (var, ctx); |
| 6150 | if (is_simd) |
| 6151 | { |
| 6152 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR |
| 6153 | && gimple_omp_for_combined_into_p (g: ctx->stmt)) |
| 6154 | { |
| 6155 | tree t = OMP_CLAUSE_LINEAR_STEP (c); |
| 6156 | if (DECL_P (t)) |
| 6157 | t = build_outer_var_ref (var: t, ctx); |
| 6158 | tree stept = TREE_TYPE (t); |
| 6159 | tree ct = omp_find_clause (clauses, |
| 6160 | kind: OMP_CLAUSE__LOOPTEMP_); |
| 6161 | gcc_assert (ct); |
| 6162 | tree l = OMP_CLAUSE_DECL (ct); |
| 6163 | tree n1 = fd->loop.n1; |
| 6164 | tree step = fd->loop.step; |
| 6165 | tree itype = TREE_TYPE (l); |
| 6166 | if (POINTER_TYPE_P (itype)) |
| 6167 | itype = signed_type_for (itype); |
| 6168 | l = fold_build2 (MINUS_EXPR, itype, l, n1); |
| 6169 | if (TYPE_UNSIGNED (itype) |
| 6170 | && fd->loop.cond_code == GT_EXPR) |
| 6171 | l = fold_build2 (TRUNC_DIV_EXPR, itype, |
| 6172 | fold_build1 (NEGATE_EXPR, itype, l), |
| 6173 | fold_build1 (NEGATE_EXPR, |
| 6174 | itype, step)); |
| 6175 | else |
| 6176 | l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step); |
| 6177 | t = fold_build2 (MULT_EXPR, stept, |
| 6178 | fold_convert (stept, l), t); |
| 6179 | |
| 6180 | if (OMP_CLAUSE_LINEAR_ARRAY (c)) |
| 6181 | { |
| 6182 | if (omp_privatize_by_reference (decl: var)) |
| 6183 | { |
| 6184 | gcc_assert (TREE_CODE (new_var) == MEM_REF); |
| 6185 | tree new_vard = TREE_OPERAND (new_var, 0); |
| 6186 | gcc_assert (DECL_P (new_vard)); |
| 6187 | tree type = TREE_TYPE (TREE_TYPE (new_vard)); |
| 6188 | nx = TYPE_SIZE_UNIT (type); |
| 6189 | if (TREE_CONSTANT (nx)) |
| 6190 | { |
| 6191 | nx = create_tmp_var_raw (type, |
| 6192 | get_name (var)); |
| 6193 | gimple_add_tmp_var (nx); |
| 6194 | TREE_ADDRESSABLE (nx) = 1; |
| 6195 | nx = build_fold_addr_expr_loc (clause_loc, |
| 6196 | nx); |
| 6197 | nx = fold_convert_loc (clause_loc, |
| 6198 | TREE_TYPE (new_vard), |
| 6199 | nx); |
| 6200 | gimplify_assign (new_vard, nx, ilist); |
| 6201 | } |
| 6202 | } |
| 6203 | |
| 6204 | x = lang_hooks.decls.omp_clause_linear_ctor |
| 6205 | (c, new_var, x, t); |
| 6206 | gimplify_and_add (x, ilist); |
| 6207 | goto do_dtor; |
| 6208 | } |
| 6209 | |
| 6210 | if (POINTER_TYPE_P (TREE_TYPE (x))) |
| 6211 | x = fold_build_pointer_plus (x, t); |
| 6212 | else |
| 6213 | x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, |
| 6214 | fold_convert (TREE_TYPE (x), t)); |
| 6215 | } |
| 6216 | |
| 6217 | if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR |
| 6218 | || TREE_ADDRESSABLE (new_var) |
| 6219 | || omp_privatize_by_reference (decl: var)) |
| 6220 | && lower_rec_simd_input_clauses (new_var, ctx, sctx: &sctx, |
| 6221 | ivar, lvar)) |
| 6222 | { |
| 6223 | if (omp_privatize_by_reference (decl: var)) |
| 6224 | { |
| 6225 | gcc_assert (TREE_CODE (new_var) == MEM_REF); |
| 6226 | tree new_vard = TREE_OPERAND (new_var, 0); |
| 6227 | gcc_assert (DECL_P (new_vard)); |
| 6228 | SET_DECL_VALUE_EXPR (new_vard, |
| 6229 | build_fold_addr_expr (lvar)); |
| 6230 | DECL_HAS_VALUE_EXPR_P (new_vard) = 1; |
| 6231 | } |
| 6232 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR) |
| 6233 | { |
| 6234 | tree iv = create_tmp_var (TREE_TYPE (new_var)); |
| 6235 | x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x); |
| 6236 | gimplify_and_add (x, ilist); |
| 6237 | gimple_stmt_iterator gsi |
| 6238 | = gsi_start (seq&: *gimple_omp_body_ptr (gs: ctx->stmt)); |
| 6239 | gassign *g |
| 6240 | = gimple_build_assign (unshare_expr (lvar), iv); |
| 6241 | gsi_insert_before_without_update (&gsi, g, |
| 6242 | GSI_SAME_STMT); |
| 6243 | tree t = OMP_CLAUSE_LINEAR_STEP (c); |
| 6244 | enum tree_code code = PLUS_EXPR; |
| 6245 | if (POINTER_TYPE_P (TREE_TYPE (new_var))) |
| 6246 | code = POINTER_PLUS_EXPR; |
| 6247 | g = gimple_build_assign (iv, code, iv, t); |
| 6248 | gsi_insert_before_without_update (&gsi, g, |
| 6249 | GSI_SAME_STMT); |
| 6250 | break; |
| 6251 | } |
| 6252 | x = lang_hooks.decls.omp_clause_copy_ctor |
| 6253 | (c, unshare_expr (ivar), x); |
| 6254 | gimplify_and_add (x, &llist[0]); |
| 6255 | x = lang_hooks.decls.omp_clause_dtor (c, ivar); |
| 6256 | if (x) |
| 6257 | gimplify_and_add (x, &llist[1]); |
| 6258 | break; |
| 6259 | } |
| 6260 | if (omp_privatize_by_reference (decl: var)) |
| 6261 | { |
| 6262 | gcc_assert (TREE_CODE (new_var) == MEM_REF); |
| 6263 | tree new_vard = TREE_OPERAND (new_var, 0); |
| 6264 | gcc_assert (DECL_P (new_vard)); |
| 6265 | tree type = TREE_TYPE (TREE_TYPE (new_vard)); |
| 6266 | nx = TYPE_SIZE_UNIT (type); |
| 6267 | if (TREE_CONSTANT (nx)) |
| 6268 | { |
| 6269 | nx = create_tmp_var_raw (type, get_name (var)); |
| 6270 | gimple_add_tmp_var (nx); |
| 6271 | TREE_ADDRESSABLE (nx) = 1; |
| 6272 | nx = build_fold_addr_expr_loc (clause_loc, nx); |
| 6273 | nx = fold_convert_loc (clause_loc, |
| 6274 | TREE_TYPE (new_vard), nx); |
| 6275 | gimplify_assign (new_vard, nx, ilist); |
| 6276 | } |
| 6277 | } |
| 6278 | } |
| 6279 | x = lang_hooks.decls.omp_clause_copy_ctor |
| 6280 | (c, unshare_expr (new_var), x); |
| 6281 | gimplify_and_add (x, ilist); |
| 6282 | goto do_dtor; |
| 6283 | |
| 6284 | case OMP_CLAUSE__LOOPTEMP_: |
| 6285 | case OMP_CLAUSE__REDUCTEMP_: |
| 6286 | gcc_assert (is_taskreg_ctx (ctx)); |
| 6287 | x = build_outer_var_ref (var, ctx); |
| 6288 | x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x); |
| 6289 | gimplify_and_add (x, ilist); |
| 6290 | break; |
| 6291 | |
| 6292 | case OMP_CLAUSE_COPYIN: |
| 6293 | by_ref = use_pointer_for_field (decl: var, NULL); |
| 6294 | x = build_receiver_ref (var, by_ref, ctx); |
| 6295 | x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x); |
| 6296 | append_to_statement_list (x, ©in_seq); |
| 6297 | copyin_by_ref |= by_ref; |
| 6298 | break; |
| 6299 | |
| 6300 | case OMP_CLAUSE_REDUCTION: |
| 6301 | case OMP_CLAUSE_IN_REDUCTION: |
| 6302 | /* OpenACC reductions are initialized using the |
| 6303 | GOACC_REDUCTION internal function. */ |
| 6304 | if (is_gimple_omp_oacc (stmt: ctx->stmt)) |
| 6305 | break; |
| 6306 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
| 6307 | { |
| 6308 | tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); |
| 6309 | gimple *tseq; |
| 6310 | tree ptype = TREE_TYPE (placeholder); |
| 6311 | if (cond) |
| 6312 | { |
| 6313 | x = error_mark_node; |
| 6314 | if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) |
| 6315 | && !task_reduction_needs_orig_p) |
| 6316 | x = var; |
| 6317 | else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) |
| 6318 | { |
| 6319 | tree pptype = build_pointer_type (ptype); |
| 6320 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) |
| 6321 | x = build4 (ARRAY_REF, ptr_type_node, tskred_avar, |
| 6322 | size_int (task_reduction_cnt_full |
| 6323 | + task_reduction_cntorig - 1), |
| 6324 | NULL_TREE, NULL_TREE); |
| 6325 | else |
| 6326 | { |
| 6327 | unsigned int idx |
| 6328 | = *ctx->task_reduction_map->get (k: c); |
| 6329 | x = task_reduction_read (ilist, tskred_temp, |
| 6330 | type: pptype, idx: 7 + 3 * idx); |
| 6331 | } |
| 6332 | x = fold_convert (pptype, x); |
| 6333 | x = build_simple_mem_ref (x); |
| 6334 | } |
| 6335 | } |
| 6336 | else |
| 6337 | { |
| 6338 | lower_private_allocate (var, new_var, allocator, |
| 6339 | allocate_ptr, ilist, ctx, is_ref: false, |
| 6340 | NULL_TREE); |
| 6341 | x = build_outer_var_ref (var, ctx); |
| 6342 | |
| 6343 | if (omp_privatize_by_reference (decl: var) |
| 6344 | && !useless_type_conversion_p (ptype, TREE_TYPE (x))) |
| 6345 | x = build_fold_addr_expr_loc (clause_loc, x); |
| 6346 | } |
| 6347 | SET_DECL_VALUE_EXPR (placeholder, x); |
| 6348 | DECL_HAS_VALUE_EXPR_P (placeholder) = 1; |
| 6349 | tree new_vard = new_var; |
| 6350 | if (omp_privatize_by_reference (decl: var)) |
| 6351 | { |
| 6352 | gcc_assert (TREE_CODE (new_var) == MEM_REF); |
| 6353 | new_vard = TREE_OPERAND (new_var, 0); |
| 6354 | gcc_assert (DECL_P (new_vard)); |
| 6355 | } |
| 6356 | tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE; |
| 6357 | if (is_simd |
| 6358 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
| 6359 | && OMP_CLAUSE_REDUCTION_INSCAN (c)) |
| 6360 | rvarp = &rvar; |
| 6361 | if (is_simd |
| 6362 | && lower_rec_simd_input_clauses (new_var, ctx, sctx: &sctx, |
| 6363 | ivar, lvar, rvar: rvarp, |
| 6364 | rvar2: &rvar2)) |
| 6365 | { |
| 6366 | if (new_vard == new_var) |
| 6367 | { |
| 6368 | gcc_assert (DECL_VALUE_EXPR (new_var) == lvar); |
| 6369 | SET_DECL_VALUE_EXPR (new_var, ivar); |
| 6370 | } |
| 6371 | else |
| 6372 | { |
| 6373 | SET_DECL_VALUE_EXPR (new_vard, |
| 6374 | build_fold_addr_expr (ivar)); |
| 6375 | DECL_HAS_VALUE_EXPR_P (new_vard) = 1; |
| 6376 | } |
| 6377 | x = lang_hooks.decls.omp_clause_default_ctor |
| 6378 | (c, unshare_expr (ivar), |
| 6379 | build_outer_var_ref (var, ctx)); |
| 6380 | if (rvarp && ctx->for_simd_scan_phase) |
| 6381 | { |
| 6382 | if (x) |
| 6383 | gimplify_and_add (x, &llist[0]); |
| 6384 | x = lang_hooks.decls.omp_clause_dtor (c, ivar); |
| 6385 | if (x) |
| 6386 | gimplify_and_add (x, &llist[1]); |
| 6387 | break; |
| 6388 | } |
| 6389 | else if (rvarp) |
| 6390 | { |
| 6391 | if (x) |
| 6392 | { |
| 6393 | gimplify_and_add (x, &llist[0]); |
| 6394 | |
| 6395 | tree ivar2 = unshare_expr (lvar); |
| 6396 | TREE_OPERAND (ivar2, 1) = sctx.idx; |
| 6397 | x = lang_hooks.decls.omp_clause_default_ctor |
| 6398 | (c, ivar2, build_outer_var_ref (var, ctx)); |
| 6399 | gimplify_and_add (x, &llist[0]); |
| 6400 | |
| 6401 | if (rvar2) |
| 6402 | { |
| 6403 | x = lang_hooks.decls.omp_clause_default_ctor |
| 6404 | (c, unshare_expr (rvar2), |
| 6405 | build_outer_var_ref (var, ctx)); |
| 6406 | gimplify_and_add (x, &llist[0]); |
| 6407 | } |
| 6408 | |
| 6409 | /* For types that need construction, add another |
| 6410 | private var which will be default constructed |
| 6411 | and optionally initialized with |
| 6412 | OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the |
| 6413 | loop we want to assign this value instead of |
| 6414 | constructing and destructing it in each |
| 6415 | iteration. */ |
| 6416 | tree nv = create_tmp_var_raw (TREE_TYPE (ivar)); |
| 6417 | gimple_add_tmp_var (nv); |
| 6418 | ctx->cb.decl_map->put (TREE_OPERAND (rvar2 |
| 6419 | ? rvar2 |
| 6420 | : ivar, 0), |
| 6421 | v: nv); |
| 6422 | x = lang_hooks.decls.omp_clause_default_ctor |
| 6423 | (c, nv, build_outer_var_ref (var, ctx)); |
| 6424 | gimplify_and_add (x, ilist); |
| 6425 | |
| 6426 | if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) |
| 6427 | { |
| 6428 | tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); |
| 6429 | x = DECL_VALUE_EXPR (new_vard); |
| 6430 | tree vexpr = nv; |
| 6431 | if (new_vard != new_var) |
| 6432 | vexpr = build_fold_addr_expr (nv); |
| 6433 | SET_DECL_VALUE_EXPR (new_vard, vexpr); |
| 6434 | lower_omp (&tseq, ctx); |
| 6435 | SET_DECL_VALUE_EXPR (new_vard, x); |
| 6436 | gimple_seq_add_seq (ilist, tseq); |
| 6437 | OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; |
| 6438 | } |
| 6439 | |
| 6440 | x = lang_hooks.decls.omp_clause_dtor (c, nv); |
| 6441 | if (x) |
| 6442 | gimplify_and_add (x, dlist); |
| 6443 | } |
| 6444 | |
| 6445 | tree ref = build_outer_var_ref (var, ctx); |
| 6446 | x = unshare_expr (ivar); |
| 6447 | x = lang_hooks.decls.omp_clause_assign_op (c, x, |
| 6448 | ref); |
| 6449 | gimplify_and_add (x, &llist[0]); |
| 6450 | |
| 6451 | ref = build_outer_var_ref (var, ctx); |
| 6452 | x = lang_hooks.decls.omp_clause_assign_op (c, ref, |
| 6453 | rvar); |
| 6454 | gimplify_and_add (x, &llist[3]); |
| 6455 | |
| 6456 | DECL_HAS_VALUE_EXPR_P (placeholder) = 0; |
| 6457 | if (new_vard == new_var) |
| 6458 | SET_DECL_VALUE_EXPR (new_var, lvar); |
| 6459 | else |
| 6460 | SET_DECL_VALUE_EXPR (new_vard, |
| 6461 | build_fold_addr_expr (lvar)); |
| 6462 | |
| 6463 | x = lang_hooks.decls.omp_clause_dtor (c, ivar); |
| 6464 | if (x) |
| 6465 | gimplify_and_add (x, &llist[1]); |
| 6466 | |
| 6467 | tree ivar2 = unshare_expr (lvar); |
| 6468 | TREE_OPERAND (ivar2, 1) = sctx.idx; |
| 6469 | x = lang_hooks.decls.omp_clause_dtor (c, ivar2); |
| 6470 | if (x) |
| 6471 | gimplify_and_add (x, &llist[1]); |
| 6472 | |
| 6473 | if (rvar2) |
| 6474 | { |
| 6475 | x = lang_hooks.decls.omp_clause_dtor (c, rvar2); |
| 6476 | if (x) |
| 6477 | gimplify_and_add (x, &llist[1]); |
| 6478 | } |
| 6479 | break; |
| 6480 | } |
| 6481 | if (x) |
| 6482 | gimplify_and_add (x, &llist[0]); |
| 6483 | if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) |
| 6484 | { |
| 6485 | tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); |
| 6486 | lower_omp (&tseq, ctx); |
| 6487 | gimple_seq_add_seq (&llist[0], tseq); |
| 6488 | } |
| 6489 | OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; |
| 6490 | tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); |
| 6491 | lower_omp (&tseq, ctx); |
| 6492 | gimple_seq_add_seq (&llist[1], tseq); |
| 6493 | OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; |
| 6494 | DECL_HAS_VALUE_EXPR_P (placeholder) = 0; |
| 6495 | if (new_vard == new_var) |
| 6496 | SET_DECL_VALUE_EXPR (new_var, lvar); |
| 6497 | else |
| 6498 | SET_DECL_VALUE_EXPR (new_vard, |
| 6499 | build_fold_addr_expr (lvar)); |
| 6500 | x = lang_hooks.decls.omp_clause_dtor (c, ivar); |
| 6501 | if (x) |
| 6502 | gimplify_and_add (x, &llist[1]); |
| 6503 | break; |
| 6504 | } |
| 6505 | /* If this is a reference to constant size reduction var |
| 6506 | with placeholder, we haven't emitted the initializer |
| 6507 | for it because it is undesirable if SIMD arrays are used. |
| 6508 | But if they aren't used, we need to emit the deferred |
| 6509 | initialization now. */ |
| 6510 | else if (omp_privatize_by_reference (decl: var) && is_simd) |
| 6511 | handle_simd_reference (loc: clause_loc, new_vard, ilist); |
| 6512 | |
| 6513 | tree lab2 = NULL_TREE; |
| 6514 | if (cond) |
| 6515 | { |
| 6516 | gimple *g; |
| 6517 | if (!is_parallel_ctx (ctx)) |
| 6518 | { |
| 6519 | tree condv = create_tmp_var (boolean_type_node); |
| 6520 | tree m = build_simple_mem_ref (cond); |
| 6521 | g = gimple_build_assign (condv, m); |
| 6522 | gimple_seq_add_stmt (ilist, g); |
| 6523 | tree lab1 |
| 6524 | = create_artificial_label (UNKNOWN_LOCATION); |
| 6525 | lab2 = create_artificial_label (UNKNOWN_LOCATION); |
| 6526 | g = gimple_build_cond (NE_EXPR, condv, |
| 6527 | boolean_false_node, |
| 6528 | lab2, lab1); |
| 6529 | gimple_seq_add_stmt (ilist, g); |
| 6530 | gimple_seq_add_stmt (ilist, |
| 6531 | gimple_build_label (label: lab1)); |
| 6532 | } |
| 6533 | g = gimple_build_assign (build_simple_mem_ref (cond), |
| 6534 | boolean_true_node); |
| 6535 | gimple_seq_add_stmt (ilist, g); |
| 6536 | } |
| 6537 | x = lang_hooks.decls.omp_clause_default_ctor |
| 6538 | (c, unshare_expr (new_var), |
| 6539 | cond ? NULL_TREE |
| 6540 | : build_outer_var_ref (var, ctx)); |
| 6541 | if (x) |
| 6542 | gimplify_and_add (x, ilist); |
| 6543 | |
| 6544 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
| 6545 | && OMP_CLAUSE_REDUCTION_INSCAN (c)) |
| 6546 | { |
| 6547 | if (ctx->for_simd_scan_phase) |
| 6548 | goto do_dtor; |
| 6549 | if (x || (!is_simd |
| 6550 | && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))) |
| 6551 | { |
| 6552 | tree nv = create_tmp_var_raw (TREE_TYPE (new_var)); |
| 6553 | gimple_add_tmp_var (nv); |
| 6554 | ctx->cb.decl_map->put (k: new_vard, v: nv); |
| 6555 | x = lang_hooks.decls.omp_clause_default_ctor |
| 6556 | (c, nv, build_outer_var_ref (var, ctx)); |
| 6557 | if (x) |
| 6558 | gimplify_and_add (x, ilist); |
| 6559 | if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) |
| 6560 | { |
| 6561 | tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); |
| 6562 | tree vexpr = nv; |
| 6563 | if (new_vard != new_var) |
| 6564 | vexpr = build_fold_addr_expr (nv); |
| 6565 | SET_DECL_VALUE_EXPR (new_vard, vexpr); |
| 6566 | DECL_HAS_VALUE_EXPR_P (new_vard) = 1; |
| 6567 | lower_omp (&tseq, ctx); |
| 6568 | SET_DECL_VALUE_EXPR (new_vard, NULL_TREE); |
| 6569 | DECL_HAS_VALUE_EXPR_P (new_vard) = 0; |
| 6570 | gimple_seq_add_seq (ilist, tseq); |
| 6571 | } |
| 6572 | OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; |
| 6573 | if (is_simd && ctx->scan_exclusive) |
| 6574 | { |
| 6575 | tree nv2 |
| 6576 | = create_tmp_var_raw (TREE_TYPE (new_var)); |
| 6577 | gimple_add_tmp_var (nv2); |
| 6578 | ctx->cb.decl_map->put (k: nv, v: nv2); |
| 6579 | x = lang_hooks.decls.omp_clause_default_ctor |
| 6580 | (c, nv2, build_outer_var_ref (var, ctx)); |
| 6581 | gimplify_and_add (x, ilist); |
| 6582 | x = lang_hooks.decls.omp_clause_dtor (c, nv2); |
| 6583 | if (x) |
| 6584 | gimplify_and_add (x, dlist); |
| 6585 | } |
| 6586 | x = lang_hooks.decls.omp_clause_dtor (c, nv); |
| 6587 | if (x) |
| 6588 | gimplify_and_add (x, dlist); |
| 6589 | } |
| 6590 | else if (is_simd |
| 6591 | && ctx->scan_exclusive |
| 6592 | && TREE_ADDRESSABLE (TREE_TYPE (new_var))) |
| 6593 | { |
| 6594 | tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var)); |
| 6595 | gimple_add_tmp_var (nv2); |
| 6596 | ctx->cb.decl_map->put (k: new_vard, v: nv2); |
| 6597 | x = lang_hooks.decls.omp_clause_dtor (c, nv2); |
| 6598 | if (x) |
| 6599 | gimplify_and_add (x, dlist); |
| 6600 | } |
| 6601 | DECL_HAS_VALUE_EXPR_P (placeholder) = 0; |
| 6602 | goto do_dtor; |
| 6603 | } |
| 6604 | |
| 6605 | if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) |
| 6606 | { |
| 6607 | tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); |
| 6608 | if (c_kind == OMP_CLAUSE_IN_REDUCTION |
| 6609 | && is_omp_target (stmt: ctx->stmt)) |
| 6610 | { |
| 6611 | tree d = maybe_lookup_decl_in_outer_ctx (decl: var, ctx); |
| 6612 | tree oldv = NULL_TREE; |
| 6613 | gcc_assert (d); |
| 6614 | if (DECL_HAS_VALUE_EXPR_P (d)) |
| 6615 | oldv = DECL_VALUE_EXPR (d); |
| 6616 | SET_DECL_VALUE_EXPR (d, new_vard); |
| 6617 | DECL_HAS_VALUE_EXPR_P (d) = 1; |
| 6618 | lower_omp (&tseq, ctx); |
| 6619 | if (oldv) |
| 6620 | SET_DECL_VALUE_EXPR (d, oldv); |
| 6621 | else |
| 6622 | { |
| 6623 | SET_DECL_VALUE_EXPR (d, NULL_TREE); |
| 6624 | DECL_HAS_VALUE_EXPR_P (d) = 0; |
| 6625 | } |
| 6626 | } |
| 6627 | else |
| 6628 | lower_omp (&tseq, ctx); |
| 6629 | gimple_seq_add_seq (ilist, tseq); |
| 6630 | } |
| 6631 | OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; |
| 6632 | if (is_simd) |
| 6633 | { |
| 6634 | tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); |
| 6635 | lower_omp (&tseq, ctx); |
| 6636 | gimple_seq_add_seq (dlist, tseq); |
| 6637 | OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; |
| 6638 | } |
| 6639 | DECL_HAS_VALUE_EXPR_P (placeholder) = 0; |
| 6640 | if (cond) |
| 6641 | { |
| 6642 | if (lab2) |
| 6643 | gimple_seq_add_stmt (ilist, gimple_build_label (label: lab2)); |
| 6644 | break; |
| 6645 | } |
| 6646 | goto do_dtor; |
| 6647 | } |
| 6648 | else |
| 6649 | { |
| 6650 | x = omp_reduction_init (clause: c, TREE_TYPE (new_var)); |
| 6651 | gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE); |
| 6652 | enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c); |
| 6653 | |
| 6654 | if (cond) |
| 6655 | { |
| 6656 | gimple *g; |
| 6657 | tree lab2 = NULL_TREE; |
| 6658 | /* GOMP_taskgroup_reduction_register memsets the whole |
| 6659 | array to zero. If the initializer is zero, we don't |
| 6660 | need to initialize it again, just mark it as ever |
| 6661 | used unconditionally, i.e. cond = true. */ |
| 6662 | if (initializer_zerop (x)) |
| 6663 | { |
| 6664 | g = gimple_build_assign (build_simple_mem_ref (cond), |
| 6665 | boolean_true_node); |
| 6666 | gimple_seq_add_stmt (ilist, g); |
| 6667 | break; |
| 6668 | } |
| 6669 | |
| 6670 | /* Otherwise, emit |
| 6671 | if (!cond) { cond = true; new_var = x; } */ |
| 6672 | if (!is_parallel_ctx (ctx)) |
| 6673 | { |
| 6674 | tree condv = create_tmp_var (boolean_type_node); |
| 6675 | tree m = build_simple_mem_ref (cond); |
| 6676 | g = gimple_build_assign (condv, m); |
| 6677 | gimple_seq_add_stmt (ilist, g); |
| 6678 | tree lab1 |
| 6679 | = create_artificial_label (UNKNOWN_LOCATION); |
| 6680 | lab2 = create_artificial_label (UNKNOWN_LOCATION); |
| 6681 | g = gimple_build_cond (NE_EXPR, condv, |
| 6682 | boolean_false_node, |
| 6683 | lab2, lab1); |
| 6684 | gimple_seq_add_stmt (ilist, g); |
| 6685 | gimple_seq_add_stmt (ilist, |
| 6686 | gimple_build_label (label: lab1)); |
| 6687 | } |
| 6688 | g = gimple_build_assign (build_simple_mem_ref (cond), |
| 6689 | boolean_true_node); |
| 6690 | gimple_seq_add_stmt (ilist, g); |
| 6691 | gimplify_assign (new_var, x, ilist); |
| 6692 | if (lab2) |
| 6693 | gimple_seq_add_stmt (ilist, gimple_build_label (label: lab2)); |
| 6694 | break; |
| 6695 | } |
| 6696 | |
| 6697 | /* reduction(-:var) sums up the partial results, so it |
| 6698 | acts identically to reduction(+:var). */ |
| 6699 | if (code == MINUS_EXPR) |
| 6700 | code = PLUS_EXPR; |
| 6701 | |
| 6702 | bool is_truth_op |
| 6703 | = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR); |
| 6704 | tree new_vard = new_var; |
| 6705 | if (is_simd && omp_privatize_by_reference (decl: var)) |
| 6706 | { |
| 6707 | gcc_assert (TREE_CODE (new_var) == MEM_REF); |
| 6708 | new_vard = TREE_OPERAND (new_var, 0); |
| 6709 | gcc_assert (DECL_P (new_vard)); |
| 6710 | } |
| 6711 | tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE; |
| 6712 | if (is_simd |
| 6713 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
| 6714 | && OMP_CLAUSE_REDUCTION_INSCAN (c)) |
| 6715 | rvarp = &rvar; |
| 6716 | if (is_simd |
| 6717 | && lower_rec_simd_input_clauses (new_var, ctx, sctx: &sctx, |
| 6718 | ivar, lvar, rvar: rvarp, |
| 6719 | rvar2: &rvar2)) |
| 6720 | { |
| 6721 | if (new_vard != new_var) |
| 6722 | { |
| 6723 | SET_DECL_VALUE_EXPR (new_vard, |
| 6724 | build_fold_addr_expr (lvar)); |
| 6725 | DECL_HAS_VALUE_EXPR_P (new_vard) = 1; |
| 6726 | } |
| 6727 | |
| 6728 | tree ref = build_outer_var_ref (var, ctx); |
| 6729 | |
| 6730 | if (rvarp) |
| 6731 | { |
| 6732 | if (ctx->for_simd_scan_phase) |
| 6733 | break; |
| 6734 | gimplify_assign (ivar, ref, &llist[0]); |
| 6735 | ref = build_outer_var_ref (var, ctx); |
| 6736 | gimplify_assign (ref, rvar, &llist[3]); |
| 6737 | break; |
| 6738 | } |
| 6739 | |
| 6740 | gimplify_assign (unshare_expr (ivar), x, &llist[0]); |
| 6741 | |
| 6742 | if (sctx.is_simt) |
| 6743 | { |
| 6744 | if (!simt_lane) |
| 6745 | simt_lane = create_tmp_var (unsigned_type_node); |
| 6746 | x = build_call_expr_internal_loc |
| 6747 | (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY, |
| 6748 | TREE_TYPE (ivar), 2, ivar, simt_lane); |
| 6749 | /* Make sure x is evaluated unconditionally. */ |
| 6750 | tree bfly_var = create_tmp_var (TREE_TYPE (ivar)); |
| 6751 | gimplify_assign (bfly_var, x, &llist[2]); |
| 6752 | x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var); |
| 6753 | gimplify_assign (ivar, x, &llist[2]); |
| 6754 | } |
| 6755 | tree ivar2 = ivar; |
| 6756 | tree ref2 = ref; |
| 6757 | if (is_truth_op) |
| 6758 | { |
| 6759 | tree zero = build_zero_cst (TREE_TYPE (ivar)); |
| 6760 | ivar2 = fold_build2_loc (clause_loc, NE_EXPR, |
| 6761 | boolean_type_node, ivar, |
| 6762 | zero); |
| 6763 | ref2 = fold_build2_loc (clause_loc, NE_EXPR, |
| 6764 | boolean_type_node, ref, |
| 6765 | zero); |
| 6766 | } |
| 6767 | x = build2 (code, TREE_TYPE (ref), ref2, ivar2); |
| 6768 | if (is_truth_op) |
| 6769 | x = fold_convert (TREE_TYPE (ref), x); |
| 6770 | ref = build_outer_var_ref (var, ctx); |
| 6771 | gimplify_assign (ref, x, &llist[1]); |
| 6772 | |
| 6773 | } |
| 6774 | else |
| 6775 | { |
| 6776 | lower_private_allocate (var, new_var, allocator, |
| 6777 | allocate_ptr, ilist, ctx, |
| 6778 | is_ref: false, NULL_TREE); |
| 6779 | if (omp_privatize_by_reference (decl: var) && is_simd) |
| 6780 | handle_simd_reference (loc: clause_loc, new_vard, ilist); |
| 6781 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
| 6782 | && OMP_CLAUSE_REDUCTION_INSCAN (c)) |
| 6783 | break; |
| 6784 | gimplify_assign (new_var, x, ilist); |
| 6785 | if (is_simd) |
| 6786 | { |
| 6787 | tree ref = build_outer_var_ref (var, ctx); |
| 6788 | tree new_var2 = new_var; |
| 6789 | tree ref2 = ref; |
| 6790 | if (is_truth_op) |
| 6791 | { |
| 6792 | tree zero = build_zero_cst (TREE_TYPE (new_var)); |
| 6793 | new_var2 |
| 6794 | = fold_build2_loc (clause_loc, NE_EXPR, |
| 6795 | boolean_type_node, new_var, |
| 6796 | zero); |
| 6797 | ref2 = fold_build2_loc (clause_loc, NE_EXPR, |
| 6798 | boolean_type_node, ref, |
| 6799 | zero); |
| 6800 | } |
| 6801 | x = build2 (code, TREE_TYPE (ref2), ref2, new_var2); |
| 6802 | if (is_truth_op) |
| 6803 | x = fold_convert (TREE_TYPE (new_var), x); |
| 6804 | ref = build_outer_var_ref (var, ctx); |
| 6805 | gimplify_assign (ref, x, dlist); |
| 6806 | } |
| 6807 | if (allocator) |
| 6808 | goto do_dtor; |
| 6809 | } |
| 6810 | } |
| 6811 | break; |
| 6812 | |
| 6813 | default: |
| 6814 | gcc_unreachable (); |
| 6815 | } |
| 6816 | } |
| 6817 | } |
| 6818 | if (tskred_avar) |
| 6819 | { |
| 6820 | tree clobber = build_clobber (TREE_TYPE (tskred_avar)); |
| 6821 | gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber)); |
| 6822 | } |
| 6823 | |
| 6824 | if (known_eq (sctx.max_vf, 1U)) |
| 6825 | { |
| 6826 | sctx.is_simt = false; |
| 6827 | if (ctx->lastprivate_conditional_map) |
| 6828 | { |
| 6829 | if (gimple_omp_for_combined_into_p (g: ctx->stmt)) |
| 6830 | { |
| 6831 | /* Signal to lower_omp_1 that it should use parent context. */ |
| 6832 | ctx->combined_into_simd_safelen1 = true; |
| 6833 | for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) |
| 6834 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
| 6835 | && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)) |
| 6836 | { |
| 6837 | tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx); |
| 6838 | omp_context *outer = ctx->outer; |
| 6839 | if (gimple_code (g: outer->stmt) == GIMPLE_OMP_SCAN) |
| 6840 | outer = outer->outer; |
| 6841 | tree *v = ctx->lastprivate_conditional_map->get (k: o); |
| 6842 | tree po = lookup_decl (OMP_CLAUSE_DECL (c), ctx: outer); |
| 6843 | tree *pv = outer->lastprivate_conditional_map->get (k: po); |
| 6844 | *v = *pv; |
| 6845 | } |
| 6846 | } |
| 6847 | else |
| 6848 | { |
| 6849 | /* When not vectorized, treat lastprivate(conditional:) like |
| 6850 | normal lastprivate, as there will be just one simd lane |
| 6851 | writing the privatized variable. */ |
| 6852 | delete ctx->lastprivate_conditional_map; |
| 6853 | ctx->lastprivate_conditional_map = NULL; |
| 6854 | } |
| 6855 | } |
| 6856 | } |
| 6857 | |
| 6858 | if (nonconst_simd_if) |
| 6859 | { |
| 6860 | if (sctx.lane == NULL_TREE) |
| 6861 | { |
| 6862 | sctx.idx = create_tmp_var (unsigned_type_node); |
| 6863 | sctx.lane = create_tmp_var (unsigned_type_node); |
| 6864 | } |
| 6865 | /* FIXME: For now. */ |
| 6866 | sctx.is_simt = false; |
| 6867 | } |
| 6868 | |
| 6869 | if (sctx.lane || sctx.is_simt) |
| 6870 | { |
| 6871 | uid = create_tmp_var (ptr_type_node, "simduid" ); |
| 6872 | /* Don't want uninit warnings on simduid, it is always uninitialized, |
| 6873 | but we use it not for the value, but for the DECL_UID only. */ |
| 6874 | suppress_warning (uid, OPT_Wuninitialized); |
| 6875 | c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_); |
| 6876 | OMP_CLAUSE__SIMDUID__DECL (c) = uid; |
| 6877 | OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gs: ctx->stmt); |
| 6878 | gimple_omp_for_set_clauses (gs: ctx->stmt, clauses: c); |
| 6879 | } |
| 6880 | /* Emit calls denoting privatized variables and initializing a pointer to |
| 6881 | structure that holds private variables as fields after ompdevlow pass. */ |
| 6882 | if (sctx.is_simt) |
| 6883 | { |
| 6884 | sctx.simt_eargs[0] = uid; |
| 6885 | gimple *g |
| 6886 | = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs); |
| 6887 | gimple_call_set_lhs (gs: g, lhs: uid); |
| 6888 | gimple_seq_add_stmt (ilist, g); |
| 6889 | sctx.simt_eargs.release (); |
| 6890 | |
| 6891 | simtrec = create_tmp_var (ptr_type_node, ".omp_simt" ); |
| 6892 | g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid); |
| 6893 | gimple_call_set_lhs (gs: g, lhs: simtrec); |
| 6894 | gimple_seq_add_stmt (ilist, g); |
| 6895 | } |
| 6896 | if (sctx.lane) |
| 6897 | { |
| 6898 | gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, |
| 6899 | 2 + (nonconst_simd_if != NULL), |
| 6900 | uid, integer_zero_node, |
| 6901 | nonconst_simd_if); |
| 6902 | gimple_call_set_lhs (gs: g, lhs: sctx.lane); |
| 6903 | gimple_stmt_iterator gsi = gsi_start (seq&: *gimple_omp_body_ptr (gs: ctx->stmt)); |
| 6904 | gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT); |
| 6905 | g = gimple_build_assign (sctx.lane, INTEGER_CST, |
| 6906 | build_int_cst (unsigned_type_node, 0)); |
| 6907 | gimple_seq_add_stmt (ilist, g); |
| 6908 | if (sctx.lastlane) |
| 6909 | { |
| 6910 | g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE, |
| 6911 | 2, uid, sctx.lane); |
| 6912 | gimple_call_set_lhs (gs: g, lhs: sctx.lastlane); |
| 6913 | gimple_seq_add_stmt (dlist, g); |
| 6914 | gimple_seq_add_seq (dlist, llist[3]); |
| 6915 | } |
| 6916 | /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */ |
| 6917 | if (llist[2]) |
| 6918 | { |
| 6919 | tree simt_vf = create_tmp_var (unsigned_type_node); |
| 6920 | g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0); |
| 6921 | gimple_call_set_lhs (gs: g, lhs: simt_vf); |
| 6922 | gimple_seq_add_stmt (dlist, g); |
| 6923 | |
| 6924 | tree t = build_int_cst (unsigned_type_node, 1); |
| 6925 | g = gimple_build_assign (simt_lane, INTEGER_CST, t); |
| 6926 | gimple_seq_add_stmt (dlist, g); |
| 6927 | |
| 6928 | t = build_int_cst (unsigned_type_node, 0); |
| 6929 | g = gimple_build_assign (sctx.idx, INTEGER_CST, t); |
| 6930 | gimple_seq_add_stmt (dlist, g); |
| 6931 | |
| 6932 | tree body = create_artificial_label (UNKNOWN_LOCATION); |
| 6933 | tree = create_artificial_label (UNKNOWN_LOCATION); |
| 6934 | tree end = create_artificial_label (UNKNOWN_LOCATION); |
| 6935 | gimple_seq_add_stmt (dlist, gimple_build_goto (dest: header)); |
| 6936 | gimple_seq_add_stmt (dlist, gimple_build_label (label: body)); |
| 6937 | |
| 6938 | gimple_seq_add_seq (dlist, llist[2]); |
| 6939 | |
| 6940 | g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node); |
| 6941 | gimple_seq_add_stmt (dlist, g); |
| 6942 | |
| 6943 | gimple_seq_add_stmt (dlist, gimple_build_label (label: header)); |
| 6944 | g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end); |
| 6945 | gimple_seq_add_stmt (dlist, g); |
| 6946 | |
| 6947 | gimple_seq_add_stmt (dlist, gimple_build_label (label: end)); |
| 6948 | } |
| 6949 | for (int i = 0; i < 2; i++) |
| 6950 | if (llist[i]) |
| 6951 | { |
| 6952 | tree vf = create_tmp_var (unsigned_type_node); |
| 6953 | g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid); |
| 6954 | gimple_call_set_lhs (gs: g, lhs: vf); |
| 6955 | gimple_seq *seq = i == 0 ? ilist : dlist; |
| 6956 | gimple_seq_add_stmt (seq, g); |
| 6957 | tree t = build_int_cst (unsigned_type_node, 0); |
| 6958 | g = gimple_build_assign (sctx.idx, INTEGER_CST, t); |
| 6959 | gimple_seq_add_stmt (seq, g); |
| 6960 | tree body = create_artificial_label (UNKNOWN_LOCATION); |
| 6961 | tree = create_artificial_label (UNKNOWN_LOCATION); |
| 6962 | tree end = create_artificial_label (UNKNOWN_LOCATION); |
| 6963 | gimple_seq_add_stmt (seq, gimple_build_goto (dest: header)); |
| 6964 | gimple_seq_add_stmt (seq, gimple_build_label (label: body)); |
| 6965 | gimple_seq_add_seq (seq, llist[i]); |
| 6966 | t = build_int_cst (unsigned_type_node, 1); |
| 6967 | g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t); |
| 6968 | gimple_seq_add_stmt (seq, g); |
| 6969 | gimple_seq_add_stmt (seq, gimple_build_label (label: header)); |
| 6970 | g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end); |
| 6971 | gimple_seq_add_stmt (seq, g); |
| 6972 | gimple_seq_add_stmt (seq, gimple_build_label (label: end)); |
| 6973 | } |
| 6974 | } |
| 6975 | if (sctx.is_simt) |
| 6976 | { |
| 6977 | gimple_seq_add_seq (dlist, sctx.simt_dlist); |
| 6978 | gimple *g |
| 6979 | = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec); |
| 6980 | gimple_seq_add_stmt (dlist, g); |
| 6981 | } |
| 6982 | |
| 6983 | /* The copyin sequence is not to be executed by the main thread, since |
| 6984 | that would result in self-copies. Perhaps not visible to scalars, |
| 6985 | but it certainly is to C++ operator=. */ |
| 6986 | if (copyin_seq) |
| 6987 | { |
| 6988 | x = build_call_expr (builtin_decl_explicit (fncode: BUILT_IN_OMP_GET_THREAD_NUM), |
| 6989 | 0); |
| 6990 | x = build2 (NE_EXPR, boolean_type_node, x, |
| 6991 | build_int_cst (TREE_TYPE (x), 0)); |
| 6992 | x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL); |
| 6993 | gimplify_and_add (x, ilist); |
| 6994 | } |
| 6995 | |
| 6996 | /* If any copyin variable is passed by reference, we must ensure the |
| 6997 | master thread doesn't modify it before it is copied over in all |
| 6998 | threads. Similarly for variables in both firstprivate and |
| 6999 | lastprivate clauses we need to ensure the lastprivate copying |
| 7000 | happens after firstprivate copying in all threads. And similarly |
| 7001 | for UDRs if initializer expression refers to omp_orig. */ |
| 7002 | if (copyin_by_ref || lastprivate_firstprivate |
| 7003 | || (reduction_omp_orig_ref |
| 7004 | && !ctx->scan_inclusive |
| 7005 | && !ctx->scan_exclusive)) |
| 7006 | { |
| 7007 | /* Don't add any barrier for #pragma omp simd or |
| 7008 | #pragma omp distribute. */ |
| 7009 | if (!is_task_ctx (ctx) |
| 7010 | && (gimple_code (g: ctx->stmt) != GIMPLE_OMP_FOR |
| 7011 | || gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_FOR)) |
| 7012 | gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE)); |
| 7013 | } |
| 7014 | |
| 7015 | /* If max_vf is non-zero, then we can use only a vectorization factor |
| 7016 | up to the max_vf we chose. So stick it into the safelen clause. */ |
| 7017 | if (maybe_ne (a: sctx.max_vf, b: 0U)) |
| 7018 | { |
| 7019 | tree c = omp_find_clause (clauses: gimple_omp_for_clauses (gs: ctx->stmt), |
| 7020 | kind: OMP_CLAUSE_SAFELEN); |
| 7021 | poly_uint64 safe_len; |
| 7022 | if (c == NULL_TREE |
| 7023 | || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), value: &safe_len) |
| 7024 | && maybe_gt (safe_len, sctx.max_vf))) |
| 7025 | { |
| 7026 | c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN); |
| 7027 | OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node, |
| 7028 | sctx.max_vf); |
| 7029 | OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gs: ctx->stmt); |
| 7030 | gimple_omp_for_set_clauses (gs: ctx->stmt, clauses: c); |
| 7031 | } |
| 7032 | } |
| 7033 | } |
| 7034 | |
| 7035 | /* Create temporary variables for lastprivate(conditional:) implementation |
| 7036 | in context CTX with CLAUSES. */ |
| 7037 | |
static void
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
{
  /* Type of the iteration counter shared by every conditional lastprivate
     clause on this construct; determined lazily below.  */
  tree iter_type = NULL_TREE;
  /* Decl recorded in the pointer-holding _condtemp_ clause on non-simd
     constructs; refers to the storage for the counter slots.  */
  tree cond_ptr = NULL_TREE;
  /* Iterator temporary, recorded in a _condtemp_ clause that has the
     _CONDTEMP__ITER flag set.  */
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  tree next = *clauses;
  /* Walk all clauses, handling only conditional lastprivate ones.  */
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
      {
	if (is_simd)
	  {
	    /* On simd constructs _condtemp_ clauses already exist; pair
	       each conditional lastprivate clause with the next one in
	       chain order, with NEXT tracking the pairing position.  */
	    tree cc = omp_find_clause (clauses: next, kind: OMP_CLAUSE__CONDTEMP_);
	    gcc_assert (cc);
	    if (iter_type == NULL_TREE)
	      {
		/* First conditional clause seen: take the counter type
		   from the paired _condtemp_ decl, create the iterator
		   temporary and prepend a _condtemp_ clause flagged as
		   the iterator, then allocate the o -> v map.  */
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		*clauses = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    next = OMP_CLAUSE_CHAIN (cc);
	    /* Record the mapping from the privatized decl to its
	       condition temporary.  */
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (k: o, v);
	    continue;
	  }
	if (iter_type == NULL)
	  {
	    /* Non-simd, first conditional clause: pick the counter type —
	       the unsigned variant of the loop's iteration type for
	       GIMPLE_OMP_FOR, plain unsigned for sections.  */
	    if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR)
	      {
		struct omp_for_data fd;
		omp_extract_for_data (for_stmt: as_a <gomp_for *> (p: ctx->stmt), fd: &fd,
				      NULL);
		iter_type = unsigned_type_for (fd.iter_type);
	      }
	    else if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_SECTIONS)
	      iter_type = unsigned_type_node;
	    /* Reuse an existing _condtemp_ clause's decl as the buffer
	       pointer if present, otherwise create a fresh pointer
	       temporary and a new _condtemp_ clause for it.  */
	    tree c2 = omp_find_clause (clauses: *clauses, kind: OMP_CLAUSE__CONDTEMP_);
	    if (c2)
	      {
		cond_ptr
		  = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
	      }
	    else
	      {
		cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
		DECL_CONTEXT (cond_ptr) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
		DECL_CHAIN (cond_ptr) = ctx->block_vars;
		ctx->block_vars = cond_ptr;
		c2 = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
		OMP_CLAUSE_CHAIN (c2) = *clauses;
		*clauses = c2;
	      }
	    /* Create the iterator temporary and splice its _condtemp_
	       clause immediately after the pointer one.  */
	    iter_var = create_tmp_var_raw (iter_type);
	    DECL_CONTEXT (iter_var) = current_function_decl;
	    DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
	    DECL_CHAIN (iter_var) = ctx->block_vars;
	    ctx->block_vars = iter_var;
	    tree c3
	      = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	    OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
	    OMP_CLAUSE_DECL (c3) = iter_var;
	    OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
	    OMP_CLAUSE_CHAIN (c2) = c3;
	    ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  }
	/* One counter temporary per conditional lastprivate variable,
	   remembered in lastprivate_conditional_map keyed by the
	   privatized decl.  */
	tree v = create_tmp_var_raw (iter_type);
	DECL_CONTEXT (v) = current_function_decl;
	DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	DECL_CHAIN (v) = ctx->block_vars;
	ctx->block_vars = v;
	tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	ctx->lastprivate_conditional_map->put (k: o, v);
      }
}
| 7130 | |
| 7131 | |
| 7132 | /* Generate code to implement the LASTPRIVATE clauses. This is used for |
| 7133 | both parallel and workshare constructs. PREDICATE may be NULL if it's |
| 7134 | always true. BODY_P is the sequence to insert early initialization |
| 7135 | if needed, STMT_LIST is where the non-conditional lastprivate handling |
| 7136 | goes into and CSTMT_LIST is a sequence that needs to be run in a critical |
| 7137 | section. */ |
| 7138 | |
static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  /* Offset of the next conditional lastprivate slot inside the _condtemp_
     buffer: counted in bytes for a pointer-typed buffer, in elements
     otherwise.  */
  unsigned HOST_WIDE_INT conditional_off = 0;
  /* Statements to emit after LABEL, i.e. outside the PREDICATE-guarded
     region.  */
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses. */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (clauses: gimple_omp_parallel_clauses (gs: ctx->stmt),
				 kind: OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  /* On simd constructs, note whether this may be a SIMT lowering and grab
     the simduid decl used by the GOMP_SIMD_* internal functions.  */
  bool maybe_simt = false;
  if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    {
      maybe_simt = omp_find_clause (clauses: orig_clauses, kind: OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (clauses: orig_clauses, kind: OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      /* Guard all copy-out statements with a branch on PREDICATE:
	 if (PREDICATE) goto label_true; else goto label;  */
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  /* For SIMT, vote across the lanes whether any lane satisfies
	     the predicate and branch on the vote's result.  */
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (gs: g, lhs: c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label: label_true));
    }

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      /* Sequence the current clause's statements go into; normally
	 STMT_LIST, but conditional lastprivate redirects it.  */
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	{
	  gcc_assert (body_p);
	  /* Skip when using simd lanes (simduid present).  */
	  if (simduid)
	    goto next;
	  if (cond_ptr == NULL_TREE)
	    {
	      cond_ptr = omp_find_clause (clauses: orig_clauses, kind: OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	    }
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (k: o);
	  /* Zero the counter V early, in BODY_P; the comparison and the
	     copy-out below go into the critical section CSTMT_LIST.  */
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  this_stmt_list = cstmt_list;
	  tree mem;
	  /* Build a reference to this variable's slot in the _condtemp_
	     buffer: a MEM_REF at a byte offset for a pointer buffer,
	     an ARRAY_REF by element index otherwise.  */
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	    {
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
					   conditional_off));
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    }
	  else
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  /* Emit: if (v > slot) { slot = v; <copy-out>; } so the slot
	     keeps the highest counter value seen.  */
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (label: lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	}
      else if (predicate
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	/* For safelen(1) combined simd, emit the copy-out after the
	   predicate guard instead.  */
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      /* On taskloop, firstprivate+lastprivate decls live in the
		 enclosing task context.  */
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx: ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		suppress_warning (new_var, OPT_Wuninitialized);
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute (attr_name: "omp simd array" ,
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  /* The private copy lives in an "omp simd array";
		     compute the last lane index once per construct and
		     read the copy from that lane.  */
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (gs: g, lhs: lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  TREE_THIS_NOTRAP (new_var) = 1;
		}
	    }
	  else if (maybe_simt)
	    {
	      /* SIMT: fetch the value from the last lane that satisfied
		 the predicate via GOMP_SIMT_XCHG_IDX.  */
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (gs: g, lhs: simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  /* Lower and emit any statement sequence deferred on the
	     clause before the copy-out.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  /* Pick the target of the copy-out: for the taskloop IV prefer
	     the original variable two contexts up when it is global,
	     otherwise an outer context reference.  */
	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	    {
	      tree ovar = maybe_lookup_decl_in_outer_ctx (decl: var,
							  ctx: ctx->outer->outer);
	      if (is_global_var (t: ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, code: OMP_CLAUSE_LASTPRIVATE);
	  if (omp_privatize_by_reference (decl: var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  /* Copy the private value back to the original list item using
	     the language-specific assignment hook.  */
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	  if (lab2)
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (label: lab2));
	}

     next:
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (clauses: gimple_omp_parallel_clauses (gs: ctx->stmt),
			       kind: OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  /* Close the predicate guard, then emit statements that belong after
     the guarded region.  */
  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);
}
| 7413 | |
/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  PRIVATE_MARKER, if present, is emitted just before
   the fork.  Generate the before-loop forking sequence in FORK_SEQ
   and the after-loop joining sequence to JOIN_SEQ.  The general form
   of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */

static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *private_marker, gcall *join,
		       gimple_seq *fork_seq, gimple_seq *join_seq,
		       omp_context *ctx)
{
  /* The four sub-sequences built per reduction clause; they are
     stitched around the fork/join markers at the end.  */
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  /* Integer codes naming the GOACC_REDUCTION sub-operation; built
     lazily once we see the first reduction clause.  */
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  /* Running byte offset of each reduction variable within the
     per-gang reduction buffer.  */
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));
	/* Likewise, on OpenACC 'kernels' decomposed parts.  */
	gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (var: orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	/* Canonicalize the reduction operator: '-' accumulates like
	   '+', and the short-circuit logical ops are reduced with
	   their bitwise counterparts (operands are normalized to
	   0/1 by the target expansion).  */
	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (g: probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (gs: probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       regions.  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (stmt: probe->stmt))
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (gs: probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* An outer construct reduces the same variable:
			 feed its private copy in and out of this
			 axis's reduction.  */
		      incoming = outgoing = lookup_decl (var: orig, ctx: probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* The variable is privatized further out, so no
			 mapping back to the host copy may be used.  */
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (g: outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (var: orig, ctx: outer) && !is_private)
	      {
		/* The variable is mapped onto the offload target:
		   receive the mapped location for the final
		   combination, and start from the operator's
		   identity value.  */
		ref_to_res = build_receiver_ref (var: orig, by_ref: false, ctx: outer);
		if (omp_privatize_by_reference (decl: orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, op: rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (var: orig, ctx: c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_privatize_by_reference (decl: orig))
	  {
	    /* Reference-typed reduction: materialize local storage
	       (outermost axis only) and three pointer temporaries so
	       each GOACC_REDUCTION call sees a distinct dereference
	       (v1 for setup, v2 for init, v3 for fini).  */
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  /* Note that 'var' might be a mem ref.  */
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
	/* Round OFFSET up to this variable's natural alignment.  */
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	/* The four phases of a single reduction; each call carries
	   (phase, mapped result, value, axis, operator, buffer
	   offset) and is expanded by the backend.  */
	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  unshare_expr (incoming),
					  level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  unshare_expr (v1), level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  unshare_expr (v2), level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, unshare_expr (v3),
					  level, op, off);

	gimplify_assign (unshare_expr (v1), setup_call, &before_fork);
	gimplify_assign (unshare_expr (v2), init_call, &after_fork);
	gimplify_assign (unshare_expr (v3), fini_call, &before_join);
	gimplify_assign (unshare_expr (outgoing), teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (private_marker)
    gimple_seq_add_stmt (fork_seq, private_marker);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
| 7651 | |
/* Generate code to implement the REDUCTION clauses, append it
   to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
   that should be emitted also inside of the critical section,
   in that case clear *CLIST afterwards, otherwise leave it as is
   and let the caller emit it itself.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (stmt: ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  COUNT ends up as 1 for
     the atomic case, -1 or >= 2 for the locked case.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  /* Array-section reduction: peel the MEM_REF down to the
	     underlying base decl.  */
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (INDIRECT_REF_P (var))
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (expr: var))
	    {
	      /* VLA bases live behind their DECL_VALUE_EXPR pointer;
		 strip down to the pointer decl itself.  */
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (INDIRECT_REF_P (var));
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c)
	  && omp_privatize_by_reference (decl: var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, code: ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
      if (count == 1)
	{
	  /* Single scalar reduction: emit one relaxed OMP_ATOMIC
	     update of the outer variable instead of taking the
	     global reduction lock.  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      /* Normalize both operands to boolean before combining
		 with the bitwise form of &&/||.  */
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
			       new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array-section reduction: emit an element-by-element
	     combining loop into SUB_SEQ (executed under the lock).  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      /* Fold a runtime base adjustment into BIAS, remapping
		 its decl into this context if possible.  */
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (var: b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (decl: b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (INDIRECT_REF_P (d))
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_privatize_by_reference (var)
			  && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_privatize_by_reference (decl: var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      /* The loop bound is itself a variable; find its copy in
		 this or an enclosing context and gimplify it.  */
	      tree t = maybe_lookup_decl (var: v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (decl: v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  /* NEW_VAR and REF become roving element pointers over the
	     private and outer arrays respectively.  */
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (label: body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: splice in the combiner with
		 the placeholders bound to the current elements.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      /* Built-in operator: *out = *out OP *priv.  */
	      tree out2 = out;
	      tree priv2 = priv;
	      if (is_truth_op)
		{
		  tree zero = build_zero_cst (TREE_TYPE (out));
		  out2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, out, zero);
		  priv2 = fold_build2_loc (clause_loc, NE_EXPR,
					   boolean_type_node, priv, zero);
		}
	      x = build2 (code, TREE_TYPE (out2), out2, priv2);
	      if (is_truth_op)
		x = fold_convert (TREE_TYPE (out), x);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  /* Advance both element pointers and the index, loop while
	     i <= max index.  */
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (label: end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Scalar user-defined reduction: bind the placeholder to the
	     outer variable and splice in the lowered combiner.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_privatize_by_reference (decl: var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Scalar built-in operator: outer = outer OP private.  */
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Wrap all the accumulated combining code (and any caller-supplied
     CLIST) in the global GOMP_atomic_start/end critical section.  */
  stmt = gimple_build_call (builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
| 7956 | |
| 7957 | |
| 7958 | /* Generate code to implement the COPYPRIVATE clauses. */ |
| 7959 | |
| 7960 | static void |
| 7961 | lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist, |
| 7962 | omp_context *ctx) |
| 7963 | { |
| 7964 | tree c; |
| 7965 | |
| 7966 | for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) |
| 7967 | { |
| 7968 | tree var, new_var, ref, x; |
| 7969 | bool by_ref; |
| 7970 | location_t clause_loc = OMP_CLAUSE_LOCATION (c); |
| 7971 | |
| 7972 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE) |
| 7973 | continue; |
| 7974 | |
| 7975 | var = OMP_CLAUSE_DECL (c); |
| 7976 | by_ref = use_pointer_for_field (decl: var, NULL); |
| 7977 | |
| 7978 | ref = build_sender_ref (var, ctx); |
| 7979 | x = new_var = lookup_decl_in_outer_ctx (decl: var, ctx); |
| 7980 | if (by_ref) |
| 7981 | { |
| 7982 | x = build_fold_addr_expr_loc (clause_loc, new_var); |
| 7983 | x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x); |
| 7984 | } |
| 7985 | gimplify_assign (ref, x, slist); |
| 7986 | |
| 7987 | ref = build_receiver_ref (var, by_ref: false, ctx); |
| 7988 | if (by_ref) |
| 7989 | { |
| 7990 | ref = fold_convert_loc (clause_loc, |
| 7991 | build_pointer_type (TREE_TYPE (new_var)), |
| 7992 | ref); |
| 7993 | ref = build_fold_indirect_ref_loc (clause_loc, ref); |
| 7994 | } |
| 7995 | if (omp_privatize_by_reference (decl: var)) |
| 7996 | { |
| 7997 | ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref); |
| 7998 | ref = build_simple_mem_ref_loc (clause_loc, ref); |
| 7999 | new_var = build_simple_mem_ref_loc (clause_loc, new_var); |
| 8000 | } |
| 8001 | x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref); |
| 8002 | gimplify_and_add (x, rlist); |
| 8003 | } |
| 8004 | } |
| 8005 | |
| 8006 | |
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  Stores into the
   communication record go to ILIST (before the region); copies back out
   go to OLIST (after the region).  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (g: ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* First dispatch: decide which clause kinds need any sender-side
	 code at all ('break' proceeds, 'continue' skips the clause).  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  /* Array-section reduction: strip down to the base decl; VLA
	     bases are not sent from here.  */
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (INDIRECT_REF_P (val)
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (expr: val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (decl: val, ctx: ctx_for_o);

      /* Global variables are directly visible in the region, so no
	 marshalling is needed (except for COPYIN, and some task
	 pointer/reference cases).  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (t: var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (decl: var);
      if (t)
	{
	  /* VAR stands in for a member access (this->field); use its
	     value expr, remapped to the outer context's base object.  */
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (decl: t, ctx: ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (x: var, from: t, to: o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield (key: (splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (obj: ctx->sender_decl, field: f);
	  if (use_pointer_for_field (decl: val, shared_ctx: ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (expr: val))
	continue;
      by_ref = use_pointer_for_field (decl: val, NULL);

      /* Second dispatch: decide the copy direction(s) for this clause
	 (DO_IN: parent -> record before the region; DO_OUT: record ->
	 parent after it).  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    /* The value may be read before the task ever initializes
	       it; don't warn about that.  */
	    suppress_warning (var);
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_privatize_by_reference (decl: val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (decl: val, shared_ctx: ctx);
	      else
		do_out = !(by_ref || omp_privatize_by_reference (decl: val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (var: val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (var: val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
| 8196 | |
| 8197 | /* Generate code to implement SHARED from the sender (aka parent) |
| 8198 | side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't |
| 8199 | list things that got automatically shared. */ |
| 8200 | |
static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  /* Nothing is shared through a data-sharing record if none was built.  */
  if (ctx->record_type == NULL)
    return;

  /* Walk the fields of the sender record (srecord_type, if distinct from
     record_type).  Each field's DECL_ABSTRACT_ORIGIN points back at the
     original shared variable.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      /* Skip fields with no origin, or whose origin is itself a field.  */
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      /* Skip variables that have no local copy in CTX, whose local copy
	 is not rewritten via a value expression, or that are handled via
	 an 'allocate' clause mapping.  */
      nvar = maybe_lookup_decl (var: ovar, ctx);
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (k: ovar)))
	continue;

      /* If CTX is a nested parallel directive, find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (decl: ovar, ctx);

      /* For member accesses through an artificial dummy variable,
	 substitute the outer context's version of the dummy into the
	 value expression.  */
      t = omp_member_access_dummy_var (decl: var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (decl: t, ctx);
	  if (o != t)
	    var = unshare_and_remap (x: var, from: t, to: o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (decl: ovar, shared_ctx: ctx))
	{
	  /* Shared by reference: send the address of VAR; no copy-out
	     needed, since the child writes through the pointer.  */
	  x = build_sender_ref (var: ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Shared by copy: assign into the record before the region
	     (ILIST) and copy back out after it (OLIST).  */
	  x = build_sender_ref (var: ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (var: ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
| 8274 | |
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */
| 8279 | |
static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  /* First two call arguments: the marker kind and the data-dependency
     variable.  The level count, the tag, and the optional gang static
     argument are pushed at the end.  */
  args.quick_push (obj: build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (obj: ddvar);
  /* Fold the partitioning clauses into TAG, counting each explicitly
     requested partitioning level (gang/worker/vector).  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  tag |= OLF_REDUCTION;
	  break;

	default:
	  /* Other clauses are irrelevant to partitioning.  */
	  continue;
	}
    }

  if (gang_static)
    {
      /* A DECL static argument must be mapped to its outer-context
	 reference before it can be used here.  */
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (var: gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* Sanity-check which compute construct encloses this loop.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (ctx: tgt))
    ;
  else if (is_oacc_kernels (ctx: tgt))
    /* Not using this loops handling inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (ctx: tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (ctx: tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (ctx: tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  /* Remaining arguments: level count, tag, optional gang static.  */
  args.quick_push (obj: build_int_cst (integer_type_node, levels));
  args.quick_push (obj: build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (obj: gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (g: call, location: loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
| 8398 | |
/* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, when
   non-null, gives the partitioning level of the enclosed region.  */
| 8401 | |
| 8402 | static void |
| 8403 | lower_oacc_loop_marker (location_t loc, tree ddvar, bool head, |
| 8404 | tree tofollow, gimple_seq *seq) |
| 8405 | { |
| 8406 | int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK |
| 8407 | : IFN_UNIQUE_OACC_TAIL_MARK); |
| 8408 | tree marker = build_int_cst (integer_type_node, marker_kind); |
| 8409 | int nargs = 2 + (tofollow != NULL_TREE); |
| 8410 | gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs, |
| 8411 | marker, ddvar, tofollow); |
| 8412 | gimple_set_location (g: call, location: loc); |
| 8413 | gimple_set_lhs (call, ddvar); |
| 8414 | gimple_seq_add_stmt (seq, call); |
| 8415 | } |
| 8416 | |
| 8417 | /* Generate the before and after OpenACC loop sequences. CLAUSES are |
| 8418 | the loop clauses, from which we extract reductions. Initialize |
| 8419 | HEAD and TAIL. */ |
| 8420 | |
static void
lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  /* The data-dependency variable threads through all the marker and
     fork/join calls, starting at zero.  */
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep" );
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  /* Emit the head marker and learn how many partitioning levels the
     loop might use.  */
  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, seq: head, ctx);

  /* Rewire the private marker call (if any) onto DDVAR.  */
  if (private_marker)
    {
      gimple_set_location (g: private_marker, location: loc);
      gimple_call_set_lhs (gs: private_marker, lhs: ddvar);
      gimple_call_set_arg (gs: private_marker, index: 1, arg: ddvar);
    }

  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  /* Emit one fork/join pair per partitioning level; forks accumulate
     onto HEAD while joins are prepended to TAIL so the levels nest.  */
  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* PLACE is -1 here; the partitioning dimension is filled in by a
	 later phase.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (g: fork, location: loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (g: join, location: loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, head: true,
				tofollow: build_int_cst (integer_type_node, count),
				seq: &fork_seq);
      lower_oacc_loop_marker (loc, ddvar, head: false,
			      tofollow: build_int_cst (integer_type_node, done),
			      seq: &join_seq);

      /* The private marker is only passed at the innermost level.  */
      lower_oacc_reductions (loc, clauses, level: place, inner,
			     fork, private_marker: (count == 1) ? private_marker : NULL,
			     join, fork_seq: &fork_seq, join_seq: &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, head: true, NULL_TREE, seq: head);
  lower_oacc_loop_marker (loc, ddvar, head: false, NULL_TREE, seq: tail);
}
| 8484 | |
| 8485 | /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW |
| 8486 | catch handler and return it. This prevents programs from violating the |
| 8487 | structured block semantics with throws. */ |
| 8488 | |
| 8489 | static gimple_seq |
| 8490 | maybe_catch_exception (gimple_seq body) |
| 8491 | { |
| 8492 | gimple *g; |
| 8493 | tree decl; |
| 8494 | |
| 8495 | if (!flag_exceptions) |
| 8496 | return body; |
| 8497 | |
| 8498 | if (lang_hooks.eh_protect_cleanup_actions != NULL) |
| 8499 | decl = lang_hooks.eh_protect_cleanup_actions (); |
| 8500 | else |
| 8501 | decl = builtin_decl_explicit (fncode: BUILT_IN_TRAP); |
| 8502 | |
| 8503 | g = gimple_build_eh_must_not_throw (decl); |
| 8504 | g = gimple_build_try (body, gimple_seq_alloc_with_stmt (stmt: g), |
| 8505 | GIMPLE_TRY_CATCH); |
| 8506 | |
| 8507 | return gimple_seq_alloc_with_stmt (stmt: g); |
| 8508 | } |
| 8509 | |
| 8510 | |
| 8511 | /* Routines to lower OMP directives into OMP-GIMPLE. */ |
| 8512 | |
| 8513 | /* If ctx is a worksharing context inside of a cancellable parallel |
| 8514 | region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN |
| 8515 | and conditional branch to parallel's cancel_label to handle |
| 8516 | cancellation in the implicit barrier. */ |
| 8517 | |
static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* A nowait region has no implicit barrier, hence nothing to cancel.  */
  if (gimple_omp_return_nowait_p (g: omp_return))
    return;
  /* Search outward for a cancellable parallel context; bail out as soon
     as an intervening construct other than taskgroup or scope appears.  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (g: outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	/* Give the GIMPLE_OMP_RETURN a boolean LHS (same type as
	   GOMP_cancel's return value), and branch to the parallel's
	   cancel label when the implicit barrier reports cancellation;
	   otherwise fall through.  */
	tree fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (g: omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (label: fallthru_label));
      }
    else if (gimple_code (g: outer->stmt) != GIMPLE_OMP_TASKGROUP
	     && gimple_code (g: outer->stmt) != GIMPLE_OMP_SCOPE)
      return;
}
| 8545 | |
| 8546 | /* Find the first task_reduction or reduction clause or return NULL |
| 8547 | if there are none. */ |
| 8548 | |
| 8549 | static inline tree |
| 8550 | omp_task_reductions_find_first (tree clauses, enum tree_code code, |
| 8551 | enum omp_clause_code ccode) |
| 8552 | { |
| 8553 | while (1) |
| 8554 | { |
| 8555 | clauses = omp_find_clause (clauses, kind: ccode); |
| 8556 | if (clauses == NULL_TREE) |
| 8557 | return NULL_TREE; |
| 8558 | if (ccode != OMP_CLAUSE_REDUCTION |
| 8559 | || code == OMP_TASKLOOP |
| 8560 | || OMP_CLAUSE_REDUCTION_TASK (clauses)) |
| 8561 | return clauses; |
| 8562 | clauses = OMP_CLAUSE_CHAIN (clauses); |
| 8563 | } |
| 8564 | } |
| 8565 | |
| 8566 | static void lower_omp_task_reductions (omp_context *, enum tree_code, tree, |
| 8567 | gimple_seq *, gimple_seq *); |
| 8568 | |
| 8569 | /* Lower the OpenMP sections directive in the current statement in GSI_P. |
| 8570 | CTX is the enclosing OMP context for the current statement. */ |
| 8571 | |
static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (p: gsi_stmt (i: *gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* If there are task reductions, prepend a _reductemp_ clause carrying
     a temporary for the reduction bookkeeping, and lower them first.  */
  tree rclauses
    = omp_task_reductions_find_first (clauses: gimple_omp_sections_clauses (gs: stmt),
				      code: OMP_SECTIONS, ccode: OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (gs: stmt);
      gimple_omp_sections_set_clauses (gs: stmt, clauses: c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (gs: stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (var: type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (gs: stmt);
  lower_lastprivate_conditional_clauses (clauses: clauses_ptr, ctx);

  lower_rec_input_clauses (clauses: gimple_omp_sections_clauses (gs: stmt),
			   ilist: &ilist, dlist: &dlist, ctx, NULL);

  /* The control variable communicates the currently active section.  */
  control = create_tmp_var (unsigned_type_node, ".section" );
  gimple_omp_sections_set_control (gs: stmt, control);

  /* Lower each GIMPLE_OMP_SECTION body in place, moving the lowered
     body out of the section statement and terminating each section
     with a GIMPLE_OMP_RETURN.  */
  new_body = gimple_omp_body (gs: stmt);
  gimple_omp_set_body (gs: stmt, NULL);
  tgsi = gsi_start (seq&: new_body);
  for (; !gsi_end_p (i: tgsi); gsi_next (i: &tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (i: tgsi);
      sctx = maybe_lookup_ctx (stmt: sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (gs: sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (gs: sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (gs: sec_start, NULL);

      if (gsi_one_before_end_p (i: tgsi))
	{
	  /* Lastprivate assignments attach to the last section.  */
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (clauses: gimple_omp_sections_clauses (gs: stmt), NULL,
				     body_p: &ilist, stmt_list: &l, cstmt_list: &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (g: sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  /* Non-task reductions; atomic ones are bracketed by
     GOMP_atomic_start/GOMP_atomic_end calls.  */
  olist = NULL;
  lower_reduction_clauses (clauses: gimple_omp_sections_clauses (gs: stmt), stmt_seqp: &olist,
			   clist: &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  /* Replace the sections statement with a new bind holding the whole
     expansion.  */
  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (bind_stmt: new_stmt, vars: ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind_stmt: bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the final body: ilist; sections stmt; sections switch;
     bind; continue; olist; [cancel label;] dlist; return; tred_dlist.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (label: ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (body: new_body);

  bool nowait = omp_find_clause (clauses: gimple_omp_sections_clauses (gs: stmt),
				 kind: OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, omp_return: t, body: &new_body);

  /* Only now point the _reductemp_ clause at the SSA name, after the
     lowering above used the underlying variable.  */
  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (bind_stmt: new_stmt, seq: new_body);
}
| 8703 | |
| 8704 | |
| 8705 | /* A subroutine of lower_omp_single. Expand the simple form of |
| 8706 | a GIMPLE_OMP_SINGLE, without a copyprivate clause: |
| 8707 | |
| 8708 | if (GOMP_single_start ()) |
| 8709 | BODY; |
| 8710 | [ GOMP_barrier (); ] -> unless 'nowait' is present. |
| 8711 | |
| 8712 | FIXME. It may be better to delay expanding the logic of this until |
| 8713 | pass_expand_omp. The expanded logic may make the job more difficult |
| 8714 | to a synchronization analysis pass. */ |
| 8715 | |
| 8716 | static void |
| 8717 | lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p) |
| 8718 | { |
| 8719 | location_t loc = gimple_location (g: single_stmt); |
| 8720 | tree tlabel = create_artificial_label (loc); |
| 8721 | tree flabel = create_artificial_label (loc); |
| 8722 | gimple *call, *cond; |
| 8723 | tree lhs, decl; |
| 8724 | |
| 8725 | decl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_SINGLE_START); |
| 8726 | lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl))); |
| 8727 | call = gimple_build_call (decl, 0); |
| 8728 | gimple_call_set_lhs (gs: call, lhs); |
| 8729 | gimple_seq_add_stmt (pre_p, call); |
| 8730 | |
| 8731 | cond = gimple_build_cond (EQ_EXPR, lhs, |
| 8732 | fold_convert_loc (loc, TREE_TYPE (lhs), |
| 8733 | boolean_true_node), |
| 8734 | tlabel, flabel); |
| 8735 | gimple_seq_add_stmt (pre_p, cond); |
| 8736 | gimple_seq_add_stmt (pre_p, gimple_build_label (label: tlabel)); |
| 8737 | gimple_seq_add_seq (pre_p, gimple_omp_body (gs: single_stmt)); |
| 8738 | gimple_seq_add_stmt (pre_p, gimple_build_label (label: flabel)); |
| 8739 | } |
| 8740 | |
| 8741 | |
| 8742 | /* A subroutine of lower_omp_single. Expand the simple form of |
| 8743 | a GIMPLE_OMP_SINGLE, with a copyprivate clause: |
| 8744 | |
| 8745 | #pragma omp single copyprivate (a, b, c) |
| 8746 | |
| 8747 | Create a new structure to hold copies of 'a', 'b' and 'c' and emit: |
| 8748 | |
| 8749 | { |
| 8750 | if ((copyout_p = GOMP_single_copy_start ()) == NULL) |
| 8751 | { |
| 8752 | BODY; |
| 8753 | copyout.a = a; |
| 8754 | copyout.b = b; |
| 8755 | copyout.c = c; |
| 8756 | GOMP_single_copy_end (©out); |
| 8757 | } |
| 8758 | else |
| 8759 | { |
| 8760 | a = copyout_p->a; |
| 8761 | b = copyout_p->b; |
| 8762 | c = copyout_p->c; |
| 8763 | } |
| 8764 | GOMP_barrier (); |
| 8765 | } |
| 8766 | |
| 8767 | FIXME. It may be better to delay expanding the logic of this until |
| 8768 | pass_expand_omp. The expanded logic may make the job more difficult |
| 8769 | to a synchronization analysis pass. */ |
| 8770 | |
static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (g: single_stmt);

  /* Sender side: the copyout record itself.  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o" );

  /* Receiver side: a pointer to the executing thread's record.  */
  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i" );

  /* L0: body + copy-out; L1: copy-in for other threads; L2: done.  */
  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* GOMP_single_copy_start returns NULL in the thread that runs the
     body and a pointer to the copyout record in all the others.  */
  bfn_decl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (label: l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (gs: single_stmt));

  /* Emit the copy-out assignments into PRE_P and collect the copy-in
     assignments for the other threads into COPYIN_SEQ.  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (clauses: gimple_omp_single_clauses (gs: single_stmt), slist: pre_p,
			     rlist: &copyin_seq, ctx);

  /* Publish the filled record to the other threads.  */
  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (label: l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (label: l2));
}
| 8821 | |
| 8822 | |
| 8823 | /* Expand code for an OpenMP single directive. */ |
| 8824 | |
static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (p: gsi_stmt (i: *gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  /* Replace the single statement with a bind that will hold the
     expanded form.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (clauses: gimple_omp_single_clauses (gs: single_stmt),
			   ilist: &bind_body, dlist: &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (gs: single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* A record type is only built when there is a copyprivate clause;
     pick the corresponding expansion.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, pre_p: &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, pre_p: &bind_body);

  gimple_omp_set_body (gs: single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (body: bind_body);

  bool nowait = omp_find_clause (clauses: gimple_omp_single_clauses (gs: single_stmt),
				 kind: OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, omp_return: g, body: &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the copyprivate record once it is dead.  */
      gimple_stmt_iterator gsi = gsi_start (seq&: bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind_stmt: bind, seq: bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
| 8879 | |
| 8880 | |
| 8881 | /* Lower code for an OMP scope directive. */ |
| 8882 | |
static void
lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *scope_stmt = gsi_stmt (i: *gsi_p);
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;
  gimple_seq tred_dlist = NULL;

  push_gimplify_context ();

  /* Replace the scope statement with a bind that will hold the
     expanded form.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;

  /* If there are task reductions, prepend a _reductemp_ clause and
     start the scope with a GOMP_scope_start call that takes the
     reduction temporary.  */
  tree rclauses
    = omp_task_reductions_find_first (clauses: gimple_omp_scope_clauses (gs: scope_stmt),
				      code: OMP_SCOPE, ccode: OMP_CLAUSE_REDUCTION);
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (gs: scope_stmt);
      gimple_omp_scope_set_clauses (gs: scope_stmt, clauses: c);
      lower_omp_task_reductions (ctx, OMP_SCOPE,
				 gimple_omp_scope_clauses (gs: scope_stmt),
				 &bind_body, &tred_dlist);
      rclauses = c;
      tree fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_SCOPE_START);
      gimple *stmt = gimple_build_call (fndecl, 1, temp);
      gimple_seq_add_stmt (&bind_body, stmt);
    }

  lower_rec_input_clauses (clauses: gimple_omp_scope_clauses (gs: scope_stmt),
			   ilist: &bind_body, dlist: &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (gs: scope_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, scope_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (gs: scope_stmt));

  gimple_omp_set_body (gs: scope_stmt, NULL);

  /* Non-task reductions; atomic ones are bracketed by
     GOMP_atomic_start/GOMP_atomic_end calls.  */
  gimple_seq clist = NULL;
  lower_reduction_clauses (clauses: gimple_omp_scope_clauses (gs: scope_stmt),
			   stmt_seqp: &bind_body, clist: &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
      gimple_seq_add_seq (&bind_body, clist);
      fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
    }

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (body: bind_body);

  bool nowait = omp_find_clause (clauses: gimple_omp_scope_clauses (gs: scope_stmt),
				 kind: OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  gimple_seq_add_seq (&bind_body_tail, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, omp_return: g, body: &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the data-sharing record once it is dead.  */
      gimple_stmt_iterator gsi = gsi_start (seq&: bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);

  gimple_bind_set_body (bind_stmt: bind, seq: bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
| 8972 | |
| 8973 | /* Lower code for an OMP dispatch directive. */ |
| 8974 | |
| 8975 | static void |
| 8976 | lower_omp_dispatch (gimple_stmt_iterator *gsi_p, omp_context *ctx) |
| 8977 | { |
| 8978 | tree block; |
| 8979 | gimple *stmt = gsi_stmt (i: *gsi_p); |
| 8980 | gbind *bind; |
| 8981 | |
| 8982 | push_gimplify_context (); |
| 8983 | |
| 8984 | block = make_node (BLOCK); |
| 8985 | bind = gimple_build_bind (NULL, NULL, block); |
| 8986 | gsi_replace (gsi_p, bind, true); |
| 8987 | |
| 8988 | lower_omp (gimple_omp_body_ptr (gs: stmt), ctx); |
| 8989 | gimple_bind_set_body (bind_stmt: bind, seq: maybe_catch_exception (body: gimple_omp_body (gs: stmt))); |
| 8990 | |
| 8991 | pop_gimplify_context (bind); |
| 8992 | |
| 8993 | gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars); |
| 8994 | BLOCK_VARS (block) = ctx->block_vars; |
| 8995 | } |
| 8996 | |
| 8997 | /* Expand code for an OpenMP master or masked directive. */ |
| 8998 | |
static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (i: *gsi_p);
  gbind *bind;
  location_t loc = gimple_location (g: stmt);
  gimple_seq tseq;
  /* For 'master' the filter is thread 0; for 'masked' it comes from
     the filter clause, defaulting to 0 when the clause is absent.  */
  tree filter = integer_zero_node;

  push_gimplify_context ();

  if (gimple_code (g: stmt) == GIMPLE_OMP_MASKED)
    {
      filter = omp_find_clause (clauses: gimple_omp_masked_clauses (gs: stmt),
				kind: OMP_CLAUSE_FILTER);
      if (filter)
	filter = fold_convert (integer_type_node,
			       OMP_CLAUSE_FILTER_EXPR (filter));
      else
	filter = integer_zero_node;
    }
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind_stmt: bind, stmt);

  /* Jump past the body unless omp_get_thread_num () == FILTER.  */
  bfn_decl = builtin_decl_explicit (fncode: BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, filter);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind_stmt: bind, seq: tseq);

  lower_omp (gimple_omp_body_ptr (gs: stmt), ctx);
  gimple_omp_set_body (gs: stmt, body: maybe_catch_exception (body: gimple_omp_body (gs: stmt)));
  gimple_bind_add_seq (bind_stmt: bind, seq: gimple_omp_body (gs: stmt));
  gimple_omp_set_body (gs: stmt, NULL);

  gimple_bind_add_stmt (bind_stmt: bind, stmt: gimple_build_label (label: lab));

  /* master/masked have no implicit barrier: emit a nowait return.  */
  gimple_bind_add_stmt (bind_stmt: bind, stmt: gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
| 9048 | |
| 9049 | /* Helper function for lower_omp_task_reductions. For a specific PASS |
| 9050 | find out the current clause it should be processed, or return false |
| 9051 | if all have been processed already. */ |
| 9052 | |
| 9053 | static inline bool |
| 9054 | omp_task_reduction_iterate (int pass, enum tree_code code, |
| 9055 | enum omp_clause_code ccode, tree *c, tree *decl, |
| 9056 | tree *type, tree *next) |
| 9057 | { |
| 9058 | for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), kind: ccode)) |
| 9059 | { |
| 9060 | if (ccode == OMP_CLAUSE_REDUCTION |
| 9061 | && code != OMP_TASKLOOP |
| 9062 | && !OMP_CLAUSE_REDUCTION_TASK (*c)) |
| 9063 | continue; |
| 9064 | *decl = OMP_CLAUSE_DECL (*c); |
| 9065 | *type = TREE_TYPE (*decl); |
| 9066 | if (TREE_CODE (*decl) == MEM_REF) |
| 9067 | { |
| 9068 | if (pass != 1) |
| 9069 | continue; |
| 9070 | } |
| 9071 | else |
| 9072 | { |
| 9073 | if (omp_privatize_by_reference (decl: *decl)) |
| 9074 | *type = TREE_TYPE (*type); |
| 9075 | if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type)))) |
| 9076 | continue; |
| 9077 | } |
| 9078 | *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), kind: ccode); |
| 9079 | return true; |
| 9080 | } |
| 9081 | *decl = NULL_TREE; |
| 9082 | *type = NULL_TREE; |
| 9083 | *next = NULL_TREE; |
| 9084 | return false; |
| 9085 | } |
| 9086 | |
/* Lower task_reduction and reduction clauses (the latter unless CODE is
   OMP_TASKGROUP only with task modifier).  Register mapping of those in
   START sequence and reducing them and unregister them in the END sequence.

   The layout of the registration array and of the per-thread record type
   built here must match what libgomp expects; see the format notes in
   libgomp/task.c.  */

static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  /* For worksharing/scope constructs nested in a cancellable parallel,
     remember that fact: extra bookkeeping fields and branches around the
     merge operations are needed below.  */
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (g: outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (g: outer->stmt) != GIMPLE_OMP_TASKGROUP
		 && gimple_code (g: outer->stmt) != GIMPLE_OMP_SCOPE)
	  break;
    }
  /* Build a record type holding, per reduction clause, the privatized
     variable followed by a bool flag saying whether it was initialized.
     Each thread gets one instance of this record.  */
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      /* Two leading fields for the cancellable case: a pointer and an
	 int used by the cancellation bookkeeping.  */
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				integer_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
      DECL_CONTEXT (field) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
      DECL_CONTEXT (ifield) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
    }
  /* Pass 0 adds constant-sized reductions, pass 1 variable-sized ones
     and array sections (see omp_task_reduction_iterate).  */
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       c: &c, decl: &decl, type: &type, next: &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, id: &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  /* BFIELD is the "has been initialized" flag right after the
	     privatized variable.  */
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  /* [0]: number of reductions.  */
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq stmts = NULL;
  gimple_seq seq = NULL;
  /* [1]: per-thread chunk size = sizeof (record_type) rounded up to a
     cache-line multiple, to avoid false sharing between threads.  */
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (nelems: 1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  /* [2]: required alignment; runtime later overwrites this slot with the
     base address of the allocated per-thread data.  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  /* [3] and [4]: runtime-owned slots, initialized to -1 and 0.  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE, lab7 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      /* For worksharing constructs or scope, only perform it in the master
	 thread, with the exception of cancelled implicit barriers - then only
	 handle the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (fncode: BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (gs: g, lhs: thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (gs: ctx->stmt);
	  else if (code == OMP_SECTIONS)
	    c = gimple_omp_sections_clauses (gs: ctx->stmt);
	  else /* if (code == OMP_SCOPE) */
	    c = gimple_omp_scope_clauses (gs: ctx->stmt);
	  /* The _REDUCTEMP_ clause decl is non-zero iff the construct was
	     cancelled; from here on CANCELLABLE is that decl.  */
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (label: lab5));
	  /* Cancelled: reduce only the current thread's chunk, i.e. loop
	     from thr_num to thr_num + 1.  */
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (dest: lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (label: lab6));
	}
      /* Not cancelled: only the master thread performs the reductions.  */
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (label: lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (fncode: BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (gs: g, lhs: num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (label: lab3));
    }
  else
    {
      /* For parallel, the thread count was stashed by the runtime in the
	 _REDUCTEMP_ temporary.  */
      tree c = omp_find_clause (clauses: gimple_omp_parallel_clauses (gs: ctx->stmt),
			       kind: OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  /* DATA walks the per-thread chunks; slot [2] was overwritten by the
     runtime with the base address of the allocated block.  */
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  if (code == OMP_TASKLOOP)
    {
      /* For taskloop the runtime may not have allocated anything; skip
	 the whole reduction loop if DATA is NULL.  */
      lab7 = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, data,
			     build_zero_cst (pointer_sized_int_node),
			     lab1, lab7);
      gimple_seq_add_stmt (end, g);
    }
  gimple_seq_add_stmt (end, gimple_build_label (label: lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    /* Skip over the two bookkeeping fields added above.  */
    field = DECL_CHAIN (DECL_CHAIN (field));
  /* Walk the clauses again in the same two-pass order so CNT and FIELD
     stay in sync with the record layout built earlier.  For each clause,
     fill the START registration entries and emit the END merge code.  */
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       c: &c, decl: &decl, type: &type, next: &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array section: peel the address computation down to the
		 underlying decl, looking through VALUE_EXPRs of
		 variable-sized vars.  */
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (INDIRECT_REF_P (var))
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (expr: var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (INDIRECT_REF_P (var));
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (decl: var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (INDIRECT_REF_P (v))
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (decl: b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (decl: var, ctx);
	      if (!omp_privatize_by_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  /* Registration entry [7 + cnt*3]: address of the original
	     (shared) variable.  */
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  /* Entry [7 + cnt*3 + 1]: byte offset of this reduction's field
	     within the per-thread record.  */
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (k: c, v: cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  /* END: compute COND, the "was initialized" flag for this thread's
	     instance of the reduction.  */
	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL
	      || code == OMP_FOR
	      || code == OMP_SECTIONS
	      || code == OMP_SCOPE)
	    /* In parallel, worksharing or scope all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (label: lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (label: lab5));
	    }

	  /* NEW_VAR is this thread's privatized copy inside the record.  */
	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF
	      && omp_privatize_by_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array section: emit an element-wise merge loop from 0 up
		 to the section length V.  */
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (decl: v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (label: body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* User-defined reduction: splice in the combiner with the
		     placeholders bound to OUT and PRIV.  */
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (label: lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (cancellable)
		    gimple_seq_add_stmt (end, gimple_build_label (label: lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  /* Built-in operator reduction: OUT = OUT rcode PRIV.  */
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      /* Advance both pointers and the index; loop while i <= v.  */
	      gimple *g
		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (label: endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Scalar user-defined reduction.  Temporarily rebind the
		 placeholder to REF and the privatized decl to NEW_VAR,
		 lower the combiner, then restore the previous bindings.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (label: lab5));
		}
	      if (omp_privatize_by_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      tree d = maybe_lookup_decl (var: decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_privatize_by_reference (decl: var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (cancellable)
		gimple_seq_add_stmt (end, gimple_build_label (label: lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      /* Scalar built-in operator reduction.  */
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (label: lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  /* Finally, register the array with the runtime in START...  */
  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      /* Non-taskgroup constructs pass the array's address through the
	 _REDUCTEMP_ clause temporary instead of an explicit call.  */
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (gs: ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (gs: ctx->stmt);
      else if (code == OMP_SCOPE)
	c = gimple_omp_scope_clauses (gs: ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (gs: ctx->stmt);
      c = omp_find_clause (clauses: c, kind: OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  /* ... and close the END loop: advance DATA to the next thread's chunk,
     bump IDX and iterate while idx != num_thr_sz.  */
  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (label: lab2));
  /* Unregister the reductions with the runtime.  */
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (fncode: bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  if (lab7)
    gimple_seq_add_stmt (end, gimple_build_label (label: lab7));
  /* Clobber AVAR (volatile empty-constructor store) so its stack slot can
     be reused after the reductions are done.  */
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}
| 9676 | |
| 9677 | /* Expand code for an OpenMP taskgroup directive. */ |
| 9678 | |
| 9679 | static void |
| 9680 | lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx) |
| 9681 | { |
| 9682 | gimple *stmt = gsi_stmt (i: *gsi_p); |
| 9683 | gcall *x; |
| 9684 | gbind *bind; |
| 9685 | gimple_seq dseq = NULL; |
| 9686 | tree block = make_node (BLOCK); |
| 9687 | |
| 9688 | bind = gimple_build_bind (NULL, NULL, block); |
| 9689 | gsi_replace (gsi_p, bind, true); |
| 9690 | gimple_bind_add_stmt (bind_stmt: bind, stmt); |
| 9691 | |
| 9692 | push_gimplify_context (); |
| 9693 | |
| 9694 | x = gimple_build_call (builtin_decl_explicit (fncode: BUILT_IN_GOMP_TASKGROUP_START), |
| 9695 | 0); |
| 9696 | gimple_bind_add_stmt (bind_stmt: bind, stmt: x); |
| 9697 | |
| 9698 | lower_omp_task_reductions (ctx, code: OMP_TASKGROUP, |
| 9699 | clauses: gimple_omp_taskgroup_clauses (gs: stmt), |
| 9700 | start: gimple_bind_body_ptr (bind_stmt: bind), end: &dseq); |
| 9701 | |
| 9702 | lower_omp (gimple_omp_body_ptr (gs: stmt), ctx); |
| 9703 | gimple_bind_add_seq (bind_stmt: bind, seq: gimple_omp_body (gs: stmt)); |
| 9704 | gimple_omp_set_body (gs: stmt, NULL); |
| 9705 | |
| 9706 | gimple_bind_add_seq (bind_stmt: bind, seq: dseq); |
| 9707 | |
| 9708 | pop_gimplify_context (bind); |
| 9709 | |
| 9710 | gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars); |
| 9711 | BLOCK_VARS (block) = ctx->block_vars; |
| 9712 | } |
| 9713 | |
| 9714 | |
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.

   First merge adjacent standalone "#pragma omp ordered depend(sink:...)"
   constructs into a single one, then fold all the sink dependence
   vectors into one canonical vector (see the large comment below for
   the algorithm).  If all clauses end up removed, the directive is
   replaced by a GIMPLE_NOP.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  /* Sink clauses are only meaningful inside an enclosing OMP for loop.  */
  if (!ctx->outer || gimple_code (g: ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (gs: ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (for_stmt: as_a <gomp_for *> (p: ctx->outer->stmt), fd: &fd, loops);
  /* Nothing to fold unless the loop has an ordered(n) clause.  */
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
      && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (i: &gsi);
      while (!gsi_end_p (i: gsi))
	{
	  gimple *stmt = gsi_stmt (i: gsi);
	  /* Debug stmts and nops between the ordered constructs do not
	     break adjacency; just step over them.  */
	  if (is_gimple_debug (gs: stmt)
	      || gimple_code (g: stmt) == GIMPLE_OMP_NOP)
	    {
	      gsi_next (i: &gsi);
	      continue;
	    }
	  if (gimple_code (g: stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (p: stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt: ord_stmt2);
	  /* Only merge another standalone depend(sink:...) ordered.  */
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
	      || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
	    break;
	  /* Append the second construct's clauses to ours and delete it.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  /* folded_deps[0 .. len-1] is the candidate folded vector;
     folded_deps[len .. 2*len-2] temporarily holds the tail of the
     vector currently being examined.  */
  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS);
      if (OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
	goto next_ordered_clause;

      tree vec;
      /* Walk the TREE_LIST encoding one sink dependence vector,
	 one element per loop dimension.  */
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (x: wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (x: wi::abs (x: offset),
				  y: wi::abs (x: wi::to_wide (t: fd.loops[i].step)),
				  sgn: UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
			  "ignoring %<sink%> clause with offset that is not "
			  "a multiple of the loop step" );
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  /* The first offset must point backwards in iteration
		     space, otherwise the dependence is invalid.  */
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (x: offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations" );
		      goto lower_omp_ordered_ret;
		    }
		  /* Normalize so the GCD is computed on magnitudes.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (a: folded_deps[0],
					      b: offset, sgn: UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      /* Stash this vector's element; it may become the new
		 candidate below.  */
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (x: folded_deps[i], y: offset))
		{
		  if (forward ^ wi::gts_p (x: folded_deps[i], y: offset))
		    {
		      /* This vector is lexically later; adopt its tail
			 as the new folded candidate.  */
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* A fully processed vector is subsumed by the folded clause.  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      /* Restore the sign of the first dimension and splice the folded
	 clause back onto the (now pruned) clause list.  */
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (type: itype, cst: folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
| 9954 | |
| 9955 | |
/* Expand code for an OpenMP ordered directive.

   The body is bracketed with GOMP_ordered_start/GOMP_ordered_end runtime
   calls (or the IFN_GOMP_SIMD_ORDERED_{START,END} internal functions when
   inside a simd loop).  When the directive might execute under SIMT
   (offloaded simd), a per-lane loop is emitted so each lane executes the
   body in order.  Standalone (doacross) ordered directives are left for
   the expansion phase.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (i: *gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (p: stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (clauses: gimple_omp_ordered_clauses (ord_stmt),
			       kind: OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (clauses: gimple_omp_ordered_clauses (ord_stmt),
				  kind: OMP_CLAUSE_THREADS);

  if (gimple_omp_ordered_standalone_p (g: ord_stmt))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  /* Wrap the directive in a GIMPLE_BIND so locals have a scope.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind_stmt: bind, stmt);

  if (simd)
    {
      /* Inside simd loops use the internal function form so the
	 vectorizer can recognize and handle the ordering.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (fncode: BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind_stmt: bind, stmt: x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* Under SIMT, loop over the lanes: each iteration the lane whose
	 turn it is (per IFN_GOMP_SIMT_ORDERED_PRED) runs the body.  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (gs: g, lhs: counter);
      gimple_bind_add_stmt (bind_stmt: bind, stmt: g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind_stmt: bind, stmt: gimple_build_label (label: body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (gs: g, lhs: simt_pred);
      gimple_bind_add_stmt (bind_stmt: bind, stmt: g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind_stmt: bind, stmt: g);

      gimple_bind_add_stmt (bind_stmt: bind, stmt: gimple_build_label (label: t));
    }
  lower_omp (gimple_omp_body_ptr (gs: stmt), ctx);
  gimple_omp_set_body (gs: stmt, body: maybe_catch_exception (body: gimple_omp_body (gs: stmt)));
  gimple_bind_add_seq (bind_stmt: bind, seq: gimple_omp_body (gs: stmt));
  gimple_omp_set_body (gs: stmt, NULL);

  if (maybe_simt)
    {
      /* Decrement the lane counter and loop back while any lane still
	 has a non-negative counter (voted across the warp).  */
      gimple_bind_add_stmt (bind_stmt: bind, stmt: gimple_build_label (label: test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind_stmt: bind, stmt: g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind_stmt: bind, seq: tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (gs: g, lhs: nonneg);
      gimple_bind_add_stmt (bind_stmt: bind, stmt: g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind_stmt: bind, stmt: g);

      gimple_bind_add_stmt (bind_stmt: bind, stmt: gimple_build_label (label: end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (fncode: BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind_stmt: bind, stmt: x);

  gimple_bind_add_stmt (bind_stmt: bind, stmt: gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind_stmt: bind);
}
| 10069 | |
| 10070 | |
/* Expand code for an OpenMP scan directive and the structured block
   before the scan directive.

   GSI_P points at the GIMPLE_OMP_SCAN; CTX is its context and CTX->outer
   the enclosing worksharing/simd loop.  The scan separator splits the loop
   body into an input phase and a scan phase; for each inscan reduction
   clause this emits, before the appropriate phase, either initialization
   of the private copy (input phase) or the prefix-sum update (scan phase),
   using per-lane "omp simd array" storage in the simd case.  */

static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (i: *gsi_p);
  bool has_clauses
    = gimple_omp_scan_clauses (scan_stmt: as_a <gomp_scan *> (p: stmt)) != NULL;
  tree lane = NULL_TREE;
  /* Statements to emit before the structured block this scan ends.  */
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (i: &gsi2);
      gimple *stmt2 = gsi_stmt (i: gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (g: stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (scan_stmt: as_a <gomp_scan *> (p: stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt: stmt2);
	  gcc_assert (ctx);
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  /* This scan terminates the input phase iff the clause presence and
     inclusivity disagree (for inclusive scan the clause-bearing scan ends
     the input phase; for exclusive it is the other way round).  */
  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (g: octx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (g: octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (g: octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (g: octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (g: octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (g: octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    /* Ask for a per-phase simd lane number (1 = input phase, 2 = inclusive
       scan phase, 3 = exclusive scan phase) keyed on the loop's simduid.  */
    if (tree c = omp_find_clause (clauses: gimple_omp_for_clauses (gs: octx->stmt),
				  kind: OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (gs: g, lhs: lane);
	gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    {
      for (tree c = gimple_omp_for_clauses (gs: octx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
	  {
	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	    tree var = OMP_CLAUSE_DECL (c);
	    tree new_var = lookup_decl (var, ctx: octx);
	    tree val = new_var;
	    /* var2 = accumulator, var3 = separate identity element (if
	       any), var4 = previous-element copy for exclusive scan,
	       lane0 = original lane index replaced by LANE.  */
	    tree var2 = NULL_TREE;
	    tree var3 = NULL_TREE;
	    tree var4 = NULL_TREE;
	    tree lane0 = NULL_TREE;
	    tree new_vard = new_var;
	    if (omp_privatize_by_reference (decl: var))
	      {
		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
		val = new_var;
	      }
	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
	      {
		val = DECL_VALUE_EXPR (new_vard);
		if (new_vard != new_var)
		  {
		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		    val = TREE_OPERAND (val, 0);
		  }
		if (TREE_CODE (val) == ARRAY_REF
		    && VAR_P (TREE_OPERAND (val, 0)))
		  {
		    tree v = TREE_OPERAND (val, 0);
		    if (lookup_attribute (attr_name: "omp simd array" ,
					  DECL_ATTRIBUTES (v)))
		      {
			/* Per-lane storage: index the simd array by the
			   phase-specific LANE instead of the original.  */
			val = unshare_expr (val);
			lane0 = TREE_OPERAND (val, 1);
			TREE_OPERAND (val, 1) = lane;
			var2 = lookup_decl (var: v, ctx: octx);
			if (octx->scan_exclusive)
			  var4 = lookup_decl (var: var2, ctx: octx);
			if (input_phase
			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			  var3 = maybe_lookup_decl (var: var4 ? var4 : var2, ctx: octx);
			if (!input_phase)
			  {
			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					   var2, lane, NULL_TREE, NULL_TREE);
			    TREE_THIS_NOTRAP (var2) = 1;
			    if (octx->scan_exclusive)
			      {
				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					       var4, lane, NULL_TREE,
					       NULL_TREE);
				TREE_THIS_NOTRAP (var4) = 1;
			      }
			  }
			else
			  var2 = val;
		      }
		  }
		gcc_assert (var2);
	      }
	    else
	      {
		var2 = build_outer_var_ref (var, ctx: octx);
		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		  {
		    var3 = maybe_lookup_decl (var: new_vard, ctx: octx);
		    if (var3 == new_vard || var3 == NULL_TREE)
		      var3 = NULL_TREE;
		    else if (is_simd && octx->scan_exclusive && !input_phase)
		      {
			var4 = maybe_lookup_decl (var: var3, ctx: octx);
			if (var4 == var3 || var4 == NULL_TREE)
			  {
			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			      {
				var4 = var3;
				var3 = NULL_TREE;
			      }
			    else
			      var4 = NULL_TREE;
			  }
		      }
		  }
		if (is_simd
		    && octx->scan_exclusive
		    && !input_phase
		    && var4 == NULL_TREE)
		  var4 = create_tmp_var (TREE_TYPE (val));
	      }
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      {
		/* User-defined reduction: lower the stored init/merge
		   sequences with the placeholder bound appropriately.  */
		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		if (input_phase)
		  {
		    if (var3)
		      {
			/* If we've added a separate identity element
			   variable, copy it over into val.  */
			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
									var3);
			gimplify_and_add (x, &before);
		      }
		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      {
			/* Otherwise, assign to it the identity element.  */
			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			if (is_for)
			  tseq = copy_gimple_seq_and_replace_locals (seq: tseq);
			tree ref = build_outer_var_ref (var, ctx: octx);
			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
			if (x)
			  {
			    if (new_vard != new_var)
			      val = build_fold_addr_expr_loc (clause_loc, val);
			    SET_DECL_VALUE_EXPR (new_vard, val);
			  }
			SET_DECL_VALUE_EXPR (placeholder, ref);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
			lower_omp (&tseq, octx);
			if (x)
			  SET_DECL_VALUE_EXPR (new_vard, x);
			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			gimple_seq_add_seq (&before, tseq);
			if (is_simd)
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      }
		  }
		else if (is_simd)
		  {
		    tree x;
		    if (octx->scan_exclusive)
		      {
			/* Save the pre-update accumulator for exclusive
			   scan before merging in this element.  */
			tree v4 = unshare_expr (var4);
			tree v2 = unshare_expr (var2);
			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
			gimplify_and_add (x, &before);
		      }
		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		    tree vexpr = val;
		    if (x && new_vard != new_var)
		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		    SET_DECL_VALUE_EXPR (placeholder, var2);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		    lower_omp (&tseq, octx);
		    gimple_seq_add_seq (&before, tseq);
		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, x);
		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		    if (octx->scan_inclusive)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var2);
			gimplify_and_add (x, &before);
		      }
		    else if (lane0 == NULL_TREE)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var4);
			gimplify_and_add (x, &before);
		      }
		  }
	      }
	    else
	      {
		if (input_phase)
		  {
		    /* input phase.  Set val to initializer before
		       the body.  */
		    tree x = omp_reduction_init (clause: c, TREE_TYPE (new_var));
		    gimplify_assign (val, x, &before);
		  }
		else if (is_simd)
		  {
		    /* scan phase.  */
		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		    if (code == MINUS_EXPR)
		      code = PLUS_EXPR;

		    tree x = build2 (code, TREE_TYPE (var2),
				     unshare_expr (var2), unshare_expr (val));
		    if (octx->scan_inclusive)
		      {
			gimplify_assign (unshare_expr (var2), x, &before);
			gimplify_assign (val, var2, &before);
		      }
		    else
		      {
			/* Exclusive: hand back the value before this
			   element was merged in.  */
			gimplify_assign (unshare_expr (var4),
					 unshare_expr (var2), &before);
			gimplify_assign (var2, x, &before);
			if (lane0 == NULL_TREE)
			  gimplify_assign (val, var4, &before);
		      }
		  }
	      }
	    if (octx->scan_exclusive && !input_phase && lane0)
	      {
		/* Redirect the variable to the saved previous-element
		   slot (indexed by the original lane) for the body.  */
		tree vexpr = unshare_expr (var4);
		TREE_OPERAND (vexpr, 1) = lane0;
		if (new_vard != new_var)
		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
		SET_DECL_VALUE_EXPR (new_vard, vexpr);
	      }
	  }
      }
  if (is_simd && !is_for_simd)
    {
      /* Splice the preceding structured block (and the prepared BEFORE
	 sequence) in place of the scan statement itself.  */
      gsi_insert_seq_after (gsi_p, gimple_omp_body (gs: stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (gs: stmt), octx);
  if (before)
    {
      gimple_stmt_iterator gsi = gsi_start (seq&: *gimple_omp_body_ptr (gs: stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
| 10360 | |
| 10361 | |
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Map from critical-section name (IDENTIFIER_NODE) to the common symbol
   used as its mutex; shared by all translation units via the
   ".gomp_critical_user_<name>" naming convention.  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (p: gsi_stmt (i: *gsi_p));
  gbind *bind;
  location_t loc = gimple_location (g: stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (crit_stmt: stmt);
  if (name)
    {
      /* Named critical: take the address of a common symbol derived from
	 the name, so distinct names get distinct locks and the same name
	 in different TUs shares one lock.  */
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (size: 10);

      tree *n = critical_name_mutexes->get (k: name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  new_str = ACONCAT ((".gomp_critical_user_" ,
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  /* DECL_COMMON so all TUs using this name merge to one symbol.  */
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (k: name, v: decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (decl: current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (stmt: octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* Unnamed critical: use the single global lock in libgomp.  */
      lock = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  /* Wrap the directive in a bind and bracket the body with the
     lock/unlock calls.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind_stmt: bind, stmt);

  tbody = gimple_bind_body (gs: bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind_stmt: bind, seq: tbody);

  lower_omp (gimple_omp_body_ptr (gs: stmt), ctx);
  gimple_omp_set_body (gs: stmt, body: maybe_catch_exception (body: gimple_omp_body (gs: stmt)));
  gimple_bind_add_seq (bind_stmt: bind, seq: gimple_omp_body (gs: stmt));
  gimple_omp_set_body (gs: stmt, NULL);

  tbody = gimple_bind_body (gs: bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind_stmt: bind, seq: tbody);

  gimple_bind_add_stmt (bind_stmt: bind, stmt: gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind_stmt: bind);
}
| 10467 | |
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Invert the loop condition: lastprivate fires when the loop exits.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  tree n2 = fd->loop.n2;
  if (fd->collapse > 1
      && TREE_CODE (n2) != INTEGER_CST
      && gimple_omp_for_combined_into_p (g: fd->for_stmt))
    {
      /* For combined constructs the original end value lives in an
	 outer construct; recover it either from the outer loop's data
	 or from the _looptemp_ clauses of the enclosing task/parallel.  */
      struct omp_context *taskreg_ctx = NULL;
      if (gimple_code (g: ctx->outer->stmt) == GIMPLE_OMP_FOR)
	{
	  gomp_for *gfor = as_a <gomp_for *> (p: ctx->outer->stmt);
	  if (gimple_omp_for_kind (g: gfor) == GF_OMP_FOR_KIND_FOR
	      || gimple_omp_for_kind (g: gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
	    {
	      if (gimple_omp_for_combined_into_p (g: gfor))
		{
		  gcc_assert (ctx->outer->outer
			      && is_parallel_ctx (ctx->outer->outer));
		  taskreg_ctx = ctx->outer->outer;
		}
	      else
		{
		  struct omp_for_data outer_fd;
		  omp_extract_for_data (for_stmt: gfor, fd: &outer_fd, NULL);
		  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		}
	    }
	  else if (gimple_omp_for_kind (g: gfor) == GF_OMP_FOR_KIND_TASKLOOP)
	    taskreg_ctx = ctx->outer->outer;
	}
      else if (is_taskreg_ctx (ctx: ctx->outer))
	taskreg_ctx = ctx->outer;
      if (taskreg_ctx)
	{
	  int i;
	  tree taskreg_clauses
	    = gimple_omp_taskreg_clauses (gs: taskreg_ctx->stmt);
	  tree innerc = omp_find_clause (clauses: taskreg_clauses,
					 kind: OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  /* Skip the _looptemp_ clauses used for the iteration vars;
	     non-rectangular double loops use 4 extra temporaries.  */
	  int count = fd->collapse;
	  if (fd->non_rect
	      && fd->last_nonrect == fd->first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (gs: fd->for_stmt, i: fd->last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		count += 4;
	  for (i = 0; i < count; i++)
	    {
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					kind: OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	    }
	  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
				    kind: OMP_CLAUSE__LOOPTEMP_);
	  if (innerc)
	    n2 = fold_convert (TREE_TYPE (n2),
			       lookup_decl (OMP_CLAUSE_DECL (innerc),
					    taskreg_ctx));
	}
    }
  cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);

  clauses = gimple_omp_for_clauses (gs: fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, predicate: cond, body_p, stmt_list: &stmts, cstmt_list: clist, ctx);
  if (!gimple_seq_empty_p (s: stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
| 10578 | |
| 10579 | /* OpenACC privatization. |
| 10580 | |
| 10581 | Or, in other words, *sharing* at the respective OpenACC level of |
| 10582 | parallelism. |
| 10583 | |
| 10584 | From a correctness perspective, a non-addressable variable can't be accessed |
| 10585 | outside the current thread, so it can go in a (faster than shared memory) |
| 10586 | register -- though that register may need to be broadcast in some |
| 10587 | circumstances. A variable can only meaningfully be "shared" across workers |
| 10588 | or vector lanes if its address is taken, e.g. by a call to an atomic |
| 10589 | builtin. |
| 10590 | |
| 10591 | From an optimisation perspective, the answer might be fuzzier: maybe |
| 10592 | sometimes, using shared memory directly would be faster than |
| 10593 | broadcasting. */ |
| 10594 | |
| 10595 | static void |
| 10596 | oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags, |
| 10597 | const location_t loc, const tree c, |
| 10598 | const tree decl) |
| 10599 | { |
| 10600 | const dump_user_location_t d_u_loc |
| 10601 | = dump_user_location_t::from_location_t (loc); |
| 10602 | /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */ |
| 10603 | #if __GNUC__ >= 10 |
| 10604 | # pragma GCC diagnostic push |
| 10605 | # pragma GCC diagnostic ignored "-Wformat" |
| 10606 | #endif |
| 10607 | dump_printf_loc (l_dump_flags, d_u_loc, |
| 10608 | "variable %<%T%> " , decl); |
| 10609 | #if __GNUC__ >= 10 |
| 10610 | # pragma GCC diagnostic pop |
| 10611 | #endif |
| 10612 | if (c) |
| 10613 | dump_printf (l_dump_flags, |
| 10614 | "in %qs clause " , |
| 10615 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
| 10616 | else |
| 10617 | dump_printf (l_dump_flags, |
| 10618 | "declared in block " ); |
| 10619 | } |
| 10620 | |
| 10621 | static bool |
| 10622 | oacc_privatization_candidate_p (const location_t loc, const tree c, |
| 10623 | const tree decl) |
| 10624 | { |
| 10625 | dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags (); |
| 10626 | |
| 10627 | /* There is some differentiation depending on block vs. clause. */ |
| 10628 | bool block = !c; |
| 10629 | |
| 10630 | bool res = true; |
| 10631 | |
| 10632 | if (res && !VAR_P (decl)) |
| 10633 | { |
| 10634 | /* A PARM_DECL (appearing in a 'private' clause) is expected to have been |
| 10635 | privatized into a new VAR_DECL. */ |
| 10636 | gcc_checking_assert (TREE_CODE (decl) != PARM_DECL); |
| 10637 | |
| 10638 | res = false; |
| 10639 | |
| 10640 | if (dump_enabled_p ()) |
| 10641 | { |
| 10642 | oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl); |
| 10643 | dump_printf (l_dump_flags, |
| 10644 | "potentially has improper OpenACC privatization level: %qs\n" , |
| 10645 | get_tree_code_name (TREE_CODE (decl))); |
| 10646 | } |
| 10647 | } |
| 10648 | |
| 10649 | if (res && block && TREE_STATIC (decl)) |
| 10650 | { |
| 10651 | res = false; |
| 10652 | |
| 10653 | if (dump_enabled_p ()) |
| 10654 | { |
| 10655 | oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl); |
| 10656 | dump_printf (l_dump_flags, |
| 10657 | "isn%'t candidate for adjusting OpenACC privatization level: %s\n" , |
| 10658 | "static" ); |
| 10659 | } |
| 10660 | } |
| 10661 | |
| 10662 | if (res && block && DECL_EXTERNAL (decl)) |
| 10663 | { |
| 10664 | res = false; |
| 10665 | |
| 10666 | if (dump_enabled_p ()) |
| 10667 | { |
| 10668 | oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl); |
| 10669 | dump_printf (l_dump_flags, |
| 10670 | "isn%'t candidate for adjusting OpenACC privatization level: %s\n" , |
| 10671 | "external" ); |
| 10672 | } |
| 10673 | } |
| 10674 | |
| 10675 | if (res && !TREE_ADDRESSABLE (decl)) |
| 10676 | { |
| 10677 | res = false; |
| 10678 | |
| 10679 | if (dump_enabled_p ()) |
| 10680 | { |
| 10681 | oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl); |
| 10682 | dump_printf (l_dump_flags, |
| 10683 | "isn%'t candidate for adjusting OpenACC privatization level: %s\n" , |
| 10684 | "not addressable" ); |
| 10685 | } |
| 10686 | } |
| 10687 | |
| 10688 | /* If an artificial variable has been added to a bind, e.g. |
| 10689 | a compiler-generated temporary structure used by the Fortran front-end, do |
| 10690 | not consider it as a privatization candidate. Note that variables on |
| 10691 | the stack are private per-thread by default: making them "gang-private" |
| 10692 | for OpenACC actually means to share a single instance of a variable |
| 10693 | amongst all workers and threads spawned within each gang. |
| 10694 | At present, no compiler-generated artificial variables require such |
| 10695 | sharing semantics, so this is safe. */ |
| 10696 | |
| 10697 | if (res && block && DECL_ARTIFICIAL (decl)) |
| 10698 | { |
| 10699 | res = false; |
| 10700 | |
| 10701 | if (dump_enabled_p ()) |
| 10702 | { |
| 10703 | oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl); |
| 10704 | dump_printf (l_dump_flags, |
| 10705 | "isn%'t candidate for adjusting OpenACC privatization " |
| 10706 | "level: %s\n" , "artificial" ); |
| 10707 | } |
| 10708 | } |
| 10709 | |
| 10710 | if (res) |
| 10711 | { |
| 10712 | if (dump_enabled_p ()) |
| 10713 | { |
| 10714 | oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl); |
| 10715 | dump_printf (l_dump_flags, |
| 10716 | "is candidate for adjusting OpenACC privatization level\n" ); |
| 10717 | } |
| 10718 | } |
| 10719 | |
| 10720 | if (dump_file && (dump_flags & TDF_DETAILS)) |
| 10721 | { |
| 10722 | print_generic_decl (dump_file, decl, dump_flags); |
| 10723 | fprintf (stream: dump_file, format: "\n" ); |
| 10724 | } |
| 10725 | |
| 10726 | return res; |
| 10727 | } |
| 10728 | |
| 10729 | /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in |
| 10730 | CTX. */ |
| 10731 | |
| 10732 | static void |
| 10733 | oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses) |
| 10734 | { |
| 10735 | for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
| 10736 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE) |
| 10737 | { |
| 10738 | tree decl = OMP_CLAUSE_DECL (c); |
| 10739 | |
| 10740 | tree new_decl = lookup_decl (var: decl, ctx); |
| 10741 | |
| 10742 | if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, |
| 10743 | decl: new_decl)) |
| 10744 | continue; |
| 10745 | |
| 10746 | gcc_checking_assert |
| 10747 | (!ctx->oacc_privatization_candidates.contains (new_decl)); |
| 10748 | ctx->oacc_privatization_candidates.safe_push (obj: new_decl); |
| 10749 | } |
| 10750 | } |
| 10751 | |
| 10752 | /* Scan DECLS for candidates for adjusting OpenACC privatization level in |
| 10753 | CTX. */ |
| 10754 | |
| 10755 | static void |
| 10756 | oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls) |
| 10757 | { |
| 10758 | for (tree decl = decls; decl; decl = DECL_CHAIN (decl)) |
| 10759 | { |
| 10760 | tree new_decl = lookup_decl (var: decl, ctx); |
| 10761 | gcc_checking_assert (new_decl == decl); |
| 10762 | |
| 10763 | if (!oacc_privatization_candidate_p (loc: gimple_location (g: ctx->stmt), NULL, |
| 10764 | decl: new_decl)) |
| 10765 | continue; |
| 10766 | |
| 10767 | gcc_checking_assert |
| 10768 | (!ctx->oacc_privatization_candidates.contains (new_decl)); |
| 10769 | ctx->oacc_privatization_candidates.safe_push (obj: new_decl); |
| 10770 | } |
| 10771 | } |
| 10772 | |
| 10773 | /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */ |
| 10774 | |
| 10775 | static tree |
| 10776 | omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, |
| 10777 | struct walk_stmt_info *wi) |
| 10778 | { |
| 10779 | gimple *stmt = gsi_stmt (i: *gsi_p); |
| 10780 | |
| 10781 | *handled_ops_p = true; |
| 10782 | switch (gimple_code (g: stmt)) |
| 10783 | { |
| 10784 | WALK_SUBSTMTS; |
| 10785 | |
| 10786 | case GIMPLE_OMP_FOR: |
| 10787 | if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_SIMD |
| 10788 | && gimple_omp_for_combined_into_p (g: stmt)) |
| 10789 | *handled_ops_p = false; |
| 10790 | break; |
| 10791 | |
| 10792 | case GIMPLE_OMP_SCAN: |
| 10793 | *(gimple_stmt_iterator *) (wi->info) = *gsi_p; |
| 10794 | return integer_zero_node; |
| 10795 | default: |
| 10796 | break; |
| 10797 | } |
| 10798 | return NULL; |
| 10799 | } |
| 10800 | |
| 10801 | /* Helper function for lower_omp_for, add transformations for a worksharing |
| 10802 | loop with scan directives inside of it. |
| 10803 | For worksharing loop not combined with simd, transform: |
| 10804 | #pragma omp for reduction(inscan,+:r) private(i) |
| 10805 | for (i = 0; i < n; i = i + 1) |
| 10806 | { |
| 10807 | { |
| 10808 | update (r); |
| 10809 | } |
| 10810 | #pragma omp scan inclusive(r) |
| 10811 | { |
| 10812 | use (r); |
| 10813 | } |
| 10814 | } |
| 10815 | |
| 10816 | into two worksharing loops + code to merge results: |
| 10817 | |
| 10818 | num_threads = omp_get_num_threads (); |
| 10819 | thread_num = omp_get_thread_num (); |
| 10820 | if (thread_num == 0) goto <D.2099>; else goto <D.2100>; |
| 10821 | <D.2099>: |
| 10822 | var2 = r; |
| 10823 | goto <D.2101>; |
| 10824 | <D.2100>: |
| 10825 | // For UDRs this is UDR init, or if ctors are needed, copy from |
| 10826 | // var3 that has been constructed to contain the neutral element. |
| 10827 | var2 = 0; |
| 10828 | <D.2101>: |
| 10829 | ivar = 0; |
| 10830 | // The _scantemp_ clauses will arrange for rpriva to be initialized to |
| 10831 | // a shared array with num_threads elements and rprivb to a local array |
| 10832 | // number of elements equal to the number of (contiguous) iterations the |
| 10833 | // current thread will perform. controlb and controlp variables are |
| 10834 | // temporaries to handle deallocation of rprivb at the end of second |
| 10835 | // GOMP_FOR. |
| 10836 | #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \ |
| 10837 | _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait |
| 10838 | for (i = 0; i < n; i = i + 1) |
| 10839 | { |
| 10840 | { |
| 10841 | // For UDRs this is UDR init or copy from var3. |
| 10842 | r = 0; |
| 10843 | // This is the input phase from user code. |
| 10844 | update (r); |
| 10845 | } |
| 10846 | { |
| 10847 | // For UDRs this is UDR merge. |
| 10848 | var2 = var2 + r; |
| 10849 | // Rather than handing it over to the user, save to local thread's |
| 10850 | // array. |
| 10851 | rprivb[ivar] = var2; |
| 10852 | // For exclusive scan, the above two statements are swapped. |
| 10853 | ivar = ivar + 1; |
| 10854 | } |
| 10855 | } |
   // And remember this thread's final value in the shared
   // rpriva array.
| 10858 | rpriva[(sizetype) thread_num] = var2; |
| 10859 | // If more than one thread, compute using Work-Efficient prefix sum |
| 10860 | // the inclusive parallel scan of the rpriva array. |
| 10861 | if (num_threads > 1) goto <D.2102>; else goto <D.2103>; |
| 10862 | <D.2102>: |
| 10863 | GOMP_barrier (); |
| 10864 | down = 0; |
| 10865 | k = 1; |
| 10866 | num_threadsu = (unsigned int) num_threads; |
| 10867 | thread_numup1 = (unsigned int) thread_num + 1; |
| 10868 | <D.2108>: |
| 10869 | twok = k << 1; |
| 10870 | if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>; |
| 10871 | <D.2110>: |
| 10872 | down = 4294967295; |
| 10873 | k = k >> 1; |
| 10874 | if (k == num_threadsu) goto <D.2112>; else goto <D.2111>; |
| 10875 | <D.2112>: |
| 10876 | k = k >> 1; |
| 10877 | <D.2111>: |
| 10878 | twok = k << 1; |
   cplx = .MUL_OVERFLOW (thread_numup1, twok);
| 10880 | mul = REALPART_EXPR <cplx>; |
| 10881 | ovf = IMAGPART_EXPR <cplx>; |
| 10882 | if (ovf == 0) goto <D.2116>; else goto <D.2117>; |
| 10883 | <D.2116>: |
| 10884 | andv = k & down; |
| 10885 | andvm1 = andv + 4294967295; |
| 10886 | l = mul + andvm1; |
| 10887 | if (l < num_threadsu) goto <D.2120>; else goto <D.2117>; |
| 10888 | <D.2120>: |
| 10889 | // For UDRs this is UDR merge, performed using var2 variable as temporary, |
| 10890 | // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2; |
| 10891 | rpriva[l] = rpriva[l - k] + rpriva[l]; |
| 10892 | <D.2117>: |
| 10893 | if (down == 0) goto <D.2121>; else goto <D.2122>; |
| 10894 | <D.2121>: |
| 10895 | k = k << 1; |
| 10896 | goto <D.2123>; |
| 10897 | <D.2122>: |
| 10898 | k = k >> 1; |
| 10899 | <D.2123>: |
| 10900 | GOMP_barrier (); |
| 10901 | if (k != 0) goto <D.2108>; else goto <D.2103>; |
| 10902 | <D.2103>: |
| 10903 | if (thread_num == 0) goto <D.2124>; else goto <D.2125>; |
| 10904 | <D.2124>: |
| 10905 | // For UDRs this is UDR init or copy from var3. |
| 10906 | var2 = 0; |
| 10907 | goto <D.2126>; |
| 10908 | <D.2125>: |
| 10909 | var2 = rpriva[thread_num - 1]; |
| 10910 | <D.2126>: |
| 10911 | ivar = 0; |
| 10912 | #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \ |
| 10913 | reduction(inscan,+:r) private(i) |
| 10914 | for (i = 0; i < n; i = i + 1) |
| 10915 | { |
| 10916 | { |
| 10917 | // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]); |
| 10918 | r = var2 + rprivb[ivar]; |
| 10919 | } |
| 10920 | { |
| 10921 | // This is the scan phase from user code. |
| 10922 | use (r); |
| 10923 | // Plus a bump of the iterator. |
| 10924 | ivar = ivar + 1; |
| 10925 | } |
| 10926 | } */ |
| 10927 | |
| 10928 | static void |
| 10929 | lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt, |
| 10930 | struct omp_for_data *fd, omp_context *ctx) |
| 10931 | { |
| 10932 | bool is_for_simd = gimple_omp_for_combined_p (g: stmt); |
| 10933 | gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive); |
| 10934 | |
| 10935 | gimple_seq body = gimple_omp_body (gs: stmt); |
| 10936 | gimple_stmt_iterator input1_gsi = gsi_none (); |
| 10937 | struct walk_stmt_info wi; |
| 10938 | memset (s: &wi, c: 0, n: sizeof (wi)); |
| 10939 | wi.val_only = true; |
| 10940 | wi.info = (void *) &input1_gsi; |
| 10941 | walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi); |
| 10942 | gcc_assert (!gsi_end_p (input1_gsi)); |
| 10943 | |
| 10944 | gimple *input_stmt1 = gsi_stmt (i: input1_gsi); |
| 10945 | gimple_stmt_iterator gsi = input1_gsi; |
| 10946 | gsi_next (i: &gsi); |
| 10947 | gimple_stmt_iterator scan1_gsi = gsi; |
| 10948 | gimple *scan_stmt1 = gsi_stmt (i: gsi); |
| 10949 | gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN); |
| 10950 | |
| 10951 | gimple_seq input_body = gimple_omp_body (gs: input_stmt1); |
| 10952 | gimple_seq scan_body = gimple_omp_body (gs: scan_stmt1); |
| 10953 | gimple_omp_set_body (gs: input_stmt1, NULL); |
| 10954 | gimple_omp_set_body (gs: scan_stmt1, NULL); |
| 10955 | gimple_omp_set_body (gs: stmt, NULL); |
| 10956 | |
| 10957 | gomp_for *new_stmt = as_a <gomp_for *> (p: gimple_copy (stmt)); |
| 10958 | gimple_seq new_body = copy_gimple_seq_and_replace_locals (seq: body); |
| 10959 | gimple_omp_set_body (gs: stmt, body); |
| 10960 | gimple_omp_set_body (gs: input_stmt1, body: input_body); |
| 10961 | |
| 10962 | gimple_stmt_iterator input2_gsi = gsi_none (); |
| 10963 | memset (s: &wi, c: 0, n: sizeof (wi)); |
| 10964 | wi.val_only = true; |
| 10965 | wi.info = (void *) &input2_gsi; |
| 10966 | walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi); |
| 10967 | gcc_assert (!gsi_end_p (input2_gsi)); |
| 10968 | |
| 10969 | gimple *input_stmt2 = gsi_stmt (i: input2_gsi); |
| 10970 | gsi = input2_gsi; |
| 10971 | gsi_next (i: &gsi); |
| 10972 | gimple_stmt_iterator scan2_gsi = gsi; |
| 10973 | gimple *scan_stmt2 = gsi_stmt (i: gsi); |
| 10974 | gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN); |
| 10975 | gimple_omp_set_body (gs: scan_stmt2, body: scan_body); |
| 10976 | |
| 10977 | gimple_stmt_iterator input3_gsi = gsi_none (); |
| 10978 | gimple_stmt_iterator scan3_gsi = gsi_none (); |
| 10979 | gimple_stmt_iterator input4_gsi = gsi_none (); |
| 10980 | gimple_stmt_iterator scan4_gsi = gsi_none (); |
| 10981 | gimple *input_stmt3 = NULL, *scan_stmt3 = NULL; |
| 10982 | gimple *input_stmt4 = NULL, *scan_stmt4 = NULL; |
| 10983 | omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL; |
| 10984 | if (is_for_simd) |
| 10985 | { |
| 10986 | memset (s: &wi, c: 0, n: sizeof (wi)); |
| 10987 | wi.val_only = true; |
| 10988 | wi.info = (void *) &input3_gsi; |
| 10989 | walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi); |
| 10990 | gcc_assert (!gsi_end_p (input3_gsi)); |
| 10991 | |
| 10992 | input_stmt3 = gsi_stmt (i: input3_gsi); |
| 10993 | gsi = input3_gsi; |
| 10994 | gsi_next (i: &gsi); |
| 10995 | scan3_gsi = gsi; |
| 10996 | scan_stmt3 = gsi_stmt (i: gsi); |
| 10997 | gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN); |
| 10998 | |
| 10999 | memset (s: &wi, c: 0, n: sizeof (wi)); |
| 11000 | wi.val_only = true; |
| 11001 | wi.info = (void *) &input4_gsi; |
| 11002 | walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi); |
| 11003 | gcc_assert (!gsi_end_p (input4_gsi)); |
| 11004 | |
| 11005 | input_stmt4 = gsi_stmt (i: input4_gsi); |
| 11006 | gsi = input4_gsi; |
| 11007 | gsi_next (i: &gsi); |
| 11008 | scan4_gsi = gsi; |
| 11009 | scan_stmt4 = gsi_stmt (i: gsi); |
| 11010 | gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN); |
| 11011 | |
| 11012 | input_simd_ctx = maybe_lookup_ctx (stmt: input_stmt3)->outer; |
| 11013 | scan_simd_ctx = maybe_lookup_ctx (stmt: input_stmt4)->outer; |
| 11014 | } |
| 11015 | |
| 11016 | tree num_threads = create_tmp_var (integer_type_node); |
| 11017 | tree thread_num = create_tmp_var (integer_type_node); |
| 11018 | tree nthreads_decl = builtin_decl_explicit (fncode: BUILT_IN_OMP_GET_NUM_THREADS); |
| 11019 | tree threadnum_decl = builtin_decl_explicit (fncode: BUILT_IN_OMP_GET_THREAD_NUM); |
| 11020 | gimple *g = gimple_build_call (nthreads_decl, 0); |
| 11021 | gimple_call_set_lhs (gs: g, lhs: num_threads); |
| 11022 | gimple_seq_add_stmt (body_p, g); |
| 11023 | g = gimple_build_call (threadnum_decl, 0); |
| 11024 | gimple_call_set_lhs (gs: g, lhs: thread_num); |
| 11025 | gimple_seq_add_stmt (body_p, g); |
| 11026 | |
| 11027 | tree ivar = create_tmp_var (sizetype); |
| 11028 | tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE; |
| 11029 | tree *cp1 = &new_clauses1, *cp2 = &new_clauses2; |
| 11030 | tree k = create_tmp_var (unsigned_type_node); |
| 11031 | tree l = create_tmp_var (unsigned_type_node); |
| 11032 | |
| 11033 | gimple_seq clist = NULL, mdlist = NULL; |
| 11034 | gimple_seq thr01_list = NULL, thrn1_list = NULL; |
| 11035 | gimple_seq thr02_list = NULL, thrn2_list = NULL; |
| 11036 | gimple_seq scan1_list = NULL, input2_list = NULL; |
| 11037 | gimple_seq last_list = NULL, reduc_list = NULL; |
| 11038 | for (tree c = gimple_omp_for_clauses (gs: stmt); c; c = OMP_CLAUSE_CHAIN (c)) |
| 11039 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
| 11040 | && OMP_CLAUSE_REDUCTION_INSCAN (c)) |
| 11041 | { |
| 11042 | location_t clause_loc = OMP_CLAUSE_LOCATION (c); |
| 11043 | tree var = OMP_CLAUSE_DECL (c); |
| 11044 | tree new_var = lookup_decl (var, ctx); |
| 11045 | tree var3 = NULL_TREE; |
| 11046 | tree new_vard = new_var; |
| 11047 | if (omp_privatize_by_reference (decl: var)) |
| 11048 | new_var = build_simple_mem_ref_loc (clause_loc, new_var); |
| 11049 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
| 11050 | { |
| 11051 | var3 = maybe_lookup_decl (var: new_vard, ctx); |
| 11052 | if (var3 == new_vard) |
| 11053 | var3 = NULL_TREE; |
| 11054 | } |
| 11055 | |
| 11056 | tree ptype = build_pointer_type (TREE_TYPE (new_var)); |
| 11057 | tree rpriva = create_tmp_var (ptype); |
| 11058 | tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_); |
| 11059 | OMP_CLAUSE_DECL (nc) = rpriva; |
| 11060 | *cp1 = nc; |
| 11061 | cp1 = &OMP_CLAUSE_CHAIN (nc); |
| 11062 | |
| 11063 | tree rprivb = create_tmp_var (ptype); |
| 11064 | nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_); |
| 11065 | OMP_CLAUSE_DECL (nc) = rprivb; |
| 11066 | OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1; |
| 11067 | *cp1 = nc; |
| 11068 | cp1 = &OMP_CLAUSE_CHAIN (nc); |
| 11069 | |
| 11070 | tree var2 = create_tmp_var_raw (TREE_TYPE (new_var)); |
| 11071 | if (new_vard != new_var) |
| 11072 | TREE_ADDRESSABLE (var2) = 1; |
| 11073 | gimple_add_tmp_var (var2); |
| 11074 | |
| 11075 | tree x = fold_convert_loc (clause_loc, sizetype, thread_num); |
| 11076 | x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x, |
| 11077 | TYPE_SIZE_UNIT (TREE_TYPE (ptype))); |
| 11078 | x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x); |
| 11079 | tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x); |
| 11080 | |
| 11081 | x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node, |
| 11082 | thread_num, integer_minus_one_node); |
| 11083 | x = fold_convert_loc (clause_loc, sizetype, x); |
| 11084 | x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x, |
| 11085 | TYPE_SIZE_UNIT (TREE_TYPE (ptype))); |
| 11086 | x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x); |
| 11087 | tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x); |
| 11088 | |
| 11089 | x = fold_convert_loc (clause_loc, sizetype, l); |
| 11090 | x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x, |
| 11091 | TYPE_SIZE_UNIT (TREE_TYPE (ptype))); |
| 11092 | x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x); |
| 11093 | tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x); |
| 11094 | |
| 11095 | x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k); |
| 11096 | x = fold_convert_loc (clause_loc, sizetype, x); |
| 11097 | x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x, |
| 11098 | TYPE_SIZE_UNIT (TREE_TYPE (ptype))); |
| 11099 | x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x); |
| 11100 | tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x); |
| 11101 | |
| 11102 | x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar, |
| 11103 | TYPE_SIZE_UNIT (TREE_TYPE (ptype))); |
| 11104 | x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x); |
| 11105 | tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x); |
| 11106 | |
| 11107 | tree var4 = is_for_simd ? new_var : var2; |
| 11108 | tree var5 = NULL_TREE, var6 = NULL_TREE; |
| 11109 | if (is_for_simd) |
| 11110 | { |
| 11111 | var5 = lookup_decl (var, ctx: input_simd_ctx); |
| 11112 | var6 = lookup_decl (var, ctx: scan_simd_ctx); |
| 11113 | if (new_vard != new_var) |
| 11114 | { |
| 11115 | var5 = build_simple_mem_ref_loc (clause_loc, var5); |
| 11116 | var6 = build_simple_mem_ref_loc (clause_loc, var6); |
| 11117 | } |
| 11118 | } |
| 11119 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
| 11120 | { |
| 11121 | tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); |
| 11122 | tree val = var2; |
| 11123 | |
| 11124 | x = lang_hooks.decls.omp_clause_default_ctor |
| 11125 | (c, var2, build_outer_var_ref (var, ctx)); |
| 11126 | if (x) |
| 11127 | gimplify_and_add (x, &clist); |
| 11128 | |
| 11129 | x = build_outer_var_ref (var, ctx); |
| 11130 | x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4), |
| 11131 | x); |
| 11132 | gimplify_and_add (x, &thr01_list); |
| 11133 | |
| 11134 | tree y = (DECL_HAS_VALUE_EXPR_P (new_vard) |
| 11135 | ? DECL_VALUE_EXPR (new_vard) : NULL_TREE); |
| 11136 | if (var3) |
| 11137 | { |
| 11138 | x = unshare_expr (var4); |
| 11139 | x = lang_hooks.decls.omp_clause_assign_op (c, x, var3); |
| 11140 | gimplify_and_add (x, &thrn1_list); |
| 11141 | x = unshare_expr (var4); |
| 11142 | x = lang_hooks.decls.omp_clause_assign_op (c, x, var3); |
| 11143 | gimplify_and_add (x, &thr02_list); |
| 11144 | } |
| 11145 | else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) |
| 11146 | { |
| 11147 | /* Otherwise, assign to it the identity element. */ |
| 11148 | gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); |
| 11149 | tseq = copy_gimple_seq_and_replace_locals (seq: tseq); |
| 11150 | if (!is_for_simd) |
| 11151 | { |
| 11152 | if (new_vard != new_var) |
| 11153 | val = build_fold_addr_expr_loc (clause_loc, val); |
| 11154 | SET_DECL_VALUE_EXPR (new_vard, val); |
| 11155 | DECL_HAS_VALUE_EXPR_P (new_vard) = 1; |
| 11156 | } |
| 11157 | SET_DECL_VALUE_EXPR (placeholder, error_mark_node); |
| 11158 | DECL_HAS_VALUE_EXPR_P (placeholder) = 1; |
| 11159 | lower_omp (&tseq, ctx); |
| 11160 | gimple_seq_add_seq (&thrn1_list, tseq); |
| 11161 | tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); |
| 11162 | lower_omp (&tseq, ctx); |
| 11163 | gimple_seq_add_seq (&thr02_list, tseq); |
| 11164 | SET_DECL_VALUE_EXPR (placeholder, NULL_TREE); |
| 11165 | DECL_HAS_VALUE_EXPR_P (placeholder) = 0; |
| 11166 | OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; |
| 11167 | if (y) |
| 11168 | SET_DECL_VALUE_EXPR (new_vard, y); |
| 11169 | else |
| 11170 | { |
| 11171 | DECL_HAS_VALUE_EXPR_P (new_vard) = 0; |
| 11172 | SET_DECL_VALUE_EXPR (new_vard, NULL_TREE); |
| 11173 | } |
| 11174 | } |
| 11175 | |
| 11176 | x = unshare_expr (var4); |
| 11177 | x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref); |
| 11178 | gimplify_and_add (x, &thrn2_list); |
| 11179 | |
| 11180 | if (is_for_simd) |
| 11181 | { |
| 11182 | x = unshare_expr (rprivb_ref); |
| 11183 | x = lang_hooks.decls.omp_clause_assign_op (c, x, var5); |
| 11184 | gimplify_and_add (x, &scan1_list); |
| 11185 | } |
| 11186 | else |
| 11187 | { |
| 11188 | if (ctx->scan_exclusive) |
| 11189 | { |
| 11190 | x = unshare_expr (rprivb_ref); |
| 11191 | x = lang_hooks.decls.omp_clause_assign_op (c, x, var2); |
| 11192 | gimplify_and_add (x, &scan1_list); |
| 11193 | } |
| 11194 | |
| 11195 | gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); |
| 11196 | tseq = copy_gimple_seq_and_replace_locals (seq: tseq); |
| 11197 | SET_DECL_VALUE_EXPR (placeholder, var2); |
| 11198 | DECL_HAS_VALUE_EXPR_P (placeholder) = 1; |
| 11199 | lower_omp (&tseq, ctx); |
| 11200 | gimple_seq_add_seq (&scan1_list, tseq); |
| 11201 | |
| 11202 | if (ctx->scan_inclusive) |
| 11203 | { |
| 11204 | x = unshare_expr (rprivb_ref); |
| 11205 | x = lang_hooks.decls.omp_clause_assign_op (c, x, var2); |
| 11206 | gimplify_and_add (x, &scan1_list); |
| 11207 | } |
| 11208 | } |
| 11209 | |
| 11210 | x = unshare_expr (rpriva_ref); |
| 11211 | x = lang_hooks.decls.omp_clause_assign_op (c, x, |
| 11212 | unshare_expr (var4)); |
| 11213 | gimplify_and_add (x, &mdlist); |
| 11214 | |
| 11215 | x = unshare_expr (is_for_simd ? var6 : new_var); |
| 11216 | x = lang_hooks.decls.omp_clause_assign_op (c, x, var4); |
| 11217 | gimplify_and_add (x, &input2_list); |
| 11218 | |
| 11219 | val = rprivb_ref; |
| 11220 | if (new_vard != new_var) |
| 11221 | val = build_fold_addr_expr_loc (clause_loc, val); |
| 11222 | |
| 11223 | gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); |
| 11224 | tseq = copy_gimple_seq_and_replace_locals (seq: tseq); |
| 11225 | SET_DECL_VALUE_EXPR (new_vard, val); |
| 11226 | DECL_HAS_VALUE_EXPR_P (new_vard) = 1; |
| 11227 | if (is_for_simd) |
| 11228 | { |
| 11229 | SET_DECL_VALUE_EXPR (placeholder, var6); |
| 11230 | DECL_HAS_VALUE_EXPR_P (placeholder) = 1; |
| 11231 | } |
| 11232 | else |
| 11233 | DECL_HAS_VALUE_EXPR_P (placeholder) = 0; |
| 11234 | lower_omp (&tseq, ctx); |
| 11235 | if (y) |
| 11236 | SET_DECL_VALUE_EXPR (new_vard, y); |
| 11237 | else |
| 11238 | { |
| 11239 | DECL_HAS_VALUE_EXPR_P (new_vard) = 0; |
| 11240 | SET_DECL_VALUE_EXPR (new_vard, NULL_TREE); |
| 11241 | } |
| 11242 | if (!is_for_simd) |
| 11243 | { |
| 11244 | SET_DECL_VALUE_EXPR (placeholder, new_var); |
| 11245 | DECL_HAS_VALUE_EXPR_P (placeholder) = 1; |
| 11246 | lower_omp (&tseq, ctx); |
| 11247 | } |
| 11248 | gimple_seq_add_seq (&input2_list, tseq); |
| 11249 | |
| 11250 | x = build_outer_var_ref (var, ctx); |
| 11251 | x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref); |
| 11252 | gimplify_and_add (x, &last_list); |
| 11253 | |
| 11254 | x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref); |
| 11255 | gimplify_and_add (x, &reduc_list); |
| 11256 | tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); |
| 11257 | tseq = copy_gimple_seq_and_replace_locals (seq: tseq); |
| 11258 | val = rprival_ref; |
| 11259 | if (new_vard != new_var) |
| 11260 | val = build_fold_addr_expr_loc (clause_loc, val); |
| 11261 | SET_DECL_VALUE_EXPR (new_vard, val); |
| 11262 | DECL_HAS_VALUE_EXPR_P (new_vard) = 1; |
| 11263 | SET_DECL_VALUE_EXPR (placeholder, var2); |
| 11264 | lower_omp (&tseq, ctx); |
| 11265 | OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; |
| 11266 | SET_DECL_VALUE_EXPR (placeholder, NULL_TREE); |
| 11267 | DECL_HAS_VALUE_EXPR_P (placeholder) = 0; |
| 11268 | if (y) |
| 11269 | SET_DECL_VALUE_EXPR (new_vard, y); |
| 11270 | else |
| 11271 | { |
| 11272 | DECL_HAS_VALUE_EXPR_P (new_vard) = 0; |
| 11273 | SET_DECL_VALUE_EXPR (new_vard, NULL_TREE); |
| 11274 | } |
| 11275 | gimple_seq_add_seq (&reduc_list, tseq); |
| 11276 | x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2); |
| 11277 | gimplify_and_add (x, &reduc_list); |
| 11278 | |
| 11279 | x = lang_hooks.decls.omp_clause_dtor (c, var2); |
| 11280 | if (x) |
| 11281 | gimplify_and_add (x, dlist); |
| 11282 | } |
| 11283 | else |
| 11284 | { |
| 11285 | x = build_outer_var_ref (var, ctx); |
| 11286 | gimplify_assign (unshare_expr (var4), x, &thr01_list); |
| 11287 | |
| 11288 | x = omp_reduction_init (clause: c, TREE_TYPE (new_var)); |
| 11289 | gimplify_assign (unshare_expr (var4), unshare_expr (x), |
| 11290 | &thrn1_list); |
| 11291 | gimplify_assign (unshare_expr (var4), x, &thr02_list); |
| 11292 | |
| 11293 | gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list); |
| 11294 | |
| 11295 | enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c); |
| 11296 | if (code == MINUS_EXPR) |
| 11297 | code = PLUS_EXPR; |
| 11298 | |
| 11299 | if (is_for_simd) |
| 11300 | gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list); |
| 11301 | else |
| 11302 | { |
| 11303 | if (ctx->scan_exclusive) |
| 11304 | gimplify_assign (unshare_expr (rprivb_ref), var2, |
| 11305 | &scan1_list); |
| 11306 | x = build2 (code, TREE_TYPE (new_var), var2, new_var); |
| 11307 | gimplify_assign (var2, x, &scan1_list); |
| 11308 | if (ctx->scan_inclusive) |
| 11309 | gimplify_assign (unshare_expr (rprivb_ref), var2, |
| 11310 | &scan1_list); |
| 11311 | } |
| 11312 | |
| 11313 | gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4), |
| 11314 | &mdlist); |
| 11315 | |
| 11316 | x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref); |
| 11317 | gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list); |
| 11318 | |
| 11319 | gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref, |
| 11320 | &last_list); |
| 11321 | |
| 11322 | x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref, |
| 11323 | unshare_expr (rprival_ref)); |
| 11324 | gimplify_assign (rprival_ref, x, &reduc_list); |
| 11325 | } |
| 11326 | } |
| 11327 | |
| 11328 | g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node); |
| 11329 | gimple_seq_add_stmt (&scan1_list, g); |
| 11330 | g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node); |
| 11331 | gimple_seq_add_stmt (gimple_omp_body_ptr (gs: is_for_simd |
| 11332 | ? scan_stmt4 : scan_stmt2), g); |
| 11333 | |
| 11334 | tree controlb = create_tmp_var (boolean_type_node); |
| 11335 | tree controlp = create_tmp_var (ptr_type_node); |
| 11336 | tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_); |
| 11337 | OMP_CLAUSE_DECL (nc) = controlb; |
| 11338 | OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1; |
| 11339 | *cp1 = nc; |
| 11340 | cp1 = &OMP_CLAUSE_CHAIN (nc); |
| 11341 | nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_); |
| 11342 | OMP_CLAUSE_DECL (nc) = controlp; |
| 11343 | OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1; |
| 11344 | *cp1 = nc; |
| 11345 | cp1 = &OMP_CLAUSE_CHAIN (nc); |
| 11346 | nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_); |
| 11347 | OMP_CLAUSE_DECL (nc) = controlb; |
| 11348 | OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1; |
| 11349 | *cp2 = nc; |
| 11350 | cp2 = &OMP_CLAUSE_CHAIN (nc); |
| 11351 | nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_); |
| 11352 | OMP_CLAUSE_DECL (nc) = controlp; |
| 11353 | OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1; |
| 11354 | *cp2 = nc; |
| 11355 | cp2 = &OMP_CLAUSE_CHAIN (nc); |
| 11356 | |
| 11357 | *cp1 = gimple_omp_for_clauses (gs: stmt); |
| 11358 | gimple_omp_for_set_clauses (gs: stmt, clauses: new_clauses1); |
| 11359 | *cp2 = gimple_omp_for_clauses (gs: new_stmt); |
| 11360 | gimple_omp_for_set_clauses (gs: new_stmt, clauses: new_clauses2); |
| 11361 | |
| 11362 | if (is_for_simd) |
| 11363 | { |
| 11364 | gimple_seq_add_seq (gimple_omp_body_ptr (gs: scan_stmt3), scan1_list); |
| 11365 | gimple_seq_add_seq (gimple_omp_body_ptr (gs: input_stmt4), input2_list); |
| 11366 | |
| 11367 | gsi_insert_seq_after (&input3_gsi, gimple_omp_body (gs: input_stmt3), |
| 11368 | GSI_SAME_STMT); |
| 11369 | gsi_remove (&input3_gsi, true); |
| 11370 | gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (gs: scan_stmt3), |
| 11371 | GSI_SAME_STMT); |
| 11372 | gsi_remove (&scan3_gsi, true); |
| 11373 | gsi_insert_seq_after (&input4_gsi, gimple_omp_body (gs: input_stmt4), |
| 11374 | GSI_SAME_STMT); |
| 11375 | gsi_remove (&input4_gsi, true); |
| 11376 | gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (gs: scan_stmt4), |
| 11377 | GSI_SAME_STMT); |
| 11378 | gsi_remove (&scan4_gsi, true); |
| 11379 | } |
| 11380 | else |
| 11381 | { |
| 11382 | gimple_omp_set_body (gs: scan_stmt1, body: scan1_list); |
| 11383 | gimple_omp_set_body (gs: input_stmt2, body: input2_list); |
| 11384 | } |
| 11385 | |
| 11386 | gsi_insert_seq_after (&input1_gsi, gimple_omp_body (gs: input_stmt1), |
| 11387 | GSI_SAME_STMT); |
| 11388 | gsi_remove (&input1_gsi, true); |
| 11389 | gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (gs: scan_stmt1), |
| 11390 | GSI_SAME_STMT); |
| 11391 | gsi_remove (&scan1_gsi, true); |
| 11392 | gsi_insert_seq_after (&input2_gsi, gimple_omp_body (gs: input_stmt2), |
| 11393 | GSI_SAME_STMT); |
| 11394 | gsi_remove (&input2_gsi, true); |
| 11395 | gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (gs: scan_stmt2), |
| 11396 | GSI_SAME_STMT); |
| 11397 | gsi_remove (&scan2_gsi, true); |
| 11398 | |
| 11399 | gimple_seq_add_seq (body_p, clist); |
| 11400 | |
| 11401 | tree lab1 = create_artificial_label (UNKNOWN_LOCATION); |
| 11402 | tree lab2 = create_artificial_label (UNKNOWN_LOCATION); |
| 11403 | tree lab3 = create_artificial_label (UNKNOWN_LOCATION); |
| 11404 | g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2); |
| 11405 | gimple_seq_add_stmt (body_p, g); |
| 11406 | g = gimple_build_label (label: lab1); |
| 11407 | gimple_seq_add_stmt (body_p, g); |
| 11408 | gimple_seq_add_seq (body_p, thr01_list); |
| 11409 | g = gimple_build_goto (dest: lab3); |
| 11410 | gimple_seq_add_stmt (body_p, g); |
| 11411 | g = gimple_build_label (label: lab2); |
| 11412 | gimple_seq_add_stmt (body_p, g); |
| 11413 | gimple_seq_add_seq (body_p, thrn1_list); |
| 11414 | g = gimple_build_label (label: lab3); |
| 11415 | gimple_seq_add_stmt (body_p, g); |
| 11416 | |
| 11417 | g = gimple_build_assign (ivar, size_zero_node); |
| 11418 | gimple_seq_add_stmt (body_p, g); |
| 11419 | |
| 11420 | gimple_seq_add_stmt (body_p, stmt); |
| 11421 | gimple_seq_add_seq (body_p, body); |
| 11422 | gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v, |
| 11423 | fd->loop.v)); |
| 11424 | |
| 11425 | g = gimple_build_omp_return (true); |
| 11426 | gimple_seq_add_stmt (body_p, g); |
| 11427 | gimple_seq_add_seq (body_p, mdlist); |
| 11428 | |
| 11429 | lab1 = create_artificial_label (UNKNOWN_LOCATION); |
| 11430 | lab2 = create_artificial_label (UNKNOWN_LOCATION); |
| 11431 | g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2); |
| 11432 | gimple_seq_add_stmt (body_p, g); |
| 11433 | g = gimple_build_label (label: lab1); |
| 11434 | gimple_seq_add_stmt (body_p, g); |
| 11435 | |
| 11436 | g = omp_build_barrier (NULL); |
| 11437 | gimple_seq_add_stmt (body_p, g); |
| 11438 | |
| 11439 | tree down = create_tmp_var (unsigned_type_node); |
| 11440 | g = gimple_build_assign (down, build_zero_cst (unsigned_type_node)); |
| 11441 | gimple_seq_add_stmt (body_p, g); |
| 11442 | |
| 11443 | g = gimple_build_assign (k, build_one_cst (unsigned_type_node)); |
| 11444 | gimple_seq_add_stmt (body_p, g); |
| 11445 | |
| 11446 | tree num_threadsu = create_tmp_var (unsigned_type_node); |
| 11447 | g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads); |
| 11448 | gimple_seq_add_stmt (body_p, g); |
| 11449 | |
| 11450 | tree thread_numu = create_tmp_var (unsigned_type_node); |
| 11451 | g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num); |
| 11452 | gimple_seq_add_stmt (body_p, g); |
| 11453 | |
| 11454 | tree thread_nump1 = create_tmp_var (unsigned_type_node); |
| 11455 | g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu, |
| 11456 | build_int_cst (unsigned_type_node, 1)); |
| 11457 | gimple_seq_add_stmt (body_p, g); |
| 11458 | |
| 11459 | lab3 = create_artificial_label (UNKNOWN_LOCATION); |
| 11460 | g = gimple_build_label (label: lab3); |
| 11461 | gimple_seq_add_stmt (body_p, g); |
| 11462 | |
| 11463 | tree twok = create_tmp_var (unsigned_type_node); |
| 11464 | g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node); |
| 11465 | gimple_seq_add_stmt (body_p, g); |
| 11466 | |
| 11467 | tree lab4 = create_artificial_label (UNKNOWN_LOCATION); |
| 11468 | tree lab5 = create_artificial_label (UNKNOWN_LOCATION); |
| 11469 | tree lab6 = create_artificial_label (UNKNOWN_LOCATION); |
| 11470 | g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5); |
| 11471 | gimple_seq_add_stmt (body_p, g); |
| 11472 | g = gimple_build_label (label: lab4); |
| 11473 | gimple_seq_add_stmt (body_p, g); |
| 11474 | g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node)); |
| 11475 | gimple_seq_add_stmt (body_p, g); |
| 11476 | g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node); |
| 11477 | gimple_seq_add_stmt (body_p, g); |
| 11478 | |
| 11479 | g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5); |
| 11480 | gimple_seq_add_stmt (body_p, g); |
| 11481 | g = gimple_build_label (label: lab6); |
| 11482 | gimple_seq_add_stmt (body_p, g); |
| 11483 | |
| 11484 | g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node); |
| 11485 | gimple_seq_add_stmt (body_p, g); |
| 11486 | |
| 11487 | g = gimple_build_label (label: lab5); |
| 11488 | gimple_seq_add_stmt (body_p, g); |
| 11489 | |
| 11490 | g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node); |
| 11491 | gimple_seq_add_stmt (body_p, g); |
| 11492 | |
| 11493 | tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, named: false)); |
| 11494 | g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok); |
| 11495 | gimple_call_set_lhs (gs: g, lhs: cplx); |
| 11496 | gimple_seq_add_stmt (body_p, g); |
| 11497 | tree mul = create_tmp_var (unsigned_type_node); |
| 11498 | g = gimple_build_assign (mul, REALPART_EXPR, |
| 11499 | build1 (REALPART_EXPR, unsigned_type_node, cplx)); |
| 11500 | gimple_seq_add_stmt (body_p, g); |
| 11501 | tree ovf = create_tmp_var (unsigned_type_node); |
| 11502 | g = gimple_build_assign (ovf, IMAGPART_EXPR, |
| 11503 | build1 (IMAGPART_EXPR, unsigned_type_node, cplx)); |
| 11504 | gimple_seq_add_stmt (body_p, g); |
| 11505 | |
| 11506 | tree lab7 = create_artificial_label (UNKNOWN_LOCATION); |
| 11507 | tree lab8 = create_artificial_label (UNKNOWN_LOCATION); |
| 11508 | g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node), |
| 11509 | lab7, lab8); |
| 11510 | gimple_seq_add_stmt (body_p, g); |
| 11511 | g = gimple_build_label (label: lab7); |
| 11512 | gimple_seq_add_stmt (body_p, g); |
| 11513 | |
| 11514 | tree andv = create_tmp_var (unsigned_type_node); |
| 11515 | g = gimple_build_assign (andv, BIT_AND_EXPR, k, down); |
| 11516 | gimple_seq_add_stmt (body_p, g); |
| 11517 | tree andvm1 = create_tmp_var (unsigned_type_node); |
| 11518 | g = gimple_build_assign (andvm1, PLUS_EXPR, andv, |
| 11519 | build_minus_one_cst (unsigned_type_node)); |
| 11520 | gimple_seq_add_stmt (body_p, g); |
| 11521 | |
| 11522 | g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1); |
| 11523 | gimple_seq_add_stmt (body_p, g); |
| 11524 | |
| 11525 | tree lab9 = create_artificial_label (UNKNOWN_LOCATION); |
| 11526 | g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8); |
| 11527 | gimple_seq_add_stmt (body_p, g); |
| 11528 | g = gimple_build_label (label: lab9); |
| 11529 | gimple_seq_add_stmt (body_p, g); |
| 11530 | gimple_seq_add_seq (body_p, reduc_list); |
| 11531 | g = gimple_build_label (label: lab8); |
| 11532 | gimple_seq_add_stmt (body_p, g); |
| 11533 | |
| 11534 | tree lab10 = create_artificial_label (UNKNOWN_LOCATION); |
| 11535 | tree lab11 = create_artificial_label (UNKNOWN_LOCATION); |
| 11536 | tree lab12 = create_artificial_label (UNKNOWN_LOCATION); |
| 11537 | g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node), |
| 11538 | lab10, lab11); |
| 11539 | gimple_seq_add_stmt (body_p, g); |
| 11540 | g = gimple_build_label (label: lab10); |
| 11541 | gimple_seq_add_stmt (body_p, g); |
| 11542 | g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node); |
| 11543 | gimple_seq_add_stmt (body_p, g); |
| 11544 | g = gimple_build_goto (dest: lab12); |
| 11545 | gimple_seq_add_stmt (body_p, g); |
| 11546 | g = gimple_build_label (label: lab11); |
| 11547 | gimple_seq_add_stmt (body_p, g); |
| 11548 | g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node); |
| 11549 | gimple_seq_add_stmt (body_p, g); |
| 11550 | g = gimple_build_label (label: lab12); |
| 11551 | gimple_seq_add_stmt (body_p, g); |
| 11552 | |
| 11553 | g = omp_build_barrier (NULL); |
| 11554 | gimple_seq_add_stmt (body_p, g); |
| 11555 | |
| 11556 | g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node), |
| 11557 | lab3, lab2); |
| 11558 | gimple_seq_add_stmt (body_p, g); |
| 11559 | |
| 11560 | g = gimple_build_label (label: lab2); |
| 11561 | gimple_seq_add_stmt (body_p, g); |
| 11562 | |
| 11563 | lab1 = create_artificial_label (UNKNOWN_LOCATION); |
| 11564 | lab2 = create_artificial_label (UNKNOWN_LOCATION); |
| 11565 | lab3 = create_artificial_label (UNKNOWN_LOCATION); |
| 11566 | g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2); |
| 11567 | gimple_seq_add_stmt (body_p, g); |
| 11568 | g = gimple_build_label (label: lab1); |
| 11569 | gimple_seq_add_stmt (body_p, g); |
| 11570 | gimple_seq_add_seq (body_p, thr02_list); |
| 11571 | g = gimple_build_goto (dest: lab3); |
| 11572 | gimple_seq_add_stmt (body_p, g); |
| 11573 | g = gimple_build_label (label: lab2); |
| 11574 | gimple_seq_add_stmt (body_p, g); |
| 11575 | gimple_seq_add_seq (body_p, thrn2_list); |
| 11576 | g = gimple_build_label (label: lab3); |
| 11577 | gimple_seq_add_stmt (body_p, g); |
| 11578 | |
| 11579 | g = gimple_build_assign (ivar, size_zero_node); |
| 11580 | gimple_seq_add_stmt (body_p, g); |
| 11581 | gimple_seq_add_stmt (body_p, new_stmt); |
| 11582 | gimple_seq_add_seq (body_p, new_body); |
| 11583 | |
| 11584 | gimple_seq new_dlist = NULL; |
| 11585 | lab1 = create_artificial_label (UNKNOWN_LOCATION); |
| 11586 | lab2 = create_artificial_label (UNKNOWN_LOCATION); |
| 11587 | tree num_threadsm1 = create_tmp_var (integer_type_node); |
| 11588 | g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads, |
| 11589 | integer_minus_one_node); |
| 11590 | gimple_seq_add_stmt (&new_dlist, g); |
| 11591 | g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2); |
| 11592 | gimple_seq_add_stmt (&new_dlist, g); |
| 11593 | g = gimple_build_label (label: lab1); |
| 11594 | gimple_seq_add_stmt (&new_dlist, g); |
| 11595 | gimple_seq_add_seq (&new_dlist, last_list); |
| 11596 | g = gimple_build_label (label: lab2); |
| 11597 | gimple_seq_add_stmt (&new_dlist, g); |
| 11598 | gimple_seq_add_seq (&new_dlist, *dlist); |
| 11599 | *dlist = new_dlist; |
| 11600 | } |
| 11601 | |
| 11602 | /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing |
| 11603 | the addresses of variables to be made private at the surrounding |
| 11604 | parallelism level. Such functions appear in the gimple code stream in two |
| 11605 | forms, e.g. for a partitioned loop: |
| 11606 | |
| 11607 | .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68); |
| 11608 | .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w); |
| 11609 | .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1); |
| 11610 | .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6); |
| 11611 | |
| 11612 | or alternatively, OACC_PRIVATE can appear at the top level of a parallel, |
| 11613 | not as part of a HEAD_MARK sequence: |
| 11614 | |
| 11615 | .UNIQUE (OACC_PRIVATE, 0, 0, &w); |
| 11616 | |
| 11617 | For such stand-alone appearances, the 3rd argument is always 0, denoting |
| 11618 | gang partitioning. */ |
| 11619 | |
| 11620 | static gcall * |
| 11621 | lower_oacc_private_marker (omp_context *ctx) |
| 11622 | { |
| 11623 | if (ctx->oacc_privatization_candidates.length () == 0) |
| 11624 | return NULL; |
| 11625 | |
| 11626 | auto_vec<tree, 5> args; |
| 11627 | |
| 11628 | args.quick_push (obj: build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE)); |
| 11629 | args.quick_push (integer_zero_node); |
| 11630 | args.quick_push (integer_minus_one_node); |
| 11631 | |
| 11632 | int i; |
| 11633 | tree decl; |
| 11634 | FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl) |
| 11635 | { |
| 11636 | gcc_checking_assert (TREE_ADDRESSABLE (decl)); |
| 11637 | tree addr = build_fold_addr_expr (decl); |
| 11638 | args.safe_push (obj: addr); |
| 11639 | } |
| 11640 | |
| 11641 | return gimple_build_call_internal_vec (IFN_UNIQUE, args); |
| 11642 | } |
| 11643 | |
/* Lower code for an OMP loop directive.  Replaces the GIMPLE_OMP_FOR at
   *GSI_P with a GIMPLE_BIND containing the lowered construct: input
   clause setup, the loop itself and its continue/return markers, and the
   exit-clause / reduction epilogue, using the data-sharing machinery of
   context CTX.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (p: gsi_stmt (i: *gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  /* For OpenACC, collect privatization candidates from the directive's
     clause list before anything is rewritten.  */
  if (is_gimple_omp_oacc (stmt: ctx->stmt))
    oacc_privatization_scan_clause_chain (ctx, clauses: gimple_omp_for_clauses (gs: stmt));

  lower_omp (gimple_omp_for_pre_body_ptr (gs: stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (gs: stmt);
  if (!gimple_seq_empty_p (s: omp_for_body)
      && gimple_code (g: gimple_seq_first_stmt (s: omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (p: gimple_seq_first_stmt (s: omp_for_body));
      tree vars = gimple_bind_vars (bind_stmt: inner_bind);
      if (is_gimple_omp_oacc (stmt: ctx->stmt))
	oacc_privatization_scan_decl_chain (ctx, decls: vars);
      gimple_bind_append_vars (bind_stmt: new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (bind_stmt: inner_bind, NULL_TREE);
      if (gimple_bind_block (bind_stmt: inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  /* For a loop combined into an enclosing construct, prepend _looptemp_
     clauses carrying the temporaries the outer construct will look up.  */
  if (gimple_omp_for_combined_into_p (g: stmt))
    {
      omp_extract_for_data (for_stmt: stmt, fd: &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (gs: stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      /* Certain non-rectangular loops with a signed index need three
	 extra temporaries of the index type (count2 = 3).  */
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (gs: stmt, i: fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (gs: stmt, i: fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count++;
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (clauses: gimple_omp_taskreg_clauses (gs: ctx->outer->stmt),
			     kind: OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (clauses: gimple_omp_for_clauses (gs: ctx->simt_stmt),
				 kind: OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      /* Reuse the decls from the enclosing taskreg construct's
		 _looptemp_ clauses.  */
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx: ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					kind: OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     kind: OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (clauses: gimple_omp_for_clauses (gs: stmt), code: OMP_FOR,
				      ccode: OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      /* Task reductions: add a _reductemp_ clause with a temporary
	 pointing at the reduction bookkeeping data and lower the
	 corresponding setup (tred_ilist) / teardown (tred_dlist).  */
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gs: stmt);
      gimple_omp_for_set_clauses (gs: stmt, clauses: c);
      lower_omp_task_reductions (ctx, code: OMP_FOR,
				 clauses: gimple_omp_for_clauses (gs: stmt),
				 start: &tred_ilist, end: &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (var: type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (clauses: gimple_omp_for_clauses_ptr (gs: stmt),
					 ctx);

  /* Emit the input-clause setup (privatization, firstprivate copies,
     reduction init) into BODY and the matching destruction into DLIST.  */
  lower_rec_input_clauses (clauses: gimple_omp_for_clauses (gs: stmt), ilist: &body, dlist: &dlist, ctx,
			   fd: fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (gs: stmt));

  lower_omp (gimple_omp_body_ptr (gs: stmt), ctx);

  gcall *private_marker = NULL;
  if (is_gimple_omp_oacc (stmt: ctx->stmt)
      && !gimple_seq_empty_p (s: omp_for_body))
    private_marker = lower_oacc_private_marker (ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (gs: stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (gs: stmt, i);
      /* A TREE_VEC here describes a non-rectangular bound; lower its
	 second and third operands if not invariant.  */
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (gs: stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (for_stmt: stmt, fd: &fd, NULL);

  if (is_gimple_omp_oacc (stmt: ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (loc: gimple_location (g: stmt),
			  clauses: gimple_omp_for_clauses (gs: stmt), private_marker,
			  head: &oacc_head, tail: &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (fd: &fd, body_p: &body, dlist: &dlist, clist: &clist, ctx);

  /* For worksharing loops, point linear clauses with copy-in at the
     privatized decls and remap the step into the outer context.  */
  if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (gs: stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* Scan (inclusive/exclusive) reductions on a worksharing loop get the
     dedicated scan lowering; otherwise just emit the loop and its body.  */
  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (body_p: &body, dlist: &dlist, stmt, fd: &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (gs: stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (clauses: gimple_omp_for_clauses (gs: stmt), stmt_seqp: &body, clist: &clist, ctx);

  if (clist)
    {
      /* Reduction merges collected in CLIST are wrapped in a
	 GOMP_atomic_start/GOMP_atomic_end pair.  */
      tree fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (label: ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      /* Task-reduction setup code must precede everything emitted so far.  */
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, omp_return: g, body: &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (bind_stmt: new_stmt, vars: ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (bind: new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (bind_stmt: new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Install the lowered sequence and detach the consumed body/pre-body
     from the original statement.  */
  gimple_bind_set_body (bind_stmt: new_stmt, seq: body);
  gimple_omp_set_body (gs: stmt, NULL);
  gimple_omp_for_set_pre_body (gs: stmt, NULL);
}
| 11942 | |
| 11943 | /* Callback for walk_stmts. Check if the current statement only contains |
| 11944 | GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */ |
| 11945 | |
| 11946 | static tree |
| 11947 | check_combined_parallel (gimple_stmt_iterator *gsi_p, |
| 11948 | bool *handled_ops_p, |
| 11949 | struct walk_stmt_info *wi) |
| 11950 | { |
| 11951 | int *info = (int *) wi->info; |
| 11952 | gimple *stmt = gsi_stmt (i: *gsi_p); |
| 11953 | |
| 11954 | *handled_ops_p = true; |
| 11955 | switch (gimple_code (g: stmt)) |
| 11956 | { |
| 11957 | WALK_SUBSTMTS; |
| 11958 | |
| 11959 | case GIMPLE_DEBUG: |
| 11960 | break; |
| 11961 | case GIMPLE_OMP_FOR: |
| 11962 | case GIMPLE_OMP_SECTIONS: |
| 11963 | *info = *info == 0 ? 1 : -1; |
| 11964 | break; |
| 11965 | default: |
| 11966 | *info = -1; |
| 11967 | break; |
| 11968 | } |
| 11969 | return NULL; |
| 11970 | } |
| 11971 | |
/* Context passed to the tree-inline callbacks while building a task
   copy function (see task_copyfn_copy_decl / task_copyfn_remap_type).  */

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The OMP context of the task construct whose copy function is being
     built; its sfield_map is consulted when remapping decls.  */
  omp_context *ctx;
};
| 11981 | |
| 11982 | static tree |
| 11983 | task_copyfn_copy_decl (tree var, copy_body_data *cb) |
| 11984 | { |
| 11985 | struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb; |
| 11986 | |
| 11987 | if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var)) |
| 11988 | return create_tmp_var (TREE_TYPE (var)); |
| 11989 | |
| 11990 | return var; |
| 11991 | } |
| 11992 | |
/* Build a remapped copy of the RECORD_TYPE ORIG_TYPE for a task copy
   function.  Each field is copied with its type remapped through
   TCCTX->cb and any trees in its size/offset expressions rewritten via
   copy_tree_body_r; the mapping from each original FIELD_DECL to its
   copy is recorded in TCCTX->cb.decl_map.  Returns the laid-out new
   record type.  */

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  /* Create the new record and give it the original type's name.  */
  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (g: tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      /* Copy the field and remap everything that may refer to decls of
	 the source function (type, size, unit size, offset).  */
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), id: &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (k: f, v: new_f);
    }
  /* The field chain was built in reverse; restore source order before
     laying the record out.  */
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
| 12021 | |
/* Create task copyfn: populate the copy function that the runtime calls
   to initialize a task's data block.  The function receives two
   arguments: ARG, a pointer to the task's own record, and SARG, a
   pointer to the sender record filled in by the task's creator.  The
   body copies shared-variable pointers and copy-constructs firstprivate
   variables from *SARG into *ARG.  TASK_STMT is the GIMPLE_OMP_TASK
   statement, CTX its lowering context.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (g: task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (gs: task_stmt);
  task_cpyfns.safe_push (obj: task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (new_cfun: child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (g: task_stmt);

  /* Remap src and dst argument types if needed: if any field has a
     variably modified type (VLAs), the record must be rebuilt with
     sizes/offsets expressed in terms of the copy function's decls.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (s: &tcctx, c: '\0', n: sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (decl: tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (tcctx: &tcctx, orig_type: record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (tcctx: &tcctx, orig_type: srecord_type);
    }
  else
    /* decl_map == NULL is used below as "no remapping was needed".  */
    tcctx.cb.decl_map = NULL;

  /* Adjust the two parameter types to (possibly remapped) record
     pointers.  */
  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (gs: task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (k: decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (k: sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (obj: src, field: sf);
	  /* *p = sarg->sf;  */
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (gs: task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* Shared-firstprivate vars are keyed by &DECL_UID, matching how
	   they were entered into the field maps.  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (k: f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (k: sf);
	/* arg->f = sarg->sf;  */
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (obj: src, field: sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (obj: dst, field: f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	/* Strip array-section wrapping to get at the base decl used as
	   the field-map key.  */
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (k: f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (k: sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (obj: src, field: sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (obj: dst, field: f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLA firstprivates are handled in the last pass below.  */
	if (is_variable_sized (expr: decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (k: f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (k: sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (obj: src, field: sf);
	    if (use_pointer_for_field (decl, NULL)
		|| omp_privatize_by_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (obj: dst, field: f);
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  {
	    /* With an allocate clause, allocate the storage via
	       GOMP_alloc first and copy-construct into it.  */
	    if (ctx->allocate_map)
	      if (tree *allocatorp = ctx->allocate_map->get (k: decl))
		{
		  tree allocator = *allocatorp;
		  HOST_WIDE_INT ialign = 0;
		  if (TREE_CODE (allocator) == TREE_LIST)
		    {
		      ialign = tree_to_uhwi (TREE_VALUE (allocator));
		      allocator = TREE_PURPOSE (allocator);
		    }
		  if (TREE_CODE (allocator) != INTEGER_CST)
		    {
		      /* Non-constant allocator: read it from the sender
			 record.  */
		      n = splay_tree_lookup (ctx->sfield_map,
					     (splay_tree_key) allocator);
		      allocator = (tree) n->value;
		      if (tcctx.cb.decl_map)
			allocator = *tcctx.cb.decl_map->get (k: allocator);
		      tree a = build_simple_mem_ref_loc (loc, sarg);
		      allocator = omp_build_component_ref (obj: a, field: allocator);
		    }
		  allocator = fold_convert (pointer_sized_int_node, allocator);
		  tree a = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ALLOC);
		  tree align = build_int_cst (size_type_node,
					      MAX (ialign,
						   DECL_ALIGN_UNIT (decl)));
		  tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
		  tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
						  allocator);
		  ptr = fold_convert (TREE_TYPE (dst), ptr);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
		  append_to_statement_list (t, &list);
		  dst = build_simple_mem_ref_loc (loc, dst);
		}
	    t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  }
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (k: f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (k: sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (obj: src, field: sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (obj: dst, field: f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  Copy-construct the data via
     the pointer field, then point the task's pointer field at the new
     copy.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (gs: task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (expr: decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (k: f);
	  /* A VLA firstprivate is accessed through a pointer; its value
	     expr is *ptr_decl.  */
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (k: sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (obj: src, field: sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (obj: dst, field: f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  /* arg->ptr_field = &arg->data_field;  */
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (k: df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (obj: ptr, field: df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
| 12340 | |
/* Lower OMP_CLAUSE_DEPEND clauses in *PCLAUSES into a single depend
   array passed to the runtime.  CNT[0] counts out/inout, CNT[1]
   mutexinoutset, CNT[2] in, CNT[3] depobj and CNT[4] inoutset
   dependences.  The array uses the legacy 2-slot header (total count,
   out/inout count) when only in/out/inout appear, or the extended
   5-slot header (0 marker, total, then three per-kind counts) when
   other kinds are present.  Initialization statements are appended to
   *ISEQ and a trailing clobber of the array to *OSEQ; a new
   OMP_CLAUSE_DEPEND_LAST clause holding the array's address is
   prepended to *PCLAUSES.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (clauses: *pclauses, kind: OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  /* Count the dependences per kind.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_INOUTSET:
	  cnt[4]++;
	  break;
	default:
	  gcc_unreachable ();
	}
  /* Kinds beyond in/out/inout require the extended 5-slot header.  */
  if (cnt[1] || cnt[3] || cnt[4])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
  /* inoutset entries take two extra trailing slots each (address +
     GOMP_DEPEND_INOUTSET tag), pointed to from their main slot.  */
  size_t inoutidx = total + idx;
  tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Extended header starts with a 0 marker so the runtime can
	 distinguish it from the legacy format.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Legacy header stores just cnt[0]; extended header stores
     cnt[0..2].  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Emit the addresses grouped by kind, in the cnt[] order.  */
  for (i = 0; i < 5; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_INOUTSET:
		if (i != 4)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    if (i == 4)
	      {
		/* inoutset main slot holds the address of its two-slot
		   trailing entry, filled in below.  */
		t = build4 (ARRAY_REF, ptr_type_node, array,
			    size_int (inoutidx), NULL_TREE, NULL_TREE);
		t = build_fold_addr_expr (t);
		inoutidx += 2;
	      }
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Fill in the trailing two-slot entries for inoutset dependences.  */
  if (cnt[4])
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	  && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
	{
	  tree t = OMP_CLAUSE_DECL (c);
	  t = fold_convert (ptr_type_node, t);
	  gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (r, t);
	  gimple_seq_add_stmt (iseq, g);
	  t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
	  r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
		      NULL_TREE, NULL_TREE);
	  g = gimple_build_assign (r, t);
	  gimple_seq_add_stmt (iseq, g);
	}

  /* Replace the individual depend clauses with one DEPEND_LAST clause
     carrying the array's address.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* Clobber the array after the region so its storage can be reused.  */
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
| 12480 | |
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  Lowers
   data-sharing clauses into sender/receiver record accesses, wraps the
   directive in GIMPLE_BIND statements carrying the generated setup and
   teardown code, and (for tasks) builds the copy function and lowers
   depend clauses.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (i: *gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (g: stmt);

  clauses = gimple_omp_taskreg_clauses (gs: stmt);
  if (gimple_code (g: stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (g: stmt))
    {
      /* taskwait with depend has no body of its own.  */
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (p: gimple_seq_first_stmt (s: gimple_omp_body (gs: stmt)));
      par_body = gimple_bind_body (gs: par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (g: stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (g: stmt))
    {
      /* Detect a parallel whose body is a single worksharing construct,
	 so expansion can use the combined runtime entry points.  */
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (s: &wi, c: 0, n: sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (g: stmt, combined_p: true);
    }
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (g: stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, kind: OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (pclauses: gimple_omp_task_clauses_ptr (gs: stmt),
			    iseq: &dep_ilist, oseq: &dep_olist);
    }

  if (gimple_code (g: stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (g: stmt))
    {
      /* For taskwait only the depend lowering matters; wrap it and
	 return early.  */
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_ilist);
	  gimple_bind_add_stmt (bind_stmt: dep_bind, stmt);
	  gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  /* A sender record implies firstprivate data that must be copied by a
     runtime-invoked copy function.  */
  if (ctx->srecord_type)
    create_task_copyfn (task_stmt: as_a <gomp_task *> (p: stmt), ctx);

  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (g: ctx->stmt)
       && omp_find_clause (clauses: gimple_omp_task_clauses (gs: ctx->stmt),
			   kind: OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (clauses: gimple_omp_parallel_clauses (gs: stmt),
			      kind: OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, code: is_task_ctx (ctx) ? OMP_TASKLOOP
							 : OMP_PARALLEL,
				 clauses: gimple_omp_taskreg_clauses (gs: ctx->stmt),
				 start: &tskred_ilist, end: &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  lower_rec_input_clauses (clauses, ilist: &par_ilist, dlist: &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (g: stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, stmt_seqp: &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (bind: par_bind);
  record_vars_into (gimple_bind_vars (bind_stmt: par_bind), child_fn);

  if (ctx->record_type)
    {
      /* The sender decl is the stack object through which shared and
	 firstprivate data is passed to the outlined function.  */
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (gs: stmt, data_arg: ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, ilist: &ilist, olist: &olist, ctx);
  lower_send_shared_vars (ilist: &ilist, olist: &olist, ctx);

  if (ctx->record_type)
    {
      /* Clobber the sender record after the region so its stack slot
	 can be reused.  */
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (label: ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (body: new_body);
  if (gimple_code (g: stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (gs: stmt, body: new_body);

  /* Wrap the directive, the send-side setup (ilist) and teardown
     (olist) in a bind; nest that inside dep_bind when depend or task
     reduction code was generated.  */
  if (dep_bind && gimple_bind_block (bind_stmt: par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (bind_stmt: par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind_stmt: bind, seq: ilist);
  gimple_bind_add_stmt (bind_stmt: bind, stmt);
  gimple_bind_add_seq (bind_stmt: bind, seq: olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_ilist);
      gimple_bind_add_seq (bind_stmt: dep_bind, seq: tskred_ilist);
      gimple_bind_add_stmt (bind_stmt: dep_bind, stmt: bind);
      gimple_bind_add_seq (bind_stmt: dep_bind, seq: tskred_olist);
      gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
| 12657 | |
| 12658 | /* Set EXPR as the hostaddr expression that should result from the clause C |
| 12659 | in the target statement STMT. Returns the tree that should be |
| 12660 | passed as the hostaddr (a pointer to the array containing the expanded |
| 12661 | hostaddrs and sizes of the clause). */ |
| 12662 | |
| 12663 | static tree |
| 12664 | lower_omp_map_iterator_expr (tree expr, tree c, gomp_target *stmt) |
| 12665 | { |
| 12666 | if (!OMP_CLAUSE_HAS_ITERATORS (c)) |
| 12667 | return expr; |
| 12668 | |
| 12669 | tree iterator = OMP_CLAUSE_ITERATORS (c); |
| 12670 | tree elems = TREE_VEC_ELT (iterator, 7); |
| 12671 | tree index = TREE_VEC_ELT (iterator, 8); |
| 12672 | gimple_seq *loop_body_p = enter_omp_iterator_loop_context (c, stmt); |
| 12673 | |
| 12674 | /* IN LOOP BODY: */ |
| 12675 | /* elems[idx] = <expr>; */ |
| 12676 | tree lhs = build4 (ARRAY_REF, ptr_type_node, elems, index, |
| 12677 | NULL_TREE, NULL_TREE); |
| 12678 | tree mod_expr = build2_loc (OMP_CLAUSE_LOCATION (c), code: MODIFY_EXPR, |
| 12679 | void_type_node, arg0: lhs, arg1: expr); |
| 12680 | gimplify_and_add (mod_expr, loop_body_p); |
| 12681 | exit_omp_iterator_loop_context (c); |
| 12682 | |
| 12683 | return build_fold_addr_expr_with_type (elems, ptr_type_node); |
| 12684 | } |
| 12685 | |
| 12686 | /* Set SIZE as the size expression that should result from the clause C |
| 12687 | in the target statement STMT. Returns the tree that should be |
| 12688 | passed as the clause size (a size_int with the value SIZE_MAX, indicating |
| 12689 | that the clause uses an iterator). */ |
| 12690 | |
| 12691 | static tree |
| 12692 | lower_omp_map_iterator_size (tree size, tree c, gomp_target *stmt) |
| 12693 | { |
| 12694 | if (!OMP_CLAUSE_HAS_ITERATORS (c)) |
| 12695 | return size; |
| 12696 | |
| 12697 | tree iterator = OMP_CLAUSE_ITERATORS (c); |
| 12698 | tree elems = TREE_VEC_ELT (iterator, 7); |
| 12699 | tree index = TREE_VEC_ELT (iterator, 8); |
| 12700 | gimple_seq *loop_body_p = enter_omp_iterator_loop_context (c, stmt); |
| 12701 | |
| 12702 | /* IN LOOP BODY: */ |
| 12703 | /* elems[idx+1] = <size>; */ |
| 12704 | tree lhs = build4 (ARRAY_REF, ptr_type_node, elems, |
| 12705 | size_binop (PLUS_EXPR, index, size_int (1)), |
| 12706 | NULL_TREE, NULL_TREE); |
| 12707 | tree mod_expr = build2_loc (OMP_CLAUSE_LOCATION (c), code: MODIFY_EXPR, |
| 12708 | void_type_node, arg0: lhs, arg1: size); |
| 12709 | gimplify_and_add (mod_expr, loop_body_p); |
| 12710 | exit_omp_iterator_loop_context (c); |
| 12711 | |
| 12712 | return size_int (SIZE_MAX); |
| 12713 | } |
| 12714 | |
| 12715 | /* Lower the GIMPLE_OMP_TARGET in the current statement |
| 12716 | in GSI_P. CTX holds context information for the directive. */ |
| 12717 | |
| 12718 | static void |
| 12719 | lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx) |
| 12720 | { |
| 12721 | tree clauses; |
| 12722 | tree child_fn, t, c; |
| 12723 | gomp_target *stmt = as_a <gomp_target *> (p: gsi_stmt (i: *gsi_p)); |
| 12724 | gbind *tgt_bind, *bind, *dep_bind = NULL; |
| 12725 | gimple_seq tgt_body, olist, ilist, fplist, new_body; |
| 12726 | location_t loc = gimple_location (g: stmt); |
| 12727 | bool offloaded, data_region; |
| 12728 | unsigned int map_cnt = 0; |
  /* IN_REDUCTION clauses spliced out of (and re-appended to the end of)
     the target's clause chain for GF_OMP_TARGET_KIND_REGION below.  */
  tree in_reduction_clauses = NULL_TREE;

  /* State for lang-hook-driven deep mapping: the number of extra map
     entries (possibly only known at run time), the component in the
     sender record holding the deep-mapped pointers, and the running
     offsets used while filling the data/sizes/kinds arrays.  */
  tree deep_map_cnt = NULL_TREE;
  tree deep_map_data = NULL_TREE;
  tree deep_map_offset_data = NULL_TREE;
  tree deep_map_offset = NULL_TREE;

  /* NOTE(review): many call sites below carry "name:" argument
     annotations (e.g. "g: stmt", "by_ref: true") which are not valid
     C++; they look like editor inlay hints baked into this copy of the
     file -- verify against the pristine source.  */
  offloaded = is_gimple_omp_offloaded (stmt);
  /* Classify the directive into offloaded and/or data region.  For a
     'target' region, first move every IN_REDUCTION clause to the end of
     the clause chain, remembering them in IN_REDUCTION_CLAUSES.  */
  switch (gimple_omp_target_kind (g: stmt))
    {
    case GF_OMP_TARGET_KIND_REGION:
      tree *p, *q;
      q = &in_reduction_clauses;
      for (p = gimple_omp_target_clauses_ptr (gs: stmt); *p; )
	if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
	  {
	    /* Unlink the clause and append it to IN_REDUCTION_CLAUSES.  */
	    *q = *p;
	    q = &OMP_CLAUSE_CHAIN (*q);
	    *p = OMP_CLAUSE_CHAIN (*p);
	  }
	else
	  p = &OMP_CLAUSE_CHAIN (*p);
      *q = NULL_TREE;
      /* Re-append the collected IN_REDUCTION clauses at the end.  */
      *p = in_reduction_clauses;
      /* FALLTHRU */
    case GF_OMP_TARGET_KIND_UPDATE:
    case GF_OMP_TARGET_KIND_ENTER_DATA:
    case GF_OMP_TARGET_KIND_EXIT_DATA:
    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
    case GF_OMP_TARGET_KIND_OACC_KERNELS:
    case GF_OMP_TARGET_KIND_OACC_SERIAL:
    case GF_OMP_TARGET_KIND_OACC_UPDATE:
    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
    case GF_OMP_TARGET_KIND_OACC_DECLARE:
    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
      data_region = false;
      break;
    case GF_OMP_TARGET_KIND_DATA:
    case GF_OMP_TARGET_KIND_OACC_DATA:
    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
      data_region = true;
      break;
    default:
      gcc_unreachable ();
    }

  /* Ensure that requires map is written via output_offload_tables, even if only
     'target (enter/exit) data' is used in the translation unit.  */
  if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
    g->have_offload = true;
| 12782 | |
  clauses = gimple_omp_target_clauses (gs: stmt);

  /* 'depend' and 'in_reduction' clauses need statements emitted around
     the whole construct; collect them into DEP_ILIST/DEP_OLIST inside a
     fresh bind (DEP_BIND).  */
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  bool has_depend = omp_find_clause (clauses, kind: OMP_CLAUSE_DEPEND) != NULL_TREE;
  if (has_depend || in_reduction_clauses)
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      if (has_depend)
	lower_depend_clauses (pclauses: gimple_omp_target_clauses_ptr (gs: stmt),
			      iseq: &dep_ilist, oseq: &dep_olist);
      if (in_reduction_clauses)
	lower_rec_input_clauses (clauses: in_reduction_clauses, ilist: &dep_ilist, dlist: &dep_olist,
				 ctx, NULL);
    }

  /* For an offloaded region the body sits inside a GIMPLE_BIND; a plain
     data region uses the statement's body directly.  */
  tgt_bind = NULL;
  tgt_body = NULL;
  if (offloaded)
    {
      tgt_bind = gimple_seq_first_stmt_as_a_bind (s: gimple_omp_body (gs: stmt));
      tgt_body = gimple_bind_body (gs: tgt_bind);
    }
  else if (data_region)
    tgt_body = gimple_omp_body (gs: stmt);
  child_fn = ctx->cb.dst_fn;

  push_gimplify_context ();
  fplist = NULL;
| 12813 | |
  /* First pass over the clauses: count how many map entries are needed
     (MAP_CNT) and install DECL_VALUE_EXPRs that redirect the region's
     variables to their received copies.  */
  ilist = NULL;
  olist = NULL;
  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	tree var, x;

      default:
	break;
      case OMP_CLAUSE_MAP:
#if CHECKING_P
	/* First check what we're prepared to handle in the following.  */
	switch (OMP_CLAUSE_MAP_KIND (c))
	  {
	  case GOMP_MAP_ALLOC:
	  case GOMP_MAP_TO:
	  case GOMP_MAP_FROM:
	  case GOMP_MAP_TOFROM:
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_TO_PSET:
	  case GOMP_MAP_DELETE:
	  case GOMP_MAP_RELEASE:
	  case GOMP_MAP_ALWAYS_TO:
	  case GOMP_MAP_ALWAYS_FROM:
	  case GOMP_MAP_ALWAYS_TOFROM:
	  case GOMP_MAP_FORCE_PRESENT:
	  case GOMP_MAP_ALWAYS_PRESENT_FROM:
	  case GOMP_MAP_ALWAYS_PRESENT_TO:
	  case GOMP_MAP_ALWAYS_PRESENT_TOFROM:

	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_STRUCT:
	  case GOMP_MAP_STRUCT_UNORD:
	  case GOMP_MAP_ALWAYS_POINTER:
	  case GOMP_MAP_ATTACH:
	  case GOMP_MAP_DETACH:
	  case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	  case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	    break;
	  /* The following map kinds must only appear on OpenACC
	     constructs.  */
	  case GOMP_MAP_IF_PRESENT:
	  case GOMP_MAP_FORCE_ALLOC:
	  case GOMP_MAP_FORCE_TO:
	  case GOMP_MAP_FORCE_FROM:
	  case GOMP_MAP_FORCE_TOFROM:
	  case GOMP_MAP_FORCE_DEVICEPTR:
	  case GOMP_MAP_DEVICE_RESIDENT:
	  case GOMP_MAP_LINK:
	  case GOMP_MAP_FORCE_DETACH:
	    gcc_assert (is_gimple_omp_oacc (stmt));
	    break;
	  default:
	    gcc_unreachable ();
	  }
#endif
	/* FALLTHRU */
      case OMP_CLAUSE_TO:
      case OMP_CLAUSE_FROM:
      oacc_firstprivate:	/* Reached via goto for OpenACC 'firstprivate'.  */
	var = OMP_CLAUSE_DECL (c);
| 12874 | { |
| 12875 | tree = lang_hooks.decls.omp_deep_mapping_cnt (stmt, c, &ilist); |
| 12876 | if (extra != NULL_TREE && deep_map_cnt != NULL_TREE) |
| 12877 | deep_map_cnt = fold_build2_loc (OMP_CLAUSE_LOCATION (c), PLUS_EXPR, |
| 12878 | size_type_node, deep_map_cnt, |
| 12879 | extra); |
| 12880 | else if (extra != NULL_TREE) |
| 12881 | deep_map_cnt = extra; |
| 12882 | } |
| 12883 | |
	if (deep_map_cnt
	    && OMP_CLAUSE_HAS_ITERATORS (c))
	  sorry ("iterators used together with deep mapping are not "
		 "supported yet" );

	/* Operands that are not a bare DECL (e.g. array sections) just
	   count as one map entry, except for zero-bias sections and
	   firstprivate pointers.  */
	if (!DECL_P (var))
	  {
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		    && (OMP_CLAUSE_MAP_KIND (c)
			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
	      map_cnt++;
	    continue;
	  }

	/* Variable-sized decls are accessed through a pointer held in
	   their DECL_VALUE_EXPR; operate on that underlying pointer
	   decl instead.  */
	if (DECL_SIZE (var)
	    && !poly_int_tree_p (DECL_SIZE (var)))
	  {
	    tree var2 = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
	    var2 = TREE_OPERAND (var2, 0);
	    gcc_assert (DECL_P (var2));
	    var = var2;
	  }

	/* Firstprivate pointers/references: no map entry is counted
	   here.  For arrays, reroute the region's variable through a
	   pointer temporary via DECL_VALUE_EXPR.  */
	if (offloaded
	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	  {
	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		/* Offloadable globals are accessed directly on the
		   device; nothing to redirect.  */
		if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl: var, ctx))
		    && varpool_node::get_create (decl: var)->offloadable)
		  continue;

		tree type = build_pointer_type (TREE_TYPE (var));
		tree new_var = lookup_decl (var, ctx);
		x = create_tmp_var_raw (type, get_name (new_var));
		gimple_add_tmp_var (x);
		x = build_simple_mem_ref (x);
		SET_DECL_VALUE_EXPR (new_var, x);
		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	      }
	    continue;
	  }

	/* (De)attachment on 'target' is keyed by the clause itself, not
	   by the decl.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	    && is_omp_target (stmt))
	  {
	    gcc_assert (maybe_lookup_field (c, ctx));
	    map_cnt++;
	    continue;
	  }

	if (!maybe_lookup_field (var, ctx))
	  continue;

	/* Don't remap compute constructs' reduction variables, because the
	   intermediate result must be local to each gang.  */
	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			   && is_gimple_omp_oacc (stmt: ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
	  {
	    /* Redirect the region's variable to the copy received from
	       the runtime (via the .omp_data_arr record).  */
	    x = build_receiver_ref (var, by_ref: true, ctx);
	    tree new_var = lookup_decl (var, ctx);

	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      x = build_simple_mem_ref (x);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	      {
		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
		if (omp_privatize_by_reference (decl: new_var)
		    && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
			|| DECL_BY_REFERENCE (var)))
		  {
		    /* Create a local object to hold the instance
		       value.  */
		    tree type = TREE_TYPE (TREE_TYPE (new_var));
		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
		    tree inst = create_tmp_var (type, id);
		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
		    x = build_fold_addr_expr (inst);
		  }
		gimplify_assign (new_var, x, &fplist);
	      }
	    else if (DECL_P (new_var))
	      {
		SET_DECL_VALUE_EXPR (new_var, x);
		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	      }
	    else
	      gcc_unreachable ();
	  }
	map_cnt++;
	break;

      case OMP_CLAUSE_FIRSTPRIVATE:
      omp_firstprivate_recv:	/* Also reached via goto from
				   HAS_DEVICE_ADDR below.  */
	gcc_checking_assert (offloaded);
	if (is_gimple_omp_oacc (stmt: ctx->stmt))
	  {
	    /* No 'firstprivate' clauses on OpenACC 'kernels'.  */
	    gcc_checking_assert (!is_oacc_kernels (ctx));
	    /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	    gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	    goto oacc_firstprivate;
	  }
	map_cnt++;
	/* Non-register aggregates are received by reference; redirect
	   the variable through a DECL_VALUE_EXPR.  */
	var = OMP_CLAUSE_DECL (c);
	if (!omp_privatize_by_reference (decl: var)
	    && !is_gimple_reg_type (TREE_TYPE (var)))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    if (is_variable_sized (expr: var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (var: pvar, ctx);
		x = build_fold_indirect_ref (new_pvar);
		TREE_THIS_NOTRAP (x) = 1;
	      }
	    else
	      x = build_receiver_ref (var, by_ref: true, ctx);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	/* Fortran array descriptors: firstprivate of data + attach.  */
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
	    && lang_hooks.decls.omp_array_data (var, true))
	  map_cnt += 2;
	break;

      case OMP_CLAUSE_PRIVATE:
	gcc_checking_assert (offloaded);
	if (is_gimple_omp_oacc (stmt: ctx->stmt))
	  {
	    /* No 'private' clauses on OpenACC 'kernels'.  */
	    gcc_checking_assert (!is_oacc_kernels (ctx));
	    /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	    gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	    break;
	  }
	/* Only variable-sized privates need handling here: point the
	   new decl at the privatized backing pointer.  */
	var = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (expr: var))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree pvar = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
	    pvar = TREE_OPERAND (pvar, 0);
	    gcc_assert (DECL_P (pvar));
	    tree new_pvar = lookup_decl (var: pvar, ctx);
	    x = build_fold_indirect_ref (new_pvar);
	    TREE_THIS_NOTRAP (x) = 1;
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;

      case OMP_CLAUSE_USE_DEVICE_PTR:
      case OMP_CLAUSE_USE_DEVICE_ADDR:
      case OMP_CLAUSE_HAS_DEVICE_ADDR:
      case OMP_CLAUSE_IS_DEVICE_PTR:
	var = OMP_CLAUSE_DECL (c);
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	  {
	    /* Strip dereferences/array refs down to the base decl;
	       array descriptors take the firstprivate-receive path.  */
	    while (TREE_CODE (var) == INDIRECT_REF
		   || TREE_CODE (var) == ARRAY_REF)
	      var = TREE_OPERAND (var, 0);
	    if (lang_hooks.decls.omp_array_data (var, true))
	      goto omp_firstprivate_recv;
	  }
	map_cnt++;
	if (is_variable_sized (expr: var))
	  {
	    tree new_var = lookup_decl (var, ctx);
	    tree pvar = DECL_VALUE_EXPR (var);
	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
	    pvar = TREE_OPERAND (pvar, 0);
	    gcc_assert (DECL_P (pvar));
	    tree new_pvar = lookup_decl (var: pvar, ctx);
	    x = build_fold_indirect_ref (new_pvar);
	    TREE_THIS_NOTRAP (x) = 1;
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		  && !omp_privatize_by_reference (decl: var)
		  && !omp_is_allocatable_or_ptr (decl: var)
		  && !lang_hooks.decls.omp_array_data (var, true))
		 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	  {
	    /* Access through a pointer temporary (dereferenced via the
	       DECL_VALUE_EXPR).  */
	    tree new_var = lookup_decl (var, ctx);
	    tree type = build_pointer_type (TREE_TYPE (var));
	    x = create_tmp_var_raw (type, get_name (new_var));
	    gimple_add_tmp_var (x);
	    x = build_simple_mem_ref (x);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	else
	  {
	    /* Same-typed temporary holding the device pointer/addr.  */
	    tree new_var = lookup_decl (var, ctx);
	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
	    gimple_add_tmp_var (x);
	    SET_DECL_VALUE_EXPR (new_var, x);
	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
	  }
	break;
      case OMP_CLAUSE_DEVICE_TYPE:
	/* FIXME: Ensure that 'nohost' also has not implied before that
	   'g->have_offload = true' or an implicit declare target.  */
	if (OMP_CLAUSE_DEVICE_TYPE_KIND (c) != OMP_CLAUSE_DEVICE_TYPE_ANY)
	  sorry_at (OMP_CLAUSE_LOCATION (c),
		    "only the %<device_type(any)%> is supported" );
	break;
      }
| 13111 | |
  /* Lower the region's body; TARGET_NESTING_LEVEL tells nested lowering
     that it runs inside an offloaded region.  */
  if (offloaded)
    {
      target_nesting_level++;
      lower_omp (&tgt_body, ctx);
      target_nesting_level--;
    }
  else if (data_region)
    lower_omp (&tgt_body, ctx);

  if (offloaded)
    {
      /* Declare all the variables created by mapping and the variables
	 declared in the scope of the target body.  */
      record_vars_into (ctx->block_vars, child_fn);
      maybe_remove_omp_member_access_dummy_vars (bind: tgt_bind);
      record_vars_into (gimple_bind_vars (bind_stmt: tgt_bind), child_fn);
    }
| 13129 | |
  if (ctx->record_type)
    {
      if (deep_map_cnt && TREE_CODE (deep_map_cnt) == INTEGER_CST)
	/* map_cnt = map_cnt + tree_to_hwi (deep_map_cnt); */
	/* deep_map_cnt = NULL_TREE; */
	gcc_unreachable ();
      else if (deep_map_cnt)
	{
	  /* Runtime-only count: snapshot it into a temporary.  */
	  gcc_assert (flexible_array_type_p (ctx->record_type));
	  tree n = create_tmp_var_raw (size_type_node, "nn_map" );
	  gimple_add_tmp_var (n);
	  gimplify_assign (n, deep_map_cnt, &ilist);
	  deep_map_cnt = n;
	}
      /* With deep mapping the sender record and the sizes/kinds arrays
	 are malloc'ed at run time, so the decls are pointers; otherwise
	 the record itself plus two fixed-size arrays are used.  */
      ctx->sender_decl
	= create_tmp_var (deep_map_cnt ? build_pointer_type (ctx->record_type)
			  : ctx->record_type, ".omp_data_arr" );
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      /* The data arg is a TREE_VEC: {data, sizes, kinds[, total count]}.  */
      t = make_tree_vec (deep_map_cnt ? 4 : 3);
      TREE_VEC_ELT (t, 0) = ctx->sender_decl;
      TREE_VEC_ELT (t, 1)
	= create_tmp_var (deep_map_cnt
			  ? build_pointer_type (size_type_node)
			  : build_array_type_nelts (size_type_node, map_cnt),
			  ".omp_data_sizes" );
      DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
      /* Start out static; cleared later if any entry is not a
	 compile-time constant.  */
      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
      tree tkind_type = short_unsigned_type_node;
      int talign_shift = 8;
      TREE_VEC_ELT (t, 2)
	= create_tmp_var (deep_map_cnt
			  ? build_pointer_type (tkind_type)
			  : build_array_type_nelts (tkind_type, map_cnt),
			  ".omp_data_kinds" );
      DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
      TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
      TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
      gimple_omp_target_set_data_arg (omp_target_stmt: stmt, data_arg: t);

      if (deep_map_cnt)
	{
	  /* Total entry count = deep-mapped + statically counted.  */
	  tree tmp, size;
	  size = create_tmp_var (size_type_node, NULL);
	  DECL_NAMELESS (size) = 1;
	  gimplify_assign (size,
			   fold_build2_loc (UNKNOWN_LOCATION, PLUS_EXPR,
					    size_type_node, deep_map_cnt,
					    build_int_cst (size_type_node,
							   map_cnt)), &ilist);
	  TREE_VEC_ELT (t, 3) = size;

	  /* malloc the sender record (including DEEP_MAP_CNT trailing
	     pointers) and the sizes and kinds arrays.  */
	  tree call = builtin_decl_explicit (fncode: BUILT_IN_MALLOC);
	  size = fold_build2_loc (UNKNOWN_LOCATION, MULT_EXPR,
				  size_type_node, deep_map_cnt,
				  TYPE_SIZE_UNIT (ptr_type_node));
	  size = fold_build2_loc (UNKNOWN_LOCATION, PLUS_EXPR,
				  size_type_node, size,
				  TYPE_SIZE_UNIT (ctx->record_type));
	  tmp = build_call_expr_loc (input_location, call, 1, size);
	  gimplify_assign (ctx->sender_decl, tmp, &ilist);

	  size = fold_build2_loc (UNKNOWN_LOCATION, MULT_EXPR,
				  size_type_node, TREE_VEC_ELT (t, 3),
				  TYPE_SIZE_UNIT (size_type_node));
	  tmp = build_call_expr_loc (input_location, call, 1, size);
	  gimplify_assign (TREE_VEC_ELT (t, 1), tmp, &ilist);

	  size = fold_build2_loc (UNKNOWN_LOCATION, MULT_EXPR,
				  size_type_node, TREE_VEC_ELT (t, 3),
				  TYPE_SIZE_UNIT (tkind_type));
	  tmp = build_call_expr_loc (input_location, call, 1, size);
	  gimplify_assign (TREE_VEC_ELT (t, 2), tmp, &ilist);
	  /* Find the record's last field, the flexible trailing array
	     holding the deep-mapped data.  */
	  tree field = TYPE_FIELDS (TREE_TYPE (TREE_TYPE (ctx->sender_decl)));
	  for ( ; DECL_CHAIN (field) != NULL_TREE; field = DECL_CHAIN (field))
	    ;
	  /* NOTE(review): this assert looks truncated -- TREE_CODE (...)
	     by itself is nearly vacuous (only ERROR_MARK is 0); it
	     presumably should compare against a specific tree code.
	     Confirm against the pristine source.  */
	  gcc_assert (TREE_CODE (TREE_TYPE (field)));
	  tmp = build_fold_indirect_ref (ctx->sender_decl);
	  deep_map_data = omp_build_component_ref (obj: tmp, field);
	  /* Running offsets for the lang hook that fills the arrays:
	     data entries start at 0, sizes/kinds entries after the
	     MAP_CNT statically counted ones.  */
	  deep_map_offset_data = create_tmp_var_raw (size_type_node,
						     "map_offset_data" );
	  deep_map_offset = create_tmp_var_raw (size_type_node, "map_offset" );
	  gimple_add_tmp_var (deep_map_offset_data);
	  gimple_add_tmp_var (deep_map_offset);
	  gimplify_assign (deep_map_offset_data, build_int_cst (size_type_node,
								0), &ilist);
	  gimplify_assign (deep_map_offset, build_int_cst (size_type_node,
							   map_cnt), &ilist);
	}

      /* Constructors for the static sizes/kinds arrays, filled in by
	 the second pass over the clauses below.  */
      vec<constructor_elt, va_gc> *vsize;
      vec<constructor_elt, va_gc> *vkind;
      vec_alloc (v&: vsize, nelems: map_cnt);
      vec_alloc (v&: vkind, nelems: map_cnt);
      unsigned int map_idx = 0;
| 13226 | |
| 13227 | for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) |
| 13228 | switch (OMP_CLAUSE_CODE (c)) |
| 13229 | { |
| 13230 | tree ovar, nc, s, purpose, var, x, type; |
| 13231 | unsigned int talign; |
| 13232 | |
| 13233 | default: |
| 13234 | break; |
| 13235 | |
| 13236 | case OMP_CLAUSE_MAP: |
| 13237 | case OMP_CLAUSE_TO: |
| 13238 | case OMP_CLAUSE_FROM: |
| 13239 | oacc_firstprivate_map: |
| 13240 | nc = c; |
| 13241 | ovar = OMP_CLAUSE_DECL (c); |
| 13242 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 13243 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER |
| 13244 | || (OMP_CLAUSE_MAP_KIND (c) |
| 13245 | == GOMP_MAP_FIRSTPRIVATE_REFERENCE))) |
| 13246 | break; |
| 13247 | if (deep_map_cnt) |
| 13248 | { |
| 13249 | unsigned HOST_WIDE_INT tkind2; |
| 13250 | switch (OMP_CLAUSE_CODE (c)) |
| 13251 | { |
| 13252 | case OMP_CLAUSE_MAP: |
| 13253 | tkind2 = OMP_CLAUSE_MAP_KIND (c); |
| 13254 | if (OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c) |
| 13255 | && (((tkind2 & GOMP_MAP_FLAG_SPECIAL_BITS) |
| 13256 | & ~GOMP_MAP_IMPLICIT) |
| 13257 | == 0)) |
| 13258 | { |
| 13259 | /* If this is an implicit map, and the GOMP_MAP_IMPLICIT |
| 13260 | bits are not interfered by other special bit |
| 13261 | encodings, then turn the GOMP_IMPLICIT_BIT flag on |
| 13262 | for the runtime to see. */ |
| 13263 | tkind2 |= GOMP_MAP_IMPLICIT; |
| 13264 | } |
| 13265 | break; |
| 13266 | case OMP_CLAUSE_FIRSTPRIVATE: tkind2 = GOMP_MAP_TO; break; |
| 13267 | case OMP_CLAUSE_TO: tkind2 = GOMP_MAP_TO; break; |
| 13268 | case OMP_CLAUSE_FROM: tkind2 = GOMP_MAP_FROM; break; |
| 13269 | default: gcc_unreachable (); |
| 13270 | } |
| 13271 | lang_hooks.decls.omp_deep_mapping (stmt, c, tkind2, |
| 13272 | deep_map_data, |
| 13273 | TREE_VEC_ELT (t, 1), |
| 13274 | TREE_VEC_ELT (t, 2), |
| 13275 | deep_map_offset_data, |
| 13276 | deep_map_offset, &ilist); |
| 13277 | } |
| 13278 | if (!DECL_P (ovar)) |
| 13279 | { |
| 13280 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 13281 | && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)) |
| 13282 | { |
| 13283 | nc = OMP_CLAUSE_CHAIN (c); |
| 13284 | gcc_checking_assert (OMP_CLAUSE_DECL (nc) |
| 13285 | == get_base_address (ovar)); |
| 13286 | ovar = OMP_CLAUSE_DECL (nc); |
| 13287 | } |
| 13288 | else |
| 13289 | { |
| 13290 | tree x = build_sender_ref (var: ovar, ctx); |
| 13291 | tree v = ovar; |
| 13292 | if (in_reduction_clauses |
| 13293 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 13294 | && OMP_CLAUSE_MAP_IN_REDUCTION (c)) |
| 13295 | { |
| 13296 | v = unshare_expr (v); |
| 13297 | tree *p = &v; |
| 13298 | while (handled_component_p (t: *p) |
| 13299 | || TREE_CODE (*p) == INDIRECT_REF |
| 13300 | || TREE_CODE (*p) == ADDR_EXPR |
| 13301 | || TREE_CODE (*p) == MEM_REF |
| 13302 | || TREE_CODE (*p) == NON_LVALUE_EXPR) |
| 13303 | p = &TREE_OPERAND (*p, 0); |
| 13304 | tree d = *p; |
| 13305 | if (is_variable_sized (expr: d)) |
| 13306 | { |
| 13307 | gcc_assert (DECL_HAS_VALUE_EXPR_P (d)); |
| 13308 | d = DECL_VALUE_EXPR (d); |
| 13309 | gcc_assert (TREE_CODE (d) == INDIRECT_REF); |
| 13310 | d = TREE_OPERAND (d, 0); |
| 13311 | gcc_assert (DECL_P (d)); |
| 13312 | } |
| 13313 | splay_tree_key key |
| 13314 | = (splay_tree_key) &DECL_CONTEXT (d); |
| 13315 | tree nd = (tree) splay_tree_lookup (ctx->field_map, |
| 13316 | key)->value; |
| 13317 | if (d == *p) |
| 13318 | *p = nd; |
| 13319 | else |
| 13320 | *p = build_fold_indirect_ref (nd); |
| 13321 | } |
| 13322 | v = build_fold_addr_expr_with_type (v, ptr_type_node); |
| 13323 | v = lower_omp_map_iterator_expr (expr: v, c, stmt); |
| 13324 | gimplify_assign (x, v, &ilist); |
| 13325 | nc = NULL_TREE; |
| 13326 | } |
| 13327 | } |
| 13328 | else |
| 13329 | { |
| 13330 | if (DECL_SIZE (ovar) |
| 13331 | && !poly_int_tree_p (DECL_SIZE (ovar))) |
| 13332 | { |
| 13333 | tree ovar2 = DECL_VALUE_EXPR (ovar); |
| 13334 | gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF); |
| 13335 | ovar2 = TREE_OPERAND (ovar2, 0); |
| 13336 | gcc_assert (DECL_P (ovar2)); |
| 13337 | ovar = ovar2; |
| 13338 | } |
| 13339 | if (!maybe_lookup_field (var: ovar, ctx) |
| 13340 | && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 13341 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
| 13342 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))) |
| 13343 | continue; |
| 13344 | } |
| 13345 | |
| 13346 | talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar)); |
| 13347 | if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign) |
| 13348 | talign = DECL_ALIGN_UNIT (ovar); |
| 13349 | |
| 13350 | var = NULL_TREE; |
| 13351 | if (nc) |
| 13352 | { |
| 13353 | if (in_reduction_clauses |
| 13354 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 13355 | && OMP_CLAUSE_MAP_IN_REDUCTION (c)) |
| 13356 | { |
| 13357 | tree d = ovar; |
| 13358 | if (is_variable_sized (expr: d)) |
| 13359 | { |
| 13360 | gcc_assert (DECL_HAS_VALUE_EXPR_P (d)); |
| 13361 | d = DECL_VALUE_EXPR (d); |
| 13362 | gcc_assert (TREE_CODE (d) == INDIRECT_REF); |
| 13363 | d = TREE_OPERAND (d, 0); |
| 13364 | gcc_assert (DECL_P (d)); |
| 13365 | } |
| 13366 | splay_tree_key key |
| 13367 | = (splay_tree_key) &DECL_CONTEXT (d); |
| 13368 | tree nd = (tree) splay_tree_lookup (ctx->field_map, |
| 13369 | key)->value; |
| 13370 | if (d == ovar) |
| 13371 | var = nd; |
| 13372 | else |
| 13373 | var = build_fold_indirect_ref (nd); |
| 13374 | } |
| 13375 | else |
| 13376 | var = lookup_decl_in_outer_ctx (decl: ovar, ctx); |
| 13377 | } |
| 13378 | if (nc |
| 13379 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 13380 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
| 13381 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
| 13382 | && is_omp_target (stmt)) |
| 13383 | { |
| 13384 | x = build_sender_ref (var: c, ctx); |
| 13385 | gimplify_assign (x, build_fold_addr_expr (var), &ilist); |
| 13386 | } |
| 13387 | else if (nc) |
| 13388 | { |
| 13389 | x = build_sender_ref (var: ovar, ctx); |
| 13390 | |
| 13391 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
| 13392 | && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER |
| 13393 | && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) |
| 13394 | && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE) |
| 13395 | { |
| 13396 | gcc_assert (offloaded); |
| 13397 | tree avar = build_fold_addr_expr (var); |
| 13398 | if (!OMP_CLAUSE_ITERATORS (c)) |
| 13399 | { |
| 13400 | tree tmp = create_tmp_var (TREE_TYPE (TREE_TYPE (x))); |
| 13401 | mark_addressable (tmp); |
| 13402 | gimplify_assign (tmp, avar, &ilist); |
| 13403 | avar = tmp; |
| 13404 | } |
| 13405 | talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (x))); |
| 13406 | avar = build_fold_addr_expr (avar); |
| 13407 | avar = lower_omp_map_iterator_expr (expr: avar, c, stmt); |
| 13408 | gimplify_assign (x, avar, &ilist); |
| 13409 | } |
| 13410 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) |
| 13411 | { |
| 13412 | gcc_assert (is_gimple_omp_oacc (ctx->stmt)); |
| 13413 | if (!omp_privatize_by_reference (decl: var)) |
| 13414 | { |
| 13415 | if (is_gimple_reg (var) |
| 13416 | && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)) |
| 13417 | suppress_warning (var); |
| 13418 | var = build_fold_addr_expr (var); |
| 13419 | } |
| 13420 | else |
| 13421 | talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar))); |
| 13422 | gimplify_assign (x, var, &ilist); |
| 13423 | } |
| 13424 | else if (is_gimple_reg (var)) |
| 13425 | { |
| 13426 | gcc_assert (offloaded); |
| 13427 | tree avar = create_tmp_var (TREE_TYPE (var)); |
| 13428 | mark_addressable (avar); |
| 13429 | enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c); |
| 13430 | if (GOMP_MAP_COPY_TO_P (map_kind) |
| 13431 | || map_kind == GOMP_MAP_POINTER |
| 13432 | || map_kind == GOMP_MAP_TO_PSET |
| 13433 | || map_kind == GOMP_MAP_FORCE_DEVICEPTR) |
| 13434 | { |
| 13435 | /* If we need to initialize a temporary |
| 13436 | with VAR because it is not addressable, and |
| 13437 | the variable hasn't been initialized yet, then |
| 13438 | we'll get a warning for the store to avar. |
| 13439 | Don't warn in that case, the mapping might |
| 13440 | be implicit. */ |
| 13441 | suppress_warning (var, OPT_Wuninitialized); |
| 13442 | gimplify_assign (avar, var, &ilist); |
| 13443 | } |
| 13444 | avar = build_fold_addr_expr (avar); |
| 13445 | gimplify_assign (x, avar, &ilist); |
| 13446 | if ((GOMP_MAP_COPY_FROM_P (map_kind) |
| 13447 | || map_kind == GOMP_MAP_FORCE_DEVICEPTR) |
| 13448 | && !TYPE_READONLY (TREE_TYPE (var))) |
| 13449 | { |
| 13450 | x = unshare_expr (x); |
| 13451 | x = build_simple_mem_ref (x); |
| 13452 | gimplify_assign (var, x, &olist); |
| 13453 | } |
| 13454 | } |
| 13455 | else |
| 13456 | { |
| 13457 | /* While MAP is handled explicitly by the FE, |
| 13458 | for 'target update', only the identified is passed. */ |
| 13459 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM |
| 13460 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO) |
| 13461 | && (omp_is_allocatable_or_ptr (decl: var) |
| 13462 | && omp_check_optional_argument (decl: var, for_present_check: false))) |
| 13463 | var = build_fold_indirect_ref (var); |
| 13464 | else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM |
| 13465 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO) |
| 13466 | || (!omp_is_allocatable_or_ptr (decl: var) |
| 13467 | && !omp_check_optional_argument (decl: var, for_present_check: false))) |
| 13468 | var = build_fold_addr_expr (var); |
| 13469 | gimplify_assign (x, var, &ilist); |
| 13470 | } |
| 13471 | } |
| 13472 | s = NULL_TREE; |
| 13473 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) |
| 13474 | { |
| 13475 | gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt)); |
| 13476 | s = TREE_TYPE (ovar); |
| 13477 | if (TREE_CODE (s) == REFERENCE_TYPE |
| 13478 | || omp_check_optional_argument (decl: ovar, for_present_check: false)) |
| 13479 | s = TREE_TYPE (s); |
| 13480 | s = TYPE_SIZE_UNIT (s); |
| 13481 | } |
| 13482 | else |
| 13483 | s = OMP_CLAUSE_SIZE (c); |
| 13484 | if (s == NULL_TREE) |
| 13485 | s = TYPE_SIZE_UNIT (TREE_TYPE (ovar)); |
| 13486 | s = fold_convert (size_type_node, s); |
| 13487 | s = lower_omp_map_iterator_size (size: s, c, stmt); |
| 13488 | purpose = size_int (map_idx++); |
| 13489 | CONSTRUCTOR_APPEND_ELT (vsize, purpose, s); |
| 13490 | if (TREE_CODE (s) != INTEGER_CST) |
| 13491 | TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0; |
| 13492 | |
| 13493 | unsigned HOST_WIDE_INT tkind, tkind_zero; |
| 13494 | switch (OMP_CLAUSE_CODE (c)) |
| 13495 | { |
| 13496 | case OMP_CLAUSE_MAP: |
| 13497 | tkind = OMP_CLAUSE_MAP_KIND (c); |
| 13498 | tkind_zero = tkind; |
| 13499 | if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c)) |
| 13500 | switch (tkind) |
| 13501 | { |
| 13502 | case GOMP_MAP_ALLOC: |
| 13503 | case GOMP_MAP_IF_PRESENT: |
| 13504 | case GOMP_MAP_TO: |
| 13505 | case GOMP_MAP_FROM: |
| 13506 | case GOMP_MAP_TOFROM: |
| 13507 | case GOMP_MAP_ALWAYS_TO: |
| 13508 | case GOMP_MAP_ALWAYS_FROM: |
| 13509 | case GOMP_MAP_ALWAYS_TOFROM: |
| 13510 | case GOMP_MAP_ALWAYS_PRESENT_TO: |
| 13511 | case GOMP_MAP_ALWAYS_PRESENT_FROM: |
| 13512 | case GOMP_MAP_ALWAYS_PRESENT_TOFROM: |
| 13513 | case GOMP_MAP_RELEASE: |
| 13514 | case GOMP_MAP_FORCE_TO: |
| 13515 | case GOMP_MAP_FORCE_FROM: |
| 13516 | case GOMP_MAP_FORCE_TOFROM: |
| 13517 | case GOMP_MAP_FORCE_PRESENT: |
| 13518 | tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION; |
| 13519 | break; |
| 13520 | case GOMP_MAP_DELETE: |
| 13521 | tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION; |
| 13522 | default: |
| 13523 | break; |
| 13524 | } |
| 13525 | if (tkind_zero != tkind) |
| 13526 | { |
| 13527 | if (integer_zerop (s)) |
| 13528 | tkind = tkind_zero; |
| 13529 | else if (integer_nonzerop (s)) |
| 13530 | tkind_zero = tkind; |
| 13531 | } |
| 13532 | if (tkind_zero == tkind |
| 13533 | && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c) |
| 13534 | && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS) |
| 13535 | & ~GOMP_MAP_IMPLICIT) |
| 13536 | == 0)) |
| 13537 | { |
| 13538 | /* If this is an implicit map, and the GOMP_MAP_IMPLICIT |
| 13539 | bits are not interfered by other special bit encodings, |
| 13540 | then turn the GOMP_IMPLICIT_BIT flag on for the runtime |
| 13541 | to see. */ |
| 13542 | tkind |= GOMP_MAP_IMPLICIT; |
| 13543 | tkind_zero = tkind; |
| 13544 | } |
| 13545 | break; |
| 13546 | case OMP_CLAUSE_FIRSTPRIVATE: |
| 13547 | gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt)); |
| 13548 | tkind = GOMP_MAP_TO; |
| 13549 | tkind_zero = tkind; |
| 13550 | break; |
| 13551 | case OMP_CLAUSE_TO: |
| 13552 | tkind |
| 13553 | = (OMP_CLAUSE_MOTION_PRESENT (c) |
| 13554 | ? GOMP_MAP_ALWAYS_PRESENT_TO : GOMP_MAP_TO); |
| 13555 | tkind_zero = tkind; |
| 13556 | break; |
| 13557 | case OMP_CLAUSE_FROM: |
| 13558 | tkind |
| 13559 | = (OMP_CLAUSE_MOTION_PRESENT (c) |
| 13560 | ? GOMP_MAP_ALWAYS_PRESENT_FROM : GOMP_MAP_FROM); |
| 13561 | tkind_zero = tkind; |
| 13562 | break; |
| 13563 | default: |
| 13564 | gcc_unreachable (); |
| 13565 | } |
/* Pack the (log2) alignment into the high bits of the kind word; the
low talign_shift bits must still hold the raw map kind, which the
asserts verify before and after the packing.  */
gcc_checking_assert (tkind
< (HOST_WIDE_INT_C (1U) << talign_shift));
gcc_checking_assert (tkind_zero
< (HOST_WIDE_INT_C (1U) << talign_shift));
talign = ceil_log2 (x: talign);
tkind |= talign << talign_shift;
tkind_zero |= talign << talign_shift;
gcc_checking_assert (tkind
<= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
gcc_checking_assert (tkind_zero
<= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
if (tkind == tkind_zero)
/* Same kind regardless of the mapped size: emit a plain constant.  */
x = build_int_cstu (type: tkind_type, tkind);
else
{
/* The kind differs depending on whether the size S is zero at run
time, so the kinds array element is no longer a compile-time
constant: drop TREE_STATIC on it and emit a COND_EXPR selecting
tkind_zero for a zero-sized mapping.  */
TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
x = build3 (COND_EXPR, tkind_type,
fold_build2 (EQ_EXPR, boolean_type_node,
unshare_expr (s), size_zero_node),
build_int_cstu (type: tkind_type, tkind_zero),
build_int_cstu (type: tkind_type, tkind));
}
CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
/* Advance to the follow-up clause recorded earlier, if any, so its
chain is the one walked next.  */
if (nc && nc != c)
c = nc;
break;
| 13592 | |
| 13593 | case OMP_CLAUSE_FIRSTPRIVATE: |
| 13594 | omp_has_device_addr_descr: |
| 13595 | if (is_gimple_omp_oacc (stmt: ctx->stmt)) |
| 13596 | goto oacc_firstprivate_map; |
| 13597 | ovar = OMP_CLAUSE_DECL (c); |
| 13598 | if (omp_privatize_by_reference (decl: ovar)) |
| 13599 | talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar))); |
| 13600 | else |
| 13601 | talign = DECL_ALIGN_UNIT (ovar); |
| 13602 | var = lookup_decl_in_outer_ctx (decl: ovar, ctx); |
| 13603 | x = build_sender_ref (var: ovar, ctx); |
| 13604 | tkind = GOMP_MAP_FIRSTPRIVATE; |
| 13605 | type = TREE_TYPE (ovar); |
| 13606 | if (omp_privatize_by_reference (decl: ovar)) |
| 13607 | type = TREE_TYPE (type); |
| 13608 | if ((INTEGRAL_TYPE_P (type) |
| 13609 | && TYPE_PRECISION (type) <= POINTER_SIZE) |
| 13610 | || TREE_CODE (type) == POINTER_TYPE) |
| 13611 | { |
| 13612 | tkind = GOMP_MAP_FIRSTPRIVATE_INT; |
| 13613 | tree t = var; |
| 13614 | if (omp_privatize_by_reference (decl: var)) |
| 13615 | t = build_simple_mem_ref (var); |
| 13616 | else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)) |
| 13617 | suppress_warning (var); |
| 13618 | if (TREE_CODE (type) != POINTER_TYPE) |
| 13619 | t = fold_convert (pointer_sized_int_node, t); |
| 13620 | t = fold_convert (TREE_TYPE (x), t); |
| 13621 | gimplify_assign (x, t, &ilist); |
| 13622 | } |
| 13623 | else if (omp_privatize_by_reference (decl: var)) |
| 13624 | gimplify_assign (x, var, &ilist); |
| 13625 | else if (is_gimple_reg (var)) |
| 13626 | { |
| 13627 | tree avar = create_tmp_var (TREE_TYPE (var)); |
| 13628 | mark_addressable (avar); |
| 13629 | if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)) |
| 13630 | suppress_warning (var); |
| 13631 | gimplify_assign (avar, var, &ilist); |
| 13632 | avar = build_fold_addr_expr (avar); |
| 13633 | gimplify_assign (x, avar, &ilist); |
| 13634 | } |
| 13635 | else |
| 13636 | { |
| 13637 | var = build_fold_addr_expr (var); |
| 13638 | gimplify_assign (x, var, &ilist); |
| 13639 | } |
| 13640 | if (tkind == GOMP_MAP_FIRSTPRIVATE_INT) |
| 13641 | s = size_int (0); |
| 13642 | else if (omp_privatize_by_reference (decl: ovar)) |
| 13643 | s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar))); |
| 13644 | else |
| 13645 | s = TYPE_SIZE_UNIT (TREE_TYPE (ovar)); |
| 13646 | s = fold_convert (size_type_node, s); |
| 13647 | purpose = size_int (map_idx++); |
| 13648 | CONSTRUCTOR_APPEND_ELT (vsize, purpose, s); |
| 13649 | if (TREE_CODE (s) != INTEGER_CST) |
| 13650 | TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0; |
| 13651 | |
| 13652 | gcc_checking_assert (tkind |
| 13653 | < (HOST_WIDE_INT_C (1U) << talign_shift)); |
| 13654 | talign = ceil_log2 (x: talign); |
| 13655 | tkind |= talign << talign_shift; |
| 13656 | gcc_checking_assert (tkind |
| 13657 | <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type))); |
| 13658 | CONSTRUCTOR_APPEND_ELT (vkind, purpose, |
| 13659 | build_int_cstu (tkind_type, tkind)); |
| 13660 | /* Fortran array descriptors: firstprivate of data + attach. */ |
| 13661 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR |
| 13662 | && lang_hooks.decls.omp_array_data (ovar, true)) |
| 13663 | { |
| 13664 | tree not_null_lb, null_lb, after_lb; |
| 13665 | tree var1, var2, size1, size2; |
| 13666 | tree present = omp_check_optional_argument (decl: ovar, for_present_check: true); |
| 13667 | if (present) |
| 13668 | { |
| 13669 | location_t clause_loc = OMP_CLAUSE_LOCATION (c); |
| 13670 | not_null_lb = create_artificial_label (clause_loc); |
| 13671 | null_lb = create_artificial_label (clause_loc); |
| 13672 | after_lb = create_artificial_label (clause_loc); |
| 13673 | gimple_seq seq = NULL; |
| 13674 | present = force_gimple_operand (present, &seq, true, |
| 13675 | NULL_TREE); |
| 13676 | gimple_seq_add_seq (&ilist, seq); |
| 13677 | gimple_seq_add_stmt (&ilist, |
| 13678 | gimple_build_cond_from_tree (present, |
| 13679 | not_null_lb, null_lb)); |
| 13680 | gimple_seq_add_stmt (&ilist, |
| 13681 | gimple_build_label (label: not_null_lb)); |
| 13682 | } |
| 13683 | var1 = lang_hooks.decls.omp_array_data (var, false); |
| 13684 | size1 = lang_hooks.decls.omp_array_size (var, &ilist); |
| 13685 | var2 = build_fold_addr_expr (x); |
| 13686 | if (!POINTER_TYPE_P (TREE_TYPE (var))) |
| 13687 | var = build_fold_addr_expr (var); |
| 13688 | size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype, |
| 13689 | build_fold_addr_expr (var1), var); |
| 13690 | size2 = fold_convert (sizetype, size2); |
| 13691 | if (present) |
| 13692 | { |
| 13693 | tree tmp = create_tmp_var (TREE_TYPE (var1)); |
| 13694 | gimplify_assign (tmp, var1, &ilist); |
| 13695 | var1 = tmp; |
| 13696 | tmp = create_tmp_var (TREE_TYPE (var2)); |
| 13697 | gimplify_assign (tmp, var2, &ilist); |
| 13698 | var2 = tmp; |
| 13699 | tmp = create_tmp_var (TREE_TYPE (size1)); |
| 13700 | gimplify_assign (tmp, size1, &ilist); |
| 13701 | size1 = tmp; |
| 13702 | tmp = create_tmp_var (TREE_TYPE (size2)); |
| 13703 | gimplify_assign (tmp, size2, &ilist); |
| 13704 | size2 = tmp; |
| 13705 | gimple_seq_add_stmt (&ilist, gimple_build_goto (dest: after_lb)); |
| 13706 | gimple_seq_add_stmt (&ilist, gimple_build_label (label: null_lb)); |
| 13707 | gimplify_assign (var1, null_pointer_node, &ilist); |
| 13708 | gimplify_assign (var2, null_pointer_node, &ilist); |
| 13709 | gimplify_assign (size1, size_zero_node, &ilist); |
| 13710 | gimplify_assign (size2, size_zero_node, &ilist); |
| 13711 | gimple_seq_add_stmt (&ilist, gimple_build_label (label: after_lb)); |
| 13712 | } |
| 13713 | x = build_sender_ref (key: (splay_tree_key) &DECL_NAME (ovar), ctx); |
| 13714 | gimplify_assign (x, var1, &ilist); |
| 13715 | tkind = GOMP_MAP_FIRSTPRIVATE; |
| 13716 | talign = DECL_ALIGN_UNIT (ovar); |
| 13717 | talign = ceil_log2 (x: talign); |
| 13718 | tkind |= talign << talign_shift; |
| 13719 | gcc_checking_assert (tkind |
| 13720 | <= tree_to_uhwi ( |
| 13721 | TYPE_MAX_VALUE (tkind_type))); |
| 13722 | purpose = size_int (map_idx++); |
| 13723 | CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1); |
| 13724 | if (TREE_CODE (size1) != INTEGER_CST) |
| 13725 | TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0; |
| 13726 | CONSTRUCTOR_APPEND_ELT (vkind, purpose, |
| 13727 | build_int_cstu (tkind_type, tkind)); |
| 13728 | x = build_sender_ref (key: (splay_tree_key) &DECL_UID (ovar), ctx); |
| 13729 | gimplify_assign (x, var2, &ilist); |
| 13730 | tkind = GOMP_MAP_ATTACH; |
| 13731 | purpose = size_int (map_idx++); |
| 13732 | CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2); |
| 13733 | CONSTRUCTOR_APPEND_ELT (vkind, purpose, |
| 13734 | build_int_cstu (tkind_type, tkind)); |
| 13735 | } |
| 13736 | break; |
| 13737 | |
| 13738 | case OMP_CLAUSE_USE_DEVICE_PTR: |
| 13739 | case OMP_CLAUSE_USE_DEVICE_ADDR: |
| 13740 | case OMP_CLAUSE_HAS_DEVICE_ADDR: |
| 13741 | case OMP_CLAUSE_IS_DEVICE_PTR: |
| 13742 | ovar = OMP_CLAUSE_DECL (c); |
| 13743 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 13744 | { |
| 13745 | if (lang_hooks.decls.omp_array_data (ovar, true)) |
| 13746 | goto omp_has_device_addr_descr; |
| 13747 | while (TREE_CODE (ovar) == INDIRECT_REF |
| 13748 | || TREE_CODE (ovar) == ARRAY_REF) |
| 13749 | ovar = TREE_OPERAND (ovar, 0); |
| 13750 | } |
| 13751 | var = lookup_decl_in_outer_ctx (decl: ovar, ctx); |
| 13752 | |
| 13753 | if (lang_hooks.decls.omp_array_data (ovar, true)) |
| 13754 | { |
| 13755 | tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR |
| 13756 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 13757 | ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT); |
| 13758 | x = build_sender_ref (key: (splay_tree_key) &DECL_NAME (ovar), ctx); |
| 13759 | } |
| 13760 | else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR |
| 13761 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 13762 | { |
| 13763 | tkind = GOMP_MAP_USE_DEVICE_PTR; |
| 13764 | x = build_sender_ref (key: (splay_tree_key) &DECL_UID (ovar), ctx); |
| 13765 | } |
| 13766 | else |
| 13767 | { |
| 13768 | tkind = GOMP_MAP_FIRSTPRIVATE_INT; |
| 13769 | x = build_sender_ref (var: ovar, ctx); |
| 13770 | } |
| 13771 | |
| 13772 | if (is_gimple_omp_oacc (stmt: ctx->stmt)) |
| 13773 | { |
| 13774 | gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR); |
| 13775 | |
| 13776 | if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c)) |
| 13777 | tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT; |
| 13778 | } |
| 13779 | |
| 13780 | type = TREE_TYPE (ovar); |
| 13781 | if (lang_hooks.decls.omp_array_data (ovar, true)) |
| 13782 | var = lang_hooks.decls.omp_array_data (var, false); |
| 13783 | else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR |
| 13784 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 13785 | && !omp_privatize_by_reference (decl: ovar) |
| 13786 | && !omp_is_allocatable_or_ptr (decl: ovar)) |
| 13787 | || TREE_CODE (type) == ARRAY_TYPE) |
| 13788 | var = build_fold_addr_expr (var); |
| 13789 | else |
| 13790 | { |
| 13791 | if (omp_privatize_by_reference (decl: ovar) |
| 13792 | || omp_check_optional_argument (decl: ovar, for_present_check: false) |
| 13793 | || omp_is_allocatable_or_ptr (decl: ovar)) |
| 13794 | { |
| 13795 | type = TREE_TYPE (type); |
| 13796 | if (POINTER_TYPE_P (type) |
| 13797 | && TREE_CODE (type) != ARRAY_TYPE |
| 13798 | && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR |
| 13799 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR |
| 13800 | && !omp_is_allocatable_or_ptr (decl: ovar)) |
| 13801 | || (omp_privatize_by_reference (decl: ovar) |
| 13802 | && omp_is_allocatable_or_ptr (decl: ovar)))) |
| 13803 | var = build_simple_mem_ref (var); |
| 13804 | var = fold_convert (TREE_TYPE (x), var); |
| 13805 | } |
| 13806 | } |
| 13807 | tree present; |
| 13808 | present = omp_check_optional_argument (decl: ovar, for_present_check: true); |
| 13809 | if (present) |
| 13810 | { |
| 13811 | tree null_label = create_artificial_label (UNKNOWN_LOCATION); |
| 13812 | tree notnull_label = create_artificial_label (UNKNOWN_LOCATION); |
| 13813 | tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION); |
| 13814 | tree new_x = unshare_expr (x); |
| 13815 | gimplify_expr (&present, &ilist, NULL, is_gimple_val, |
| 13816 | fb_rvalue); |
| 13817 | gcond *cond = gimple_build_cond_from_tree (present, |
| 13818 | notnull_label, |
| 13819 | null_label); |
| 13820 | gimple_seq_add_stmt (&ilist, cond); |
| 13821 | gimple_seq_add_stmt (&ilist, gimple_build_label (label: null_label)); |
| 13822 | gimplify_assign (new_x, null_pointer_node, &ilist); |
| 13823 | gimple_seq_add_stmt (&ilist, gimple_build_goto (dest: opt_arg_label)); |
| 13824 | gimple_seq_add_stmt (&ilist, |
| 13825 | gimple_build_label (label: notnull_label)); |
| 13826 | gimplify_assign (x, var, &ilist); |
| 13827 | gimple_seq_add_stmt (&ilist, |
| 13828 | gimple_build_label (label: opt_arg_label)); |
| 13829 | } |
| 13830 | else |
| 13831 | gimplify_assign (x, var, &ilist); |
| 13832 | s = size_int (0); |
| 13833 | purpose = size_int (map_idx++); |
| 13834 | CONSTRUCTOR_APPEND_ELT (vsize, purpose, s); |
| 13835 | gcc_checking_assert (tkind |
| 13836 | < (HOST_WIDE_INT_C (1U) << talign_shift)); |
| 13837 | gcc_checking_assert (tkind |
| 13838 | <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type))); |
| 13839 | CONSTRUCTOR_APPEND_ELT (vkind, purpose, |
| 13840 | build_int_cstu (tkind_type, tkind)); |
| 13841 | break; |
| 13842 | } |
| 13843 | |
| 13844 | gcc_assert (map_idx == map_cnt); |
| 13845 | |
| 13846 | if (!deep_map_cnt) |
| 13847 | { |
| 13848 | DECL_INITIAL (TREE_VEC_ELT (t, 1)) |
| 13849 | = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize); |
| 13850 | DECL_INITIAL (TREE_VEC_ELT (t, 2)) |
| 13851 | = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind); |
| 13852 | } |
| 13853 | for (int i = 1; i <= 2; i++) |
| 13854 | if (deep_map_cnt || !TREE_STATIC (TREE_VEC_ELT (t, i))) |
| 13855 | { |
| 13856 | tree tmp = TREE_VEC_ELT (t, i); |
| 13857 | if (deep_map_cnt) |
| 13858 | { |
| 13859 | const char *prefix = (i == 1 ? ".omp_data_sizes0" |
| 13860 | : ".omp_data_kinds0" ); |
| 13861 | tree type = (i == 1) ? size_type_node : tkind_type; |
| 13862 | type = build_array_type_nelts (type, map_cnt); |
| 13863 | tree var = create_tmp_var (type, prefix); |
| 13864 | DECL_NAMELESS (var) = 1; |
| 13865 | TREE_ADDRESSABLE (var) = 1; |
| 13866 | TREE_STATIC (var) = TREE_STATIC (tmp); |
| 13867 | DECL_INITIAL (var) = build_constructor (type, i == 1 |
| 13868 | ? vsize : vkind); |
| 13869 | tmp = var; |
| 13870 | TREE_STATIC (TREE_VEC_ELT (t, i)) = 0; |
| 13871 | } |
| 13872 | |
| 13873 | gimple_seq initlist = NULL; |
| 13874 | force_gimple_operand (build1 (DECL_EXPR, void_type_node, tmp), |
| 13875 | &initlist, true, NULL_TREE); |
| 13876 | gimple_seq_add_seq (&ilist, initlist); |
| 13877 | |
| 13878 | if (deep_map_cnt) |
| 13879 | { |
| 13880 | tree tmp2; |
| 13881 | tree call = builtin_decl_explicit (fncode: BUILT_IN_MEMCPY); |
| 13882 | tmp2 = TYPE_SIZE_UNIT (TREE_TYPE (tmp)); |
| 13883 | call = build_call_expr_loc (input_location, call, 3, |
| 13884 | TREE_VEC_ELT (t, i), |
| 13885 | build_fold_addr_expr (tmp), tmp2); |
| 13886 | gimplify_and_add (call, &ilist); |
| 13887 | } |
| 13888 | |
| 13889 | if (!TREE_STATIC (tmp)) |
| 13890 | { |
| 13891 | tree clobber = build_clobber (TREE_TYPE (tmp)); |
| 13892 | gimple_seq_add_stmt (&olist, |
| 13893 | gimple_build_assign (tmp, clobber)); |
| 13894 | } |
| 13895 | if (deep_map_cnt) |
| 13896 | { |
| 13897 | tmp = TREE_VEC_ELT (t, i); |
| 13898 | tree call = builtin_decl_explicit (fncode: BUILT_IN_FREE); |
| 13899 | call = build_call_expr_loc (input_location, call, 1, tmp); |
| 13900 | gimplify_and_add (call, &olist); |
| 13901 | tree clobber = build_clobber (TREE_TYPE (tmp)); |
| 13902 | gimple_seq_add_stmt (&olist, |
| 13903 | gimple_build_assign (tmp, clobber)); |
| 13904 | } |
| 13905 | } |
| 13906 | else if (omp_maybe_offloaded_ctx (ctx: ctx->outer)) |
| 13907 | { |
| 13908 | tree id = get_identifier ("omp declare target" ); |
| 13909 | tree decl = TREE_VEC_ELT (t, i); |
| 13910 | DECL_ATTRIBUTES (decl) |
| 13911 | = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl)); |
| 13912 | varpool_node *node = varpool_node::get (decl); |
| 13913 | if (node) |
| 13914 | { |
| 13915 | node->offloadable = 1; |
| 13916 | if (ENABLE_OFFLOADING) |
| 13917 | { |
| 13918 | g->have_offload = true; |
| 13919 | vec_safe_push (v&: offload_vars, obj: t); |
| 13920 | } |
| 13921 | } |
| 13922 | } |
| 13923 | |
/* With deep mapping, the addresses vector (TREE_VEC_ELT (t, 0)) was
heap-allocated: free it on the exit path, and gimplify the sizes
vector reference on the entry path.  */
if (deep_map_cnt)
{
tree call = builtin_decl_explicit (fncode: BUILT_IN_FREE);
call = build_call_expr_loc (input_location, call, 1,
TREE_VEC_ELT (t, 0));
gimplify_and_add (call, &olist);

gimplify_expr (&TREE_VEC_ELT (t, 1), &ilist, NULL, is_gimple_val,
fb_rvalue);
}

/* The sender structure is dead after the region: clobber it so later
passes can reuse its storage.  */
tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
clobber));
}
| 13939 | |
/* Once all the expansions are done, sequence all the different
fragments inside gimple_omp_body.  */

new_body = NULL;

/* For an offloaded region with a sender record, initialize the
receiver decl from the sender: pass its address unless deep mapping
already produced a pointer value.  */
if (offloaded
&& ctx->record_type)
{
t = ctx->sender_decl;
if (!deep_map_cnt)
t = build_fold_addr_expr_loc (loc, t);
/* fixup_child_record_type might have changed receiver_decl's type.  */
t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
if (!AGGREGATE_TYPE_P (TREE_TYPE (ctx->sender_decl)))
gimplify_assign (ctx->receiver_decl, t, &new_body);
else
gimple_seq_add_stmt (&new_body,
gimple_build_assign (ctx->receiver_decl, t));
}
/* Then any firstprivate-pointer fixups collected earlier.  */
gimple_seq_add_seq (&new_body, fplist);
| 13960 | |
| 13961 | if (offloaded || data_region) |
| 13962 | { |
| 13963 | tree prev = NULL_TREE; |
| 13964 | for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) |
| 13965 | switch (OMP_CLAUSE_CODE (c)) |
| 13966 | { |
| 13967 | tree var, x; |
| 13968 | default: |
| 13969 | break; |
| 13970 | case OMP_CLAUSE_FIRSTPRIVATE: |
| 13971 | omp_firstprivatize_data_region: |
| 13972 | if (is_gimple_omp_oacc (stmt: ctx->stmt)) |
| 13973 | break; |
| 13974 | var = OMP_CLAUSE_DECL (c); |
| 13975 | if (omp_privatize_by_reference (decl: var) |
| 13976 | || is_gimple_reg_type (TREE_TYPE (var))) |
| 13977 | { |
| 13978 | tree new_var = lookup_decl (var, ctx); |
| 13979 | tree type; |
| 13980 | type = TREE_TYPE (var); |
| 13981 | if (omp_privatize_by_reference (decl: var)) |
| 13982 | type = TREE_TYPE (type); |
| 13983 | if ((INTEGRAL_TYPE_P (type) |
| 13984 | && TYPE_PRECISION (type) <= POINTER_SIZE) |
| 13985 | || TREE_CODE (type) == POINTER_TYPE) |
| 13986 | { |
| 13987 | x = build_receiver_ref (var, by_ref: false, ctx); |
| 13988 | if (TREE_CODE (type) != POINTER_TYPE) |
| 13989 | x = fold_convert (pointer_sized_int_node, x); |
| 13990 | x = fold_convert (type, x); |
| 13991 | gimplify_expr (&x, &new_body, NULL, is_gimple_val, |
| 13992 | fb_rvalue); |
| 13993 | if (omp_privatize_by_reference (decl: var)) |
| 13994 | { |
| 13995 | tree v = create_tmp_var_raw (type, get_name (var)); |
| 13996 | gimple_add_tmp_var (v); |
| 13997 | TREE_ADDRESSABLE (v) = 1; |
| 13998 | gimple_seq_add_stmt (&new_body, |
| 13999 | gimple_build_assign (v, x)); |
| 14000 | x = build_fold_addr_expr (v); |
| 14001 | } |
| 14002 | gimple_seq_add_stmt (&new_body, |
| 14003 | gimple_build_assign (new_var, x)); |
| 14004 | } |
| 14005 | else |
| 14006 | { |
| 14007 | bool by_ref = !omp_privatize_by_reference (decl: var); |
| 14008 | x = build_receiver_ref (var, by_ref, ctx); |
| 14009 | gimplify_expr (&x, &new_body, NULL, is_gimple_val, |
| 14010 | fb_rvalue); |
| 14011 | gimple_seq_add_stmt (&new_body, |
| 14012 | gimple_build_assign (new_var, x)); |
| 14013 | } |
| 14014 | } |
| 14015 | else if (is_variable_sized (expr: var)) |
| 14016 | { |
| 14017 | tree pvar = DECL_VALUE_EXPR (var); |
| 14018 | gcc_assert (TREE_CODE (pvar) == INDIRECT_REF); |
| 14019 | pvar = TREE_OPERAND (pvar, 0); |
| 14020 | gcc_assert (DECL_P (pvar)); |
| 14021 | tree new_var = lookup_decl (var: pvar, ctx); |
| 14022 | x = build_receiver_ref (var, by_ref: false, ctx); |
| 14023 | gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue); |
| 14024 | gimple_seq_add_stmt (&new_body, |
| 14025 | gimple_build_assign (new_var, x)); |
| 14026 | } |
| 14027 | break; |
/* OpenMP 'private' of a by-reference variable: materialize a private
backing object on the device and point the privatized reference at
it.  Only handled here when the referenced type has constant size;
the variable-size case is dealt with in the second clause walk
below (alloca-based).  OpenACC privatization is done elsewhere.  */
case OMP_CLAUSE_PRIVATE:
if (is_gimple_omp_oacc (stmt: ctx->stmt))
break;
var = OMP_CLAUSE_DECL (c);
if (omp_privatize_by_reference (decl: var))
{
location_t clause_loc = OMP_CLAUSE_LOCATION (c);
tree new_var = lookup_decl (var, ctx);
x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
if (TREE_CONSTANT (x))
{
/* Constant-size referenced object: create an addressable
temporary and take its address as the new reference value.  */
x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
get_name (var));
gimple_add_tmp_var (x);
TREE_ADDRESSABLE (x) = 1;
x = build_fold_addr_expr_loc (clause_loc, x);
}
else
/* Non-constant size: deferred to the second pass.  */
break;

x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
gimple_seq_add_stmt (&new_body,
gimple_build_assign (new_var, x));
}
break;
| 14054 | case OMP_CLAUSE_USE_DEVICE_PTR: |
| 14055 | case OMP_CLAUSE_USE_DEVICE_ADDR: |
| 14056 | case OMP_CLAUSE_HAS_DEVICE_ADDR: |
| 14057 | case OMP_CLAUSE_IS_DEVICE_PTR: |
| 14058 | tree new_var; |
| 14059 | gimple_seq assign_body; |
| 14060 | bool is_array_data; |
| 14061 | bool do_optional_check; |
| 14062 | assign_body = NULL; |
| 14063 | do_optional_check = false; |
| 14064 | var = OMP_CLAUSE_DECL (c); |
| 14065 | is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL; |
| 14066 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data) |
| 14067 | goto omp_firstprivatize_data_region; |
| 14068 | |
| 14069 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR |
| 14070 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 14071 | x = build_sender_ref (key: is_array_data |
| 14072 | ? (splay_tree_key) &DECL_NAME (var) |
| 14073 | : (splay_tree_key) &DECL_UID (var), ctx); |
| 14074 | else |
| 14075 | { |
| 14076 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 14077 | { |
| 14078 | while (TREE_CODE (var) == INDIRECT_REF |
| 14079 | || TREE_CODE (var) == ARRAY_REF) |
| 14080 | var = TREE_OPERAND (var, 0); |
| 14081 | } |
| 14082 | x = build_receiver_ref (var, by_ref: false, ctx); |
| 14083 | } |
| 14084 | |
| 14085 | if (is_array_data) |
| 14086 | { |
| 14087 | bool is_ref = omp_privatize_by_reference (decl: var); |
| 14088 | do_optional_check = true; |
| 14089 | /* First, we copy the descriptor data from the host; then |
| 14090 | we update its data to point to the target address. */ |
| 14091 | new_var = lookup_decl (var, ctx); |
| 14092 | new_var = DECL_VALUE_EXPR (new_var); |
| 14093 | tree v = new_var; |
| 14094 | tree v2 = var; |
| 14095 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR |
| 14096 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR) |
| 14097 | v2 = maybe_lookup_decl_in_outer_ctx (decl: var, ctx); |
| 14098 | |
| 14099 | if (is_ref) |
| 14100 | { |
| 14101 | v2 = build_fold_indirect_ref (v2); |
| 14102 | v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var)); |
| 14103 | gimple_add_tmp_var (v); |
| 14104 | TREE_ADDRESSABLE (v) = 1; |
| 14105 | gimplify_assign (v, v2, &assign_body); |
| 14106 | tree rhs = build_fold_addr_expr (v); |
| 14107 | gimple_seq_add_stmt (&assign_body, |
| 14108 | gimple_build_assign (new_var, rhs)); |
| 14109 | } |
| 14110 | else |
| 14111 | gimplify_assign (new_var, v2, &assign_body); |
| 14112 | |
| 14113 | v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false); |
| 14114 | gcc_assert (v2); |
| 14115 | gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue); |
| 14116 | gimple_seq_add_stmt (&assign_body, |
| 14117 | gimple_build_assign (v2, x)); |
| 14118 | } |
| 14119 | else if (is_variable_sized (expr: var)) |
| 14120 | { |
| 14121 | tree pvar = DECL_VALUE_EXPR (var); |
| 14122 | gcc_assert (TREE_CODE (pvar) == INDIRECT_REF); |
| 14123 | pvar = TREE_OPERAND (pvar, 0); |
| 14124 | gcc_assert (DECL_P (pvar)); |
| 14125 | new_var = lookup_decl (var: pvar, ctx); |
| 14126 | gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue); |
| 14127 | gimple_seq_add_stmt (&assign_body, |
| 14128 | gimple_build_assign (new_var, x)); |
| 14129 | } |
| 14130 | else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR |
| 14131 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 14132 | && !omp_privatize_by_reference (decl: var) |
| 14133 | && !omp_is_allocatable_or_ptr (decl: var)) |
| 14134 | || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE) |
| 14135 | { |
| 14136 | new_var = lookup_decl (var, ctx); |
| 14137 | new_var = DECL_VALUE_EXPR (new_var); |
| 14138 | gcc_assert (TREE_CODE (new_var) == MEM_REF); |
| 14139 | new_var = TREE_OPERAND (new_var, 0); |
| 14140 | gcc_assert (DECL_P (new_var)); |
| 14141 | gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue); |
| 14142 | gimple_seq_add_stmt (&assign_body, |
| 14143 | gimple_build_assign (new_var, x)); |
| 14144 | } |
| 14145 | else |
| 14146 | { |
| 14147 | tree type = TREE_TYPE (var); |
| 14148 | new_var = lookup_decl (var, ctx); |
| 14149 | if (omp_privatize_by_reference (decl: var)) |
| 14150 | { |
| 14151 | type = TREE_TYPE (type); |
| 14152 | if (POINTER_TYPE_P (type) |
| 14153 | && TREE_CODE (type) != ARRAY_TYPE |
| 14154 | && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR |
| 14155 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR) |
| 14156 | || (omp_privatize_by_reference (decl: var) |
| 14157 | && omp_is_allocatable_or_ptr (decl: var)))) |
| 14158 | { |
| 14159 | tree v = create_tmp_var_raw (type, get_name (var)); |
| 14160 | gimple_add_tmp_var (v); |
| 14161 | TREE_ADDRESSABLE (v) = 1; |
| 14162 | x = fold_convert (type, x); |
| 14163 | gimplify_expr (&x, &assign_body, NULL, is_gimple_val, |
| 14164 | fb_rvalue); |
| 14165 | gimple_seq_add_stmt (&assign_body, |
| 14166 | gimple_build_assign (v, x)); |
| 14167 | x = build_fold_addr_expr (v); |
| 14168 | do_optional_check = true; |
| 14169 | } |
| 14170 | } |
| 14171 | new_var = DECL_VALUE_EXPR (new_var); |
| 14172 | x = fold_convert (TREE_TYPE (new_var), x); |
| 14173 | gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue); |
| 14174 | gimple_seq_add_stmt (&assign_body, |
| 14175 | gimple_build_assign (new_var, x)); |
| 14176 | } |
| 14177 | tree present; |
| 14178 | present = ((do_optional_check |
| 14179 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR |
| 14180 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR) |
| 14181 | ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), for_present_check: true) |
| 14182 | : NULL_TREE); |
| 14183 | if (present) |
| 14184 | { |
| 14185 | tree null_label = create_artificial_label (UNKNOWN_LOCATION); |
| 14186 | tree notnull_label = create_artificial_label (UNKNOWN_LOCATION); |
| 14187 | tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION); |
| 14188 | glabel *null_glabel = gimple_build_label (label: null_label); |
| 14189 | glabel *notnull_glabel = gimple_build_label (label: notnull_label); |
| 14190 | ggoto *opt_arg_ggoto = gimple_build_goto (dest: opt_arg_label); |
| 14191 | gimplify_expr (&x, &new_body, NULL, is_gimple_val, |
| 14192 | fb_rvalue); |
| 14193 | gimplify_expr (&present, &new_body, NULL, is_gimple_val, |
| 14194 | fb_rvalue); |
| 14195 | gcond *cond = gimple_build_cond_from_tree (present, |
| 14196 | notnull_label, |
| 14197 | null_label); |
| 14198 | gimple_seq_add_stmt (&new_body, cond); |
| 14199 | gimple_seq_add_stmt (&new_body, null_glabel); |
| 14200 | gimplify_assign (new_var, null_pointer_node, &new_body); |
| 14201 | gimple_seq_add_stmt (&new_body, opt_arg_ggoto); |
| 14202 | gimple_seq_add_stmt (&new_body, notnull_glabel); |
| 14203 | gimple_seq_add_seq (&new_body, assign_body); |
| 14204 | gimple_seq_add_stmt (&new_body, |
| 14205 | gimple_build_label (label: opt_arg_label)); |
| 14206 | } |
| 14207 | else |
| 14208 | gimple_seq_add_seq (&new_body, assign_body); |
| 14209 | break; |
| 14210 | } |
| 14211 | /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass, |
| 14212 | so that firstprivate vars holding OMP_CLAUSE_SIZE if needed |
| 14213 | are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs |
| 14214 | or references to VLAs. */ |
| 14215 | for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
| 14216 | switch (OMP_CLAUSE_CODE (c)) |
| 14217 | { |
| 14218 | tree var; |
| 14219 | default: |
| 14220 | break; |
| 14221 | case OMP_CLAUSE_MAP: |
| 14222 | if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER |
| 14223 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE) |
| 14224 | { |
| 14225 | location_t clause_loc = OMP_CLAUSE_LOCATION (c); |
| 14226 | poly_int64 offset = 0; |
| 14227 | gcc_assert (prev); |
| 14228 | var = OMP_CLAUSE_DECL (c); |
| 14229 | if (DECL_P (var) |
| 14230 | && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE |
| 14231 | && is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl: var, |
| 14232 | ctx)) |
| 14233 | && varpool_node::get_create (decl: var)->offloadable) |
| 14234 | break; |
| 14235 | if (TREE_CODE (var) == INDIRECT_REF |
| 14236 | && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF) |
| 14237 | var = TREE_OPERAND (var, 0); |
| 14238 | if (TREE_CODE (var) == COMPONENT_REF) |
| 14239 | { |
| 14240 | var = get_addr_base_and_unit_offset (var, &offset); |
| 14241 | gcc_assert (var != NULL_TREE && DECL_P (var)); |
| 14242 | } |
| 14243 | else if (DECL_SIZE (var) |
| 14244 | && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST) |
| 14245 | { |
| 14246 | tree var2 = DECL_VALUE_EXPR (var); |
| 14247 | gcc_assert (TREE_CODE (var2) == INDIRECT_REF); |
| 14248 | var2 = TREE_OPERAND (var2, 0); |
| 14249 | gcc_assert (DECL_P (var2)); |
| 14250 | var = var2; |
| 14251 | } |
| 14252 | tree new_var = lookup_decl (var, ctx), x; |
| 14253 | tree type = TREE_TYPE (new_var); |
| 14254 | bool is_ref; |
| 14255 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF |
| 14256 | && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0)) |
| 14257 | == COMPONENT_REF)) |
| 14258 | { |
| 14259 | type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0)); |
| 14260 | is_ref = true; |
| 14261 | new_var = build2 (MEM_REF, type, |
| 14262 | build_fold_addr_expr (new_var), |
| 14263 | build_int_cst (build_pointer_type (type), |
| 14264 | offset)); |
| 14265 | } |
| 14266 | else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF) |
| 14267 | { |
| 14268 | type = TREE_TYPE (OMP_CLAUSE_DECL (c)); |
| 14269 | is_ref = TREE_CODE (type) == REFERENCE_TYPE; |
| 14270 | new_var = build2 (MEM_REF, type, |
| 14271 | build_fold_addr_expr (new_var), |
| 14272 | build_int_cst (build_pointer_type (type), |
| 14273 | offset)); |
| 14274 | } |
| 14275 | else |
| 14276 | is_ref = omp_privatize_by_reference (decl: var); |
| 14277 | if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE) |
| 14278 | is_ref = false; |
| 14279 | bool ref_to_array = false; |
| 14280 | bool ref_to_ptr = false; |
| 14281 | if (is_ref) |
| 14282 | { |
| 14283 | type = TREE_TYPE (type); |
| 14284 | if (TREE_CODE (type) == ARRAY_TYPE) |
| 14285 | { |
| 14286 | type = build_pointer_type (type); |
| 14287 | ref_to_array = true; |
| 14288 | } |
| 14289 | } |
| 14290 | else if (TREE_CODE (type) == ARRAY_TYPE) |
| 14291 | { |
| 14292 | tree decl2 = DECL_VALUE_EXPR (new_var); |
| 14293 | gcc_assert (TREE_CODE (decl2) == MEM_REF); |
| 14294 | decl2 = TREE_OPERAND (decl2, 0); |
| 14295 | gcc_assert (DECL_P (decl2)); |
| 14296 | new_var = decl2; |
| 14297 | type = TREE_TYPE (new_var); |
| 14298 | } |
| 14299 | else if (TREE_CODE (type) == REFERENCE_TYPE |
| 14300 | && TREE_CODE (TREE_TYPE (type)) == POINTER_TYPE) |
| 14301 | { |
| 14302 | type = TREE_TYPE (type); |
| 14303 | ref_to_ptr = true; |
| 14304 | } |
| 14305 | x = build_receiver_ref (OMP_CLAUSE_DECL (prev), by_ref: false, ctx); |
| 14306 | x = fold_convert_loc (clause_loc, type, x); |
| 14307 | if (!integer_zerop (OMP_CLAUSE_SIZE (c))) |
| 14308 | { |
| 14309 | tree bias = OMP_CLAUSE_SIZE (c); |
| 14310 | if (DECL_P (bias)) |
| 14311 | bias = lookup_decl (var: bias, ctx); |
| 14312 | bias = fold_convert_loc (clause_loc, sizetype, bias); |
| 14313 | bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype, |
| 14314 | bias); |
| 14315 | x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR, |
| 14316 | TREE_TYPE (x), x, bias); |
| 14317 | } |
| 14318 | if (ref_to_array) |
| 14319 | x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x); |
| 14320 | gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue); |
| 14321 | if ((is_ref && !ref_to_array) |
| 14322 | || ref_to_ptr) |
| 14323 | { |
| 14324 | tree t = create_tmp_var_raw (type, get_name (var)); |
| 14325 | gimple_add_tmp_var (t); |
| 14326 | TREE_ADDRESSABLE (t) = 1; |
| 14327 | gimple_seq_add_stmt (&new_body, |
| 14328 | gimple_build_assign (t, x)); |
| 14329 | x = build_fold_addr_expr_loc (clause_loc, t); |
| 14330 | } |
| 14331 | gimple_seq_add_stmt (&new_body, |
| 14332 | gimple_build_assign (new_var, x)); |
| 14333 | prev = NULL_TREE; |
| 14334 | } |
| 14335 | else if (OMP_CLAUSE_CHAIN (c) |
| 14336 | && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) |
| 14337 | == OMP_CLAUSE_MAP |
| 14338 | && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c)) |
| 14339 | == GOMP_MAP_FIRSTPRIVATE_POINTER |
| 14340 | || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c)) |
| 14341 | == GOMP_MAP_FIRSTPRIVATE_REFERENCE))) |
| 14342 | prev = c; |
| 14343 | break; |
| 14344 | case OMP_CLAUSE_PRIVATE: |
| 14345 | var = OMP_CLAUSE_DECL (c); |
| 14346 | if (is_variable_sized (expr: var)) |
| 14347 | { |
| 14348 | location_t clause_loc = OMP_CLAUSE_LOCATION (c); |
| 14349 | tree new_var = lookup_decl (var, ctx); |
| 14350 | tree pvar = DECL_VALUE_EXPR (var); |
| 14351 | gcc_assert (TREE_CODE (pvar) == INDIRECT_REF); |
| 14352 | pvar = TREE_OPERAND (pvar, 0); |
| 14353 | gcc_assert (DECL_P (pvar)); |
| 14354 | tree new_pvar = lookup_decl (var: pvar, ctx); |
| 14355 | tree atmp = builtin_decl_explicit (fncode: BUILT_IN_ALLOCA_WITH_ALIGN); |
| 14356 | tree al = size_int (DECL_ALIGN (var)); |
| 14357 | tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var)); |
| 14358 | x = build_call_expr_loc (clause_loc, atmp, 2, x, al); |
| 14359 | x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x); |
| 14360 | gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue); |
| 14361 | gimple_seq_add_stmt (&new_body, |
| 14362 | gimple_build_assign (new_pvar, x)); |
| 14363 | } |
| 14364 | else if (omp_privatize_by_reference (decl: var) |
| 14365 | && !is_gimple_omp_oacc (stmt: ctx->stmt)) |
| 14366 | { |
| 14367 | location_t clause_loc = OMP_CLAUSE_LOCATION (c); |
| 14368 | tree new_var = lookup_decl (var, ctx); |
| 14369 | tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var))); |
| 14370 | if (TREE_CONSTANT (x)) |
| 14371 | break; |
| 14372 | else |
| 14373 | { |
| 14374 | tree atmp |
| 14375 | = builtin_decl_explicit (fncode: BUILT_IN_ALLOCA_WITH_ALIGN); |
| 14376 | tree rtype = TREE_TYPE (TREE_TYPE (new_var)); |
| 14377 | tree al = size_int (TYPE_ALIGN (rtype)); |
| 14378 | x = build_call_expr_loc (clause_loc, atmp, 2, x, al); |
| 14379 | } |
| 14380 | |
| 14381 | x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x); |
| 14382 | gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue); |
| 14383 | gimple_seq_add_stmt (&new_body, |
| 14384 | gimple_build_assign (new_var, x)); |
| 14385 | } |
| 14386 | break; |
| 14387 | } |
| 14388 | |
| 14389 | gimple_seq fork_seq = NULL; |
| 14390 | gimple_seq join_seq = NULL; |
| 14391 | |
| 14392 | if (offloaded && is_gimple_omp_oacc (stmt: ctx->stmt)) |
| 14393 | { |
| 14394 | /* If there are reductions on the offloaded region itself, treat |
| 14395 | them as a dummy GANG loop. */ |
| 14396 | tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG); |
| 14397 | |
| 14398 | gcall *private_marker = lower_oacc_private_marker (ctx); |
| 14399 | |
| 14400 | if (private_marker) |
| 14401 | gimple_call_set_arg (gs: private_marker, index: 2, arg: level); |
| 14402 | |
| 14403 | lower_oacc_reductions (loc: gimple_location (g: ctx->stmt), clauses, level, |
| 14404 | inner: false, NULL, private_marker, NULL, fork_seq: &fork_seq, |
| 14405 | join_seq: &join_seq, ctx); |
| 14406 | } |
| 14407 | |
| 14408 | gimple_seq_add_seq (&new_body, fork_seq); |
| 14409 | gimple_seq_add_seq (&new_body, tgt_body); |
| 14410 | gimple_seq_add_seq (&new_body, join_seq); |
| 14411 | |
| 14412 | if (offloaded) |
| 14413 | { |
| 14414 | new_body = maybe_catch_exception (body: new_body); |
| 14415 | gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false)); |
| 14416 | } |
| 14417 | gimple_omp_set_body (gs: stmt, body: new_body); |
| 14418 | } |
| 14419 | |
| 14420 | gsi_insert_seq_before (gsi_p, gimple_omp_target_iterator_loops (omp_target_stmt: stmt), |
| 14421 | GSI_SAME_STMT); |
| 14422 | gimple_omp_target_set_iterator_loops (omp_target_stmt: stmt, NULL); |
| 14423 | bind = gimple_build_bind (NULL, NULL, |
| 14424 | tgt_bind ? gimple_bind_block (bind_stmt: tgt_bind) |
| 14425 | : NULL_TREE); |
| 14426 | gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true); |
| 14427 | gimple_bind_add_seq (bind_stmt: bind, seq: ilist); |
| 14428 | gimple_bind_add_stmt (bind_stmt: bind, stmt); |
| 14429 | gimple_bind_add_seq (bind_stmt: bind, seq: olist); |
| 14430 | |
| 14431 | pop_gimplify_context (NULL); |
| 14432 | |
| 14433 | if (dep_bind) |
| 14434 | { |
| 14435 | gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_ilist); |
| 14436 | gimple_bind_add_stmt (bind_stmt: dep_bind, stmt: bind); |
| 14437 | gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_olist); |
| 14438 | pop_gimplify_context (dep_bind); |
| 14439 | } |
| 14440 | } |
| 14441 | |
/* Generate code to implement the action-clauses (destroy, init, use) of an
   OpenMP interop construct.  OBJS holds all clauses of one single action
   kind; the generated initialization code is appended to *SEQ.  On return,
   OBJS has been consumed and replaced by a single element: the address of a
   compiler-generated array temporary collecting the interop objects (or
   their addresses), as expected by the GOMP_interop runtime routine.  For
   the init action, INTEROP_TYPES (the target/targetsync bits per object)
   and PREFER_TYPES (the prefer_type specifier per object, NULL_TREE if
   absent) must parallel OBJS and are replaced the same way (PREFER_TYPES
   by null_pointer_node when no clause specified a prefer_type).  */

static void
lower_omp_interop_action_clauses (gimple_seq *seq, vec<tree> &objs,
				  vec<tree> *interop_types = NULL,
				  vec<tree> *prefer_types = NULL)
{
  if (objs.length () == 0)
    return;

  /* All clauses in OBJS have the same action kind; use the first one to
     identify it.  */
  enum omp_clause_code action = OMP_CLAUSE_CODE (objs[0]);
  if (action == OMP_CLAUSE_INIT)
    gcc_checking_assert (objs.length () == interop_types->length ()
			 && objs.length () == prefer_types->length ());
  else
    gcc_assert (prefer_types == NULL && interop_types == NULL);

  tree ret_objs = NULL_TREE, ret_interop_types = NULL_TREE,
    ret_prefer_types = NULL_TREE;

  /* Build an array of interop objects.  */

  tree type_obj_pref = build_array_type_nelts (ptr_type_node, objs.length ());
  ret_objs = create_tmp_var (type_obj_pref, "interopobjs" );

  bool have_pref_type = false;
  if (action == OMP_CLAUSE_INIT)
    {
      /* A prefer_type array is only needed if at least one init clause
	 actually specified one.  */
      for (tree pref_type : prefer_types)
	if (pref_type != NULL_TREE)
	  {
	    have_pref_type = true;
	    break;
	  }
      tree type_tgtsync
	= build_array_type_nelts (integer_type_node, objs.length ());
      ret_interop_types = create_tmp_var (type_tgtsync, "tgt_tgtsync" );
      if (have_pref_type)
	ret_prefer_types = create_tmp_var (type_obj_pref, "pref_type" );
      else
	{
	  ret_prefer_types = null_pointer_node;
	  prefer_types->truncate (size: 0);
	}
    }

  /* Pop the clauses off the backs of the vectors and fill the array
     temporaries element by element.  */
  for (size_t i = 0; !objs.is_empty (); i++)
    {
      tree offset = build_int_cst (integer_type_node, i);
      tree init = build4 (ARRAY_REF, ptr_type_node, ret_objs, offset, NULL_TREE,
			  NULL_TREE);
      tree obj = OMP_CLAUSE_DECL (objs.pop ());
      if (TREE_CODE (TREE_TYPE (obj)) == REFERENCE_TYPE)
	obj = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (obj)), obj);
      if (action != OMP_CLAUSE_USE
	  && TREE_CODE (TREE_TYPE (obj)) != POINTER_TYPE)
	/* For modifying actions, we need a pointer.  */
	obj = build_fold_addr_expr (obj);
      else if (action == OMP_CLAUSE_USE
	       && TREE_CODE (TREE_TYPE (obj)) == POINTER_TYPE)
	/* For use action, we need the value.  */
	obj = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (obj)), obj);
      init = build2 (MODIFY_EXPR, ptr_type_node, init,
		     fold_convert (ptr_type_node, obj));
      gimplify_and_add (init, seq);

      if (action == OMP_CLAUSE_INIT)
	{
	  /* Store the matching target/targetsync bits ...  */
	  init = build4 (ARRAY_REF, integer_type_node, ret_interop_types,
			 offset, NULL_TREE, NULL_TREE);
	  init = build2 (MODIFY_EXPR, integer_type_node, init,
			 interop_types->pop ());
	  gimplify_and_add (init, seq);

	  /* ... and the matching prefer_type (or NULL) at the same
	     index.  */
	  if (have_pref_type)
	    {
	      tree prefer_type = prefer_types->pop ();
	      tree pref = (prefer_type == NULL_TREE
			   ? null_pointer_node
			   : build_fold_addr_expr (prefer_type));
	      init = build4 (ARRAY_REF, ptr_type_node, ret_prefer_types, offset,
			     NULL_TREE, NULL_TREE);
	      init = build2 (MODIFY_EXPR, ptr_type_node, init, pref);
	      gimplify_and_add (init, seq);
	    }
	}
    }
  /* The runtime call takes the addresses of the arrays, not the arrays
     themselves.  */
  if (action == OMP_CLAUSE_INIT)
    {
      if (have_pref_type)
	ret_prefer_types = build_fold_addr_expr (ret_prefer_types);
      ret_interop_types = build_fold_addr_expr (ret_interop_types);
    }
  ret_objs = build_fold_addr_expr (ret_objs);

  gcc_assert (objs.is_empty ()
	      && (!interop_types || interop_types->is_empty ())
	      && (!prefer_types || prefer_types->is_empty ()));

  /* Hand the array addresses back to the caller via the (now empty)
     vectors.  */
  objs.safe_push (obj: ret_objs);
  if (action == OMP_CLAUSE_INIT)
    {
      interop_types->safe_push (obj: ret_interop_types);
      prefer_types->safe_push (obj: ret_prefer_types);
    }
}
| 14549 | |
/* Lower code for an OpenMP interop directive.  The directive at *GSI_P is
   replaced by a GIMPLE_BIND whose body sets up the clause arguments and
   calls the GOMP_interop runtime routine, wrapped in any depend-clause
   setup/teardown code.  CTX is the directive's lowering context.  */

static void
lower_omp_interop (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gimple_seq bind_body = NULL;

  /* Emit call to GOMP_interop:
     void
     GOMP_interop (int device_num, int n_init, omp_interop_t **init,
		   const void *target_targetsync, const void *prefer_type,
		   int n_use, omp_interop_t *use, int n_destroy,
		   omp_interop_t **destroy, unsigned int flags,
		   void **depend) */

  tree flags = NULL_TREE;
  tree depend = null_pointer_node;
  tree device_num = NULL_TREE;

  /* Bucket the clauses by action kind and collect per-clause data for the
     runtime call.  */
  auto_vec<tree> init_objs, use_objs, destroy_objs, prefer_type,
    target_targetsync;
  gimple_seq dep_ilist = NULL, dep_olist = NULL;
  tree clauses = gimple_omp_interop_clauses (gs: gsi_stmt (i: *gsi_p));
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_INIT:
	  {
	    init_objs.safe_push (obj: c);
	    /* Encode the init clause's target/targetsync modifiers as
	       a bitmask kept parallel to init_objs.  */
	    int target_targetsync_bits = 0;
	    if (OMP_CLAUSE_INIT_TARGET (c))
	      target_targetsync_bits |= GOMP_INTEROP_TARGET;
	    if (OMP_CLAUSE_INIT_TARGETSYNC (c))
	      target_targetsync_bits |= GOMP_INTEROP_TARGETSYNC;
	    tree t = build_int_cst (integer_type_node, target_targetsync_bits);
	    target_targetsync.safe_push (obj: t);
	    prefer_type.safe_push (OMP_CLAUSE_INIT_PREFER_TYPE (c));
	  }
	  break;
	case OMP_CLAUSE_USE:
	  use_objs.safe_push (obj: c);
	  break;
	case OMP_CLAUSE_DESTROY:
	  destroy_objs.safe_push (obj: c);
	  break;
	case OMP_CLAUSE_NOWAIT:
	  flags = build_int_cst (integer_type_node, GOMP_INTEROP_FLAG_NOWAIT);
	  break;
	case OMP_CLAUSE_DEPEND:
	  {
	    tree *cp = gimple_omp_interop_clauses_ptr (gs: gsi_stmt (i: *gsi_p));
	    lower_depend_clauses (pclauses: cp, iseq: &dep_ilist, oseq: &dep_olist);
	    depend = OMP_CLAUSE_DECL (*cp);
	  }
	  break;
	case OMP_CLAUSE_DEVICE:
	  device_num = OMP_CLAUSE_DEVICE_ID (c);
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  if (flags == NULL_TREE)
    flags = build_int_cst (integer_type_node, 0);

  /* Without a device clause, use the default device.  */
  if (device_num == NULL_TREE)
    device_num = build_int_cst (integer_type_node, GOMP_DEVICE_DEFAULT_OMP_61);

  /* Take the clause counts before the vectors are collapsed below.  */
  tree n_init = build_int_cst (integer_type_node, init_objs.length ());
  tree n_use = build_int_cst (integer_type_node, use_objs.length ());
  tree n_destroy = build_int_cst (integer_type_node, destroy_objs.length ());

  /* Collapse each clause vector into a pointer to an array temporary;
     afterwards each vector holds at most one element.  */
  lower_omp_interop_action_clauses (seq: &bind_body, objs&: init_objs, interop_types: &target_targetsync,
				    prefer_types: &prefer_type);
  lower_omp_interop_action_clauses (seq: &bind_body, objs&: use_objs);
  lower_omp_interop_action_clauses (seq: &bind_body, objs&: destroy_objs);

  gimple_seq_add_seq (&bind_body, dep_ilist);
  tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_INTEROP);
  tree init_arg = init_objs.length () ? init_objs.pop () : null_pointer_node;
  tree target_targetsync_arg = target_targetsync.length ()
			       ? target_targetsync.pop ()
			       : null_pointer_node;
  tree prefer_type_arg
    = prefer_type.length () ? prefer_type.pop () : null_pointer_node;
  tree use_arg = use_objs.length () ? use_objs.pop () : null_pointer_node;
  tree destroy_arg
    = destroy_objs.length () ? destroy_objs.pop () : null_pointer_node;
  gcall *call
    = gimple_build_call (fn, 11, device_num, n_init, init_arg,
			 target_targetsync_arg, prefer_type_arg, n_use, use_arg,
			 n_destroy, destroy_arg, flags, depend);
  gimple_seq_add_stmt (&bind_body, call);
  gimple_seq_add_seq (&bind_body, dep_olist);

  gsi_replace (gsi_p, bind, true);
  gimple_bind_set_body (bind_stmt: bind, seq: bind_body);
  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
| 14657 | |
/* Expand code for an OpenMP teams directive.  The directive at *GSI_P is
   replaced by a bind whose body evaluates the num_teams/thread_limit
   clauses and wraps the teams region in a GOMP_teams4 retry loop (see the
   pseudo-code comment below).  CTX is the directive's lowering context.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (p: gsi_stmt (i: *gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause bounds; 0 leaves the choice to the
     runtime.  */
  tree num_teams = omp_find_clause (clauses: gimple_omp_teams_clauses (gs: teams_stmt),
				    kind: OMP_CLAUSE_NUM_TEAMS);
  tree num_teams_lower = NULL_TREE;
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
      if (num_teams_lower)
	{
	  num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
	  gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
			 fb_rvalue);
	}
      num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Without an explicit lower bound, lower equals upper.  */
  if (num_teams_lower == NULL_TREE)
    num_teams_lower = num_teams;
  /* Likewise for thread_limit: 0 means no clause given.  */
  tree thread_limit = omp_find_clause (clauses: gimple_omp_teams_clauses (gs: teams_stmt),
				       kind: OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }
  /* Generate:
       first = 1;
     llabel:
       temp = GOMP_teams4 (num_teams_lower, num_teams, thread_limit, first);
       if (temp != 0) goto tlabel; else goto flabel;
     tlabel:
       first = 0;
       <teams region>
       OMP_RETURN;
       goto llabel;
     flabel:
     i.e. the region is re-run for as long as the runtime call returns
     non-zero.  */
  location_t loc = gimple_location (g: teams_stmt);
  tree decl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TEAMS4);
  tree rettype = TREE_TYPE (TREE_TYPE (decl));
  tree first = create_tmp_var (rettype);
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_one_cst (rettype)));
  tree llabel = create_artificial_label (loc);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (label: llabel));
  gimple *call
    = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
			 first);
  gimple_set_location (g: call, location: loc);
  tree temp = create_tmp_var (rettype);
  gimple_call_set_lhs (gs: call, lhs: temp);
  gimple_seq_add_stmt (&bind_body, call);

  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
				    tlabel, flabel);
  gimple_seq_add_stmt (&bind_body, cond);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (label: tlabel));
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_zero_cst (rettype)));

  /* Lower the data-sharing and reduction clauses and the region body.  */
  lower_rec_input_clauses (clauses: gimple_omp_teams_clauses (gs: teams_stmt),
			   ilist: &bind_body, dlist: &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (gs: teams_stmt), ctx);
  lower_reduction_clauses (clauses: gimple_omp_teams_clauses (gs: teams_stmt), stmt_seqp: &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  /* The lowered body is spliced in after the (now body-less) teams
     statement, followed by reduction and destructor code.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (gs: teams_stmt));
  gimple_omp_set_body (gs: teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_seq_add_stmt (&bind_body, gimple_build_goto (dest: llabel));
  gimple_seq_add_stmt (&bind_body, gimple_build_label (label: flabel));
  gimple_bind_set_body (bind_stmt: bind, seq: bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
| 14752 | |
| 14753 | /* Callback for lower_omp_1. Return non-NULL if *tp needs to be |
| 14754 | regimplified. If DATA is non-NULL, lower_omp_1 is outside |
| 14755 | of OMP context, but with make_addressable_vars set. */ |
| 14756 | |
| 14757 | static tree |
| 14758 | lower_omp_regimplify_p (tree *tp, int *walk_subtrees, |
| 14759 | void *data) |
| 14760 | { |
| 14761 | tree t = *tp; |
| 14762 | |
| 14763 | /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */ |
| 14764 | if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL) |
| 14765 | && data == NULL |
| 14766 | && DECL_HAS_VALUE_EXPR_P (t)) |
| 14767 | return t; |
| 14768 | |
| 14769 | if (make_addressable_vars |
| 14770 | && DECL_P (t) |
| 14771 | && bitmap_bit_p (make_addressable_vars, DECL_UID (t))) |
| 14772 | return t; |
| 14773 | |
| 14774 | /* If a global variable has been privatized, TREE_CONSTANT on |
| 14775 | ADDR_EXPR might be wrong. */ |
| 14776 | if (data == NULL && TREE_CODE (t) == ADDR_EXPR) |
| 14777 | recompute_tree_invariant_for_addr_expr (t); |
| 14778 | |
| 14779 | *walk_subtrees = !IS_TYPE_OR_DECL_P (t); |
| 14780 | return NULL_TREE; |
| 14781 | } |
| 14782 | |
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Context of the statement being regimplified.  */
  omp_context *ctx;
  /* Saved <DECL_VALUE_EXPR, decl> pairs for the temporarily adjusted
     omp_member_access_dummy_var decls, so that the original value exprs
     can be restored after regimplification.  */
  vec<tree> *decls;
};
| 14791 | |
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  DATA is a walk_stmt_info whose info field
   points at a lower_omp_regimplify_operands_data.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  /* T is non-NULL iff *TP is an omp_member_access_dummy_var; it is then
     the underlying var the dummy var's DECL_VALUE_EXPR refers to.  */
  tree t = omp_member_access_dummy_var (decl: *tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (var: t, ctx: ldata->ctx);
      if (o == NULL_TREE)
	o = maybe_lookup_decl_in_outer_ctx (decl: t, ctx: ldata->ctx);
      if (o != t)
	{
	  /* Save the original DECL_VALUE_EXPR followed by the decl itself
	     (the caller pops them in reverse order when restoring), then
	     install a copy of the value expr with T remapped to O.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (obj: *tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), from: t, to: o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
| 14820 | |
| 14821 | /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs |
| 14822 | of omp_member_access_dummy_var vars during regimplification. */ |
| 14823 | |
| 14824 | static void |
| 14825 | lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt, |
| 14826 | gimple_stmt_iterator *gsi_p) |
| 14827 | { |
| 14828 | auto_vec<tree, 10> decls; |
| 14829 | if (ctx) |
| 14830 | { |
| 14831 | struct walk_stmt_info wi; |
| 14832 | memset (s: &wi, c: '\0', n: sizeof (wi)); |
| 14833 | struct lower_omp_regimplify_operands_data data; |
| 14834 | data.ctx = ctx; |
| 14835 | data.decls = &decls; |
| 14836 | wi.info = &data; |
| 14837 | walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi); |
| 14838 | } |
| 14839 | gimple_regimplify_operands (stmt, gsi_p); |
| 14840 | while (!decls.is_empty ()) |
| 14841 | { |
| 14842 | tree t = decls.pop (); |
| 14843 | tree v = decls.pop (); |
| 14844 | SET_DECL_VALUE_EXPR (t, v); |
| 14845 | } |
| 14846 | } |
| 14847 | |
/* Lower the single statement *GSI_P inside OMP context CTX (CTX is NULL
   when outside any OMP region).  OMP directives are dispatched to their
   construct-specific lowering routines; container statements recurse via
   lower_omp; everything else is regimplified when privatization has
   invalidated its operands.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (i: *gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (g: stmt))
    input_location = gimple_location (g: stmt);

  /* WI is only consulted when CTX is NULL but make_addressable_vars is
     set (see lower_omp_regimplify_p).  */
  if (make_addressable_vars)
    memset (s: &wi, c: '\0', n: sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (g: stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (p: stmt);
	if ((ctx || make_addressable_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, stmt: cond_stmt, gsi_p);
      }
      break;
    /* Container statements: recurse into their bodies.  */
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (catch_stmt: as_a <gcatch *> (p: stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (gs: stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (gs: stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (gs: stmt), ctx);
      break;
    case GIMPLE_ASSUME:
      lower_omp (gimple_assume_body_ptr (gs: stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (transaction_stmt: as_a <gtransaction *> (p: stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      if (ctx && is_gimple_omp_oacc (stmt: ctx->stmt))
	{
	  tree vars = gimple_bind_vars (bind_stmt: as_a <gbind *> (p: stmt));
	  oacc_privatization_scan_decl_chain (ctx, decls: vars);
	}
      lower_omp (gimple_bind_body_ptr (bind_stmt: as_a <gbind *> (p: stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (bind: as_a <gbind *> (p: stmt));
      break;
    /* OMP directives: look up the directive's own context and dispatch to
       the construct-specific lowering routine.  */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCOPE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scope (gsi_p, ctx);
      break;
    case GIMPLE_OMP_DISPATCH:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_dispatch (gsi_p, ctx);
      break;
    case GIMPLE_OMP_INTEROP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_interop (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_STRUCTURED_BLOCK:
      /* We have already done error checking at this point, so these nodes
	 can be completely removed and replaced with their body.  */
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp (gimple_omp_body_ptr (gs: stmt), ctx);
      gsi_replace_with_seq (gsi_p, gimple_omp_body (gs: stmt), true);
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || make_addressable_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams constructs are lowered like parallel/task regions.  */
      if (gimple_omp_teams_host (omp_teams_stmt: as_a <gomp_teams *> (p: stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (p: stmt);
      fndecl = gimple_call_fndecl (gs: call_stmt);
      if (fndecl
	  && fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl: fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* Cancellation is handled by the enclosing sections region,
	       not the section itself.  */
	    if (gimple_code (g: cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; a barrier stays a plain barrier.  */
		if (DECL_FUNCTION_CODE (decl: fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* Barriers in cancellable regions use the _cancel variant,
		   whose return value is checked below.  */
		fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (gs: call_stmt, decl: fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Give the call an lhs and branch to the region's cancel label
	       when the runtime reports cancellation, otherwise fall
	       through.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (gs: call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (label: fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* For lastprivate (conditional:) handling, each store to a tracked
	 decl records the current iteration in the associated _condtemp_
	 tracking variable, so the "last" write can be identified.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (g: up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (g: up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (g: up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (g: up->stmt) == GIMPLE_OMP_SCOPE
	      || gimple_code (g: up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (g: up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (g: up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (g: up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (t: gimple_assign_lhs (gs: stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (k: lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (g: up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (g: up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (gs: up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (gs: up->stmt);
		tree c = omp_find_clause (clauses, kind: OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       kind: OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || make_addressable_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (s: stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
| 15129 | |
| 15130 | static void |
| 15131 | lower_omp (gimple_seq *body, omp_context *ctx) |
| 15132 | { |
| 15133 | location_t saved_location = input_location; |
| 15134 | gimple_stmt_iterator gsi; |
| 15135 | for (gsi = gsi_start (seq&: *body); !gsi_end_p (i: gsi); gsi_next (i: &gsi)) |
| 15136 | lower_omp_1 (gsi_p: &gsi, ctx); |
| 15137 | /* During gimplification, we haven't folded statments inside offloading |
| 15138 | or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */ |
| 15139 | if (target_nesting_level || taskreg_nesting_level) |
| 15140 | for (gsi = gsi_start (seq&: *body); !gsi_end_p (i: gsi); gsi_next (i: &gsi)) |
| 15141 | fold_stmt (&gsi); |
| 15142 | input_location = saved_location; |
| 15143 | } |
| 15144 | |
| 15145 | /* Main entry point. */ |
| 15146 | |
static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  /* Map from OMP statements to their omp_context, filled in by scan_omp
     and freed (via delete_omp_context) at the end of the pass.  */
  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Phase 1: scan the whole body, recording contexts for the OMP
     regions found.  */
  scan_omp (body_p: &body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  /* Some scanning of parallel/task/teams regions is deferred until the
     whole body has been seen; complete it now.  */
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Phase 2: only lower if scanning actually recorded any contexts.  */
  if (all_contexts->root)
    {
      /* Lowering may need to gimplify operands when scanning made some
	 variables addressable, so provide a gimplify context for that.  */
      if (make_addressable_vars)
	push_gimplify_context ();
      lower_omp (body: &body, NULL);
      if (make_addressable_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (make_addressable_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  /* Finalize the task copy functions that lowering queued up.  */
  for (auto task_stmt : task_cpyfns)
    finalize_task_copyfn (task_stmt);
  task_cpyfns.release ();
  return 0;
}
| 15203 | |
| 15204 | namespace { |
| 15205 | |
| 15206 | const pass_data pass_data_lower_omp = |
| 15207 | { |
| 15208 | .type: GIMPLE_PASS, /* type */ |
| 15209 | .name: "omplower" , /* name */ |
| 15210 | .optinfo_flags: OPTGROUP_OMP, /* optinfo_flags */ |
| 15211 | .tv_id: TV_NONE, /* tv_id */ |
| 15212 | PROP_gimple_any, /* properties_required */ |
| 15213 | PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */ |
| 15214 | .properties_destroyed: 0, /* properties_destroyed */ |
| 15215 | .todo_flags_start: 0, /* todo_flags_start */ |
| 15216 | .todo_flags_finish: 0, /* todo_flags_finish */ |
| 15217 | }; |
| 15218 | |
/* Pass wrapper around execute_lower_omp.  There is no gate method, so
   the pass runs for every function; execute_lower_omp itself returns
   early when no OpenACC/OpenMP/OpenMP-simd flag is enabled.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return execute_lower_omp ();
  }

}; // class pass_lower_omp
| 15233 | |
| 15234 | } // anon namespace |
| 15235 | |
/* Factory function used by the pass manager to instantiate the
   omplower pass.  Caller owns the returned pass object.  */

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
| 15241 | |
| 15242 | /* The following is a utility to diagnose structured block violations. |
| 15243 | It is not part of the "omplower" pass, as that's invoked too late. It |
| 15244 | should be invoked by the respective front ends after gimplification. */ |
| 15245 | |
| 15246 | static splay_tree all_labels; |
| 15247 | |
| 15248 | /* Check for mismatched contexts and generate an error if needed. Return |
| 15249 | true if an error is detected. */ |
| 15250 | |
| 15251 | static bool |
| 15252 | diagnose_sb_0 (gimple_stmt_iterator *gsi_p, |
| 15253 | gimple *branch_ctx, gimple *label_ctx) |
| 15254 | { |
| 15255 | gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx)); |
| 15256 | gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx)); |
| 15257 | |
| 15258 | if (label_ctx == branch_ctx) |
| 15259 | return false; |
| 15260 | |
| 15261 | const char* kind = NULL; |
| 15262 | |
| 15263 | if (flag_openacc) |
| 15264 | { |
| 15265 | if ((branch_ctx && is_gimple_omp_oacc (stmt: branch_ctx)) |
| 15266 | || (label_ctx && is_gimple_omp_oacc (stmt: label_ctx))) |
| 15267 | { |
| 15268 | gcc_checking_assert (kind == NULL); |
| 15269 | kind = "OpenACC" ; |
| 15270 | } |
| 15271 | } |
| 15272 | if (kind == NULL) |
| 15273 | { |
| 15274 | gcc_checking_assert (flag_openmp || flag_openmp_simd); |
| 15275 | kind = "OpenMP" ; |
| 15276 | } |
| 15277 | |
| 15278 | /* Previously we kept track of the label's entire context in diagnose_sb_[12] |
| 15279 | so we could traverse it and issue a correct "exit" or "enter" error |
| 15280 | message upon a structured block violation. |
| 15281 | |
| 15282 | We built the context by building a list with tree_cons'ing, but there is |
| 15283 | no easy counterpart in gimple tuples. It seems like far too much work |
| 15284 | for issuing exit/enter error messages. If someone really misses the |
| 15285 | distinct error message... patches welcome. */ |
| 15286 | |
| 15287 | #if 0 |
| 15288 | /* Try to avoid confusing the user by producing and error message |
| 15289 | with correct "exit" or "enter" verbiage. We prefer "exit" |
| 15290 | unless we can show that LABEL_CTX is nested within BRANCH_CTX. */ |
| 15291 | if (branch_ctx == NULL) |
| 15292 | exit_p = false; |
| 15293 | else |
| 15294 | { |
| 15295 | while (label_ctx) |
| 15296 | { |
| 15297 | if (TREE_VALUE (label_ctx) == branch_ctx) |
| 15298 | { |
| 15299 | exit_p = false; |
| 15300 | break; |
| 15301 | } |
| 15302 | label_ctx = TREE_CHAIN (label_ctx); |
| 15303 | } |
| 15304 | } |
| 15305 | |
| 15306 | if (exit_p) |
| 15307 | error ("invalid exit from %s structured block" , kind); |
| 15308 | else |
| 15309 | error ("invalid entry to %s structured block" , kind); |
| 15310 | #endif |
| 15311 | |
| 15312 | /* If it's obvious we have an invalid entry, be specific about the error. */ |
| 15313 | if (branch_ctx == NULL) |
| 15314 | error ("invalid entry to %s structured block" , kind); |
| 15315 | else |
| 15316 | { |
| 15317 | /* Otherwise, be vague and lazy, but efficient. */ |
| 15318 | error ("invalid branch to/from %s structured block" , kind); |
| 15319 | } |
| 15320 | |
| 15321 | gsi_replace (gsi_p, gimple_build_nop (), false); |
| 15322 | return true; |
| 15323 | } |
| 15324 | |
| 15325 | /* Pass 1: Create a minimal tree of structured blocks, and record |
| 15326 | where each label is found. */ |
| 15327 | |
| 15328 | static tree |
| 15329 | diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, |
| 15330 | struct walk_stmt_info *wi) |
| 15331 | { |
| 15332 | gimple *context = (gimple *) wi->info; |
| 15333 | gimple *inner_context; |
| 15334 | gimple *stmt = gsi_stmt (i: *gsi_p); |
| 15335 | |
| 15336 | *handled_ops_p = true; |
| 15337 | |
| 15338 | switch (gimple_code (g: stmt)) |
| 15339 | { |
| 15340 | WALK_SUBSTMTS; |
| 15341 | |
| 15342 | case GIMPLE_OMP_PARALLEL: |
| 15343 | case GIMPLE_OMP_TASK: |
| 15344 | case GIMPLE_OMP_SCOPE: |
| 15345 | case GIMPLE_OMP_SECTIONS: |
| 15346 | case GIMPLE_OMP_SINGLE: |
| 15347 | case GIMPLE_OMP_SECTION: |
| 15348 | case GIMPLE_OMP_STRUCTURED_BLOCK: |
| 15349 | case GIMPLE_OMP_MASTER: |
| 15350 | case GIMPLE_OMP_MASKED: |
| 15351 | case GIMPLE_OMP_ORDERED: |
| 15352 | case GIMPLE_OMP_SCAN: |
| 15353 | case GIMPLE_OMP_CRITICAL: |
| 15354 | case GIMPLE_OMP_TARGET: |
| 15355 | case GIMPLE_OMP_TEAMS: |
| 15356 | case GIMPLE_OMP_TASKGROUP: |
| 15357 | /* The minimal context here is just the current OMP construct. */ |
| 15358 | inner_context = stmt; |
| 15359 | wi->info = inner_context; |
| 15360 | walk_gimple_seq (gimple_omp_body (gs: stmt), diagnose_sb_1, NULL, wi); |
| 15361 | wi->info = context; |
| 15362 | break; |
| 15363 | |
| 15364 | case GIMPLE_OMP_FOR: |
| 15365 | inner_context = stmt; |
| 15366 | wi->info = inner_context; |
| 15367 | /* gimple_omp_for_{index,initial,final} are all DECLs; no need to |
| 15368 | walk them. */ |
| 15369 | walk_gimple_seq (gimple_omp_for_pre_body (gs: stmt), |
| 15370 | diagnose_sb_1, NULL, wi); |
| 15371 | walk_gimple_seq (gimple_omp_body (gs: stmt), diagnose_sb_1, NULL, wi); |
| 15372 | wi->info = context; |
| 15373 | break; |
| 15374 | |
| 15375 | case GIMPLE_LABEL: |
| 15376 | splay_tree_insert (all_labels, |
| 15377 | (splay_tree_key) gimple_label_label ( |
| 15378 | gs: as_a <glabel *> (p: stmt)), |
| 15379 | (splay_tree_value) context); |
| 15380 | break; |
| 15381 | |
| 15382 | default: |
| 15383 | break; |
| 15384 | } |
| 15385 | |
| 15386 | return NULL_TREE; |
| 15387 | } |
| 15388 | |
| 15389 | /* Pass 2: Check each branch and see if its context differs from that of |
| 15390 | the destination label's context. */ |
| 15391 | |
| 15392 | static tree |
| 15393 | diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, |
| 15394 | struct walk_stmt_info *wi) |
| 15395 | { |
| 15396 | gimple *context = (gimple *) wi->info; |
| 15397 | splay_tree_node n; |
| 15398 | gimple *stmt = gsi_stmt (i: *gsi_p); |
| 15399 | |
| 15400 | *handled_ops_p = true; |
| 15401 | |
| 15402 | switch (gimple_code (g: stmt)) |
| 15403 | { |
| 15404 | WALK_SUBSTMTS; |
| 15405 | |
| 15406 | case GIMPLE_OMP_PARALLEL: |
| 15407 | case GIMPLE_OMP_TASK: |
| 15408 | case GIMPLE_OMP_SCOPE: |
| 15409 | case GIMPLE_OMP_SECTIONS: |
| 15410 | case GIMPLE_OMP_SINGLE: |
| 15411 | case GIMPLE_OMP_SECTION: |
| 15412 | case GIMPLE_OMP_STRUCTURED_BLOCK: |
| 15413 | case GIMPLE_OMP_MASTER: |
| 15414 | case GIMPLE_OMP_MASKED: |
| 15415 | case GIMPLE_OMP_ORDERED: |
| 15416 | case GIMPLE_OMP_SCAN: |
| 15417 | case GIMPLE_OMP_CRITICAL: |
| 15418 | case GIMPLE_OMP_TARGET: |
| 15419 | case GIMPLE_OMP_TEAMS: |
| 15420 | case GIMPLE_OMP_TASKGROUP: |
| 15421 | wi->info = stmt; |
| 15422 | walk_gimple_seq_mod (gimple_omp_body_ptr (gs: stmt), diagnose_sb_2, NULL, wi); |
| 15423 | wi->info = context; |
| 15424 | break; |
| 15425 | |
| 15426 | case GIMPLE_OMP_FOR: |
| 15427 | wi->info = stmt; |
| 15428 | /* gimple_omp_for_{index,initial,final} are all DECLs; no need to |
| 15429 | walk them. */ |
| 15430 | walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (gs: stmt), |
| 15431 | diagnose_sb_2, NULL, wi); |
| 15432 | walk_gimple_seq_mod (gimple_omp_body_ptr (gs: stmt), diagnose_sb_2, NULL, wi); |
| 15433 | wi->info = context; |
| 15434 | break; |
| 15435 | |
| 15436 | case GIMPLE_COND: |
| 15437 | { |
| 15438 | gcond *cond_stmt = as_a <gcond *> (p: stmt); |
| 15439 | tree lab = gimple_cond_true_label (gs: cond_stmt); |
| 15440 | if (lab) |
| 15441 | { |
| 15442 | n = splay_tree_lookup (all_labels, |
| 15443 | (splay_tree_key) lab); |
| 15444 | diagnose_sb_0 (gsi_p, branch_ctx: context, |
| 15445 | label_ctx: n ? (gimple *) n->value : NULL); |
| 15446 | } |
| 15447 | lab = gimple_cond_false_label (gs: cond_stmt); |
| 15448 | if (lab) |
| 15449 | { |
| 15450 | n = splay_tree_lookup (all_labels, |
| 15451 | (splay_tree_key) lab); |
| 15452 | diagnose_sb_0 (gsi_p, branch_ctx: context, |
| 15453 | label_ctx: n ? (gimple *) n->value : NULL); |
| 15454 | } |
| 15455 | } |
| 15456 | break; |
| 15457 | |
| 15458 | case GIMPLE_GOTO: |
| 15459 | { |
| 15460 | tree lab = gimple_goto_dest (gs: stmt); |
| 15461 | if (TREE_CODE (lab) != LABEL_DECL) |
| 15462 | break; |
| 15463 | |
| 15464 | n = splay_tree_lookup (all_labels, (splay_tree_key) lab); |
| 15465 | diagnose_sb_0 (gsi_p, branch_ctx: context, label_ctx: n ? (gimple *) n->value : NULL); |
| 15466 | } |
| 15467 | break; |
| 15468 | |
| 15469 | case GIMPLE_SWITCH: |
| 15470 | { |
| 15471 | gswitch *switch_stmt = as_a <gswitch *> (p: stmt); |
| 15472 | unsigned int i; |
| 15473 | for (i = 0; i < gimple_switch_num_labels (gs: switch_stmt); ++i) |
| 15474 | { |
| 15475 | tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i)); |
| 15476 | n = splay_tree_lookup (all_labels, (splay_tree_key) lab); |
| 15477 | if (n && diagnose_sb_0 (gsi_p, branch_ctx: context, label_ctx: (gimple *) n->value)) |
| 15478 | break; |
| 15479 | } |
| 15480 | } |
| 15481 | break; |
| 15482 | |
| 15483 | case GIMPLE_ASM: |
| 15484 | { |
| 15485 | gasm *asm_stmt = as_a <gasm *> (p: stmt); |
| 15486 | for (unsigned i = 0; i < gimple_asm_nlabels (asm_stmt); ++i) |
| 15487 | { |
| 15488 | tree lab = TREE_VALUE (gimple_asm_label_op (asm_stmt, i)); |
| 15489 | n = splay_tree_lookup (all_labels, (splay_tree_key) lab); |
| 15490 | if (n && diagnose_sb_0 (gsi_p, branch_ctx: context, label_ctx: (gimple *) n->value)) |
| 15491 | break; |
| 15492 | } |
| 15493 | } |
| 15494 | break; |
| 15495 | |
| 15496 | case GIMPLE_RETURN: |
| 15497 | diagnose_sb_0 (gsi_p, branch_ctx: context, NULL); |
| 15498 | break; |
| 15499 | |
| 15500 | default: |
| 15501 | break; |
| 15502 | } |
| 15503 | |
| 15504 | return NULL_TREE; |
| 15505 | } |
| 15506 | |
| 15507 | static unsigned int |
| 15508 | diagnose_omp_structured_block_errors (void) |
| 15509 | { |
| 15510 | struct walk_stmt_info wi; |
| 15511 | gimple_seq body = gimple_body (current_function_decl); |
| 15512 | |
| 15513 | all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0); |
| 15514 | |
| 15515 | memset (s: &wi, c: 0, n: sizeof (wi)); |
| 15516 | walk_gimple_seq (body, diagnose_sb_1, NULL, &wi); |
| 15517 | |
| 15518 | memset (s: &wi, c: 0, n: sizeof (wi)); |
| 15519 | wi.want_locations = true; |
| 15520 | walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi); |
| 15521 | |
| 15522 | gimple_set_body (current_function_decl, body); |
| 15523 | |
| 15524 | splay_tree_delete (all_labels); |
| 15525 | all_labels = NULL; |
| 15526 | |
| 15527 | return 0; |
| 15528 | } |
| 15529 | |
| 15530 | namespace { |
| 15531 | |
| 15532 | const pass_data pass_data_diagnose_omp_blocks = |
| 15533 | { |
| 15534 | .type: GIMPLE_PASS, /* type */ |
| 15535 | .name: "*diagnose_omp_blocks" , /* name */ |
| 15536 | .optinfo_flags: OPTGROUP_OMP, /* optinfo_flags */ |
| 15537 | .tv_id: TV_NONE, /* tv_id */ |
| 15538 | PROP_gimple_any, /* properties_required */ |
| 15539 | .properties_provided: 0, /* properties_provided */ |
| 15540 | .properties_destroyed: 0, /* properties_destroyed */ |
| 15541 | .todo_flags_start: 0, /* todo_flags_start */ |
| 15542 | .todo_flags_finish: 0, /* todo_flags_finish */ |
| 15543 | }; |
| 15544 | |
/* Pass wrapper around diagnose_omp_structured_block_errors; gated on
   any of the OpenACC/OpenMP/OpenMP-simd flags being enabled.  */

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  unsigned int execute (function *) final override
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks
| 15563 | |
| 15564 | } // anon namespace |
| 15565 | |
/* Factory function used by the pass manager to instantiate the
   structured-block diagnostic pass.  Caller owns the returned object.  */

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
| 15571 | |
| 15572 | |
| 15573 | #include "gt-omp-low.h" |
| 15574 | |