1169689Skan/* Lowering pass for OpenMP directives.  Converts OpenMP directives
2169689Skan   into explicit calls to the runtime library (libgomp) and data
3169689Skan   marshalling to implement data sharing and copying clauses.
4169689Skan   Contributed by Diego Novillo <dnovillo@redhat.com>
5169689Skan
6169689Skan   Copyright (C) 2005, 2006 Free Software Foundation, Inc.
7169689Skan
8169689SkanThis file is part of GCC.
9169689Skan
10169689SkanGCC is free software; you can redistribute it and/or modify it under
11169689Skanthe terms of the GNU General Public License as published by the Free
12169689SkanSoftware Foundation; either version 2, or (at your option) any later
13169689Skanversion.
14169689Skan
15169689SkanGCC is distributed in the hope that it will be useful, but WITHOUT ANY
16169689SkanWARRANTY; without even the implied warranty of MERCHANTABILITY or
17169689SkanFITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
18169689Skanfor more details.
19169689Skan
20169689SkanYou should have received a copy of the GNU General Public License
21169689Skanalong with GCC; see the file COPYING.  If not, write to the Free
22169689SkanSoftware Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23169689Skan02110-1301, USA.  */
24169689Skan
25169689Skan#include "config.h"
26169689Skan#include "system.h"
27169689Skan#include "coretypes.h"
28169689Skan#include "tm.h"
29169689Skan#include "tree.h"
30169689Skan#include "rtl.h"
31169689Skan#include "tree-gimple.h"
32169689Skan#include "tree-inline.h"
33169689Skan#include "langhooks.h"
34169689Skan#include "diagnostic.h"
35169689Skan#include "tree-flow.h"
36169689Skan#include "timevar.h"
37169689Skan#include "flags.h"
38169689Skan#include "function.h"
39169689Skan#include "expr.h"
40169689Skan#include "toplev.h"
41169689Skan#include "tree-pass.h"
42169689Skan#include "ggc.h"
43169689Skan#include "except.h"
44169689Skan
45169689Skan
46169689Skan/* Lowering of OpenMP parallel and workshare constructs proceeds in two
47169689Skan   phases.  The first phase scans the function looking for OMP statements
48169689Skan   and then for variables that must be replaced to satisfy data sharing
49169689Skan   clauses.  The second phase expands code for the constructs, as well as
50169689Skan   re-gimplifying things when variables have been replaced with complex
51169689Skan   expressions.
52169689Skan
53169689Skan   Final code generation is done by pass_expand_omp.  The flowgraph is
54169689Skan   scanned for parallel regions which are then moved to a new
55169689Skan   function, to be invoked by the thread library.  */
56169689Skan
57169689Skan/* Context structure.  Used to store information about each parallel
58169689Skan   directive in the code.  */
59169689Skan
typedef struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.
     OUTER points at the enclosing context; STMT is the OMP directive
     this context was created for.  */
  struct omp_context *outer;
  tree stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  FIELD_MAP maps each DECL
     to its FIELD_DECL in RECORD_TYPE; SENDER_DECL and RECEIVER_DECL
     are the .omp_data_o / .omp_data_i objects respectively.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;
} omp_context;
95169689Skan
96169689Skan
97169689Skan/* A structure describing the main elements of a parallel loop.  */
98169689Skan
struct omp_for_data
{
  /* Loop control: iteration variable V, initial value N1, final value
     N2, increment STEP, the SCHEDULE chunk expression (or NULL), and
     the original OMP_FOR statement.  */
  tree v, n1, n2, step, chunk_size, for_stmt;
  /* Canonicalized loop condition: always LT_EXPR or GT_EXPR after
     extract_omp_for_data.  */
  enum tree_code cond_code;
  /* Statements to emit before the loop, if any.  */
  tree pre;
  /* Clause-derived flags: nowait and ordered.  */
  bool have_nowait, have_ordered;
  /* Scheduling policy from the SCHEDULE clause (static by default).  */
  enum omp_clause_schedule_kind sched_kind;
};
107169689Skan
108169689Skan
/* NOTE(review): presumably maps OMP statements to their omp_context;
   confirm where this tree is populated.  */
static splay_tree all_contexts;
/* Nesting depth of parallel directives during scanning (assumed from
   the name; maintained elsewhere in this file).  */
static int parallel_nesting_level;
/* Root of the list of toplevel OMP regions; see new_omp_region.  */
struct omp_region *root_omp_region;

/* Forward declarations for mutually-recursive lowering helpers.  */
static void scan_omp (tree *, omp_context *);
static void lower_omp (tree *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
117169689Skan
118169689Skan/* Find an OpenMP clause of type KIND within CLAUSES.  */
119169689Skan
120169689Skanstatic tree
121222207Sbenlfind_omp_clause (tree clauses, enum omp_clause_code kind)
122169689Skan{
123169689Skan  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
124169689Skan    if (OMP_CLAUSE_CODE (clauses) == kind)
125169689Skan      return clauses;
126169689Skan
127169689Skan  return NULL_TREE;
128169689Skan}
129169689Skan
130169689Skan/* Return true if CTX is for an omp parallel.  */
131169689Skan
132169689Skanstatic inline bool
133169689Skanis_parallel_ctx (omp_context *ctx)
134169689Skan{
135169689Skan  return TREE_CODE (ctx->stmt) == OMP_PARALLEL;
136169689Skan}
137169689Skan
138169689Skan
139169689Skan/* Return true if REGION is a combined parallel+workshare region.  */
140169689Skan
static inline bool
is_combined_parallel (struct omp_region *region)
{
  /* The flag is computed by determine_parallel_type.  */
  return region->is_combined_parallel;
}
146169689Skan
147169689Skan
148169689Skan/* Extract the header elements of parallel loop FOR_STMT and store
149169689Skan   them into *FD.  */
150169689Skan
static void
extract_omp_for_data (tree for_stmt, struct omp_for_data *fd)
{
  tree t;

  fd->for_stmt = for_stmt;
  fd->pre = NULL;

  /* OMP_FOR_INIT is a MODIFY_EXPR "v = n1"; the iteration variable
     must be a DECL of integral type.  */
  t = OMP_FOR_INIT (for_stmt);
  gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
  fd->v = TREE_OPERAND (t, 0);
  gcc_assert (DECL_P (fd->v));
  gcc_assert (TREE_CODE (TREE_TYPE (fd->v)) == INTEGER_TYPE);
  fd->n1 = TREE_OPERAND (t, 1);

  /* OMP_FOR_COND is "v OP n2".  Canonicalize LE/GE into strict LT/GT
     by adjusting the bound by one, so later expansion only has to
     handle strict comparisons.  */
  t = OMP_FOR_COND (for_stmt);
  fd->cond_code = TREE_CODE (t);
  gcc_assert (TREE_OPERAND (t, 0) == fd->v);
  fd->n2 = TREE_OPERAND (t, 1);
  switch (fd->cond_code)
    {
    case LT_EXPR:
    case GT_EXPR:
      break;
    case LE_EXPR:
      /* v <= n2  ==>  v < n2 + 1.  */
      fd->n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (fd->n2), fd->n2,
			   build_int_cst (TREE_TYPE (fd->n2), 1));
      fd->cond_code = LT_EXPR;
      break;
    case GE_EXPR:
      /* v >= n2  ==>  v > n2 - 1.  */
      fd->n2 = fold_build2 (MINUS_EXPR, TREE_TYPE (fd->n2), fd->n2,
			   build_int_cst (TREE_TYPE (fd->n2), 1));
      fd->cond_code = GT_EXPR;
      break;
    default:
      gcc_unreachable ();
    }

  /* OMP_FOR_INCR is "v = v +/- step".  Normalize a decrement into an
     increment by a negated step.  */
  t = OMP_FOR_INCR (fd->for_stmt);
  gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
  gcc_assert (TREE_OPERAND (t, 0) == fd->v);
  t = TREE_OPERAND (t, 1);
  gcc_assert (TREE_OPERAND (t, 0) == fd->v);
  switch (TREE_CODE (t))
    {
    case PLUS_EXPR:
      fd->step = TREE_OPERAND (t, 1);
      break;
    case MINUS_EXPR:
      fd->step = TREE_OPERAND (t, 1);
      fd->step = fold_build1 (NEGATE_EXPR, TREE_TYPE (fd->step), fd->step);
      break;
    default:
      gcc_unreachable ();
    }

  /* Defaults in case no clauses say otherwise.  */
  fd->have_nowait = fd->have_ordered = false;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->chunk_size = NULL_TREE;

  for (t = OMP_FOR_CLAUSES (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
	fd->have_nowait = true;
	break;
      case OMP_CLAUSE_ORDERED:
	fd->have_ordered = true;
	break;
      case OMP_CLAUSE_SCHEDULE:
	fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
	fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
	break;
      default:
	break;
      }

  /* schedule(runtime) never carries a chunk expression.  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
	 static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC || fd->have_ordered)
	fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
			 ? integer_zero_node : integer_one_node;
    }
}
239169689Skan
240169689Skan
241169689Skan/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
242169689Skan   is the immediate dominator of PAR_ENTRY_BB, return true if there
243169689Skan   are no data dependencies that would prevent expanding the parallel
244169689Skan   directive at PAR_ENTRY_BB as a combined parallel+workshare region.
245169689Skan
246169689Skan   When expanding a combined parallel+workshare region, the call to
247169689Skan   the child function may need additional arguments in the case of
248169689Skan   OMP_FOR regions.  In some cases, these arguments are computed out
249169689Skan   of variables passed in from the parent to the child via 'struct
250169689Skan   .omp_data_s'.  For instance:
251169689Skan
252169689Skan	#pragma omp parallel for schedule (guided, i * 4)
253169689Skan	for (j ...)
254169689Skan
255169689Skan   Is lowered into:
256169689Skan
257169689Skan   	# BLOCK 2 (PAR_ENTRY_BB)
258169689Skan	.omp_data_o.i = i;
259169689Skan	#pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)
260169689Skan
261169689Skan	# BLOCK 3 (WS_ENTRY_BB)
262169689Skan	.omp_data_i = &.omp_data_o;
263169689Skan	D.1667 = .omp_data_i->i;
264169689Skan	D.1598 = D.1667 * 4;
265169689Skan	#pragma omp for schedule (guided, D.1598)
266169689Skan
267169689Skan   When we outline the parallel region, the call to the child function
268169689Skan   'bar.omp_fn.0' will need the value D.1598 in its argument list, but
269169689Skan   that value is computed *after* the call site.  So, in principle we
270169689Skan   cannot do the transformation.
271169689Skan
272169689Skan   To see whether the code in WS_ENTRY_BB blocks the combined
273169689Skan   parallel+workshare call, we collect all the variables used in the
274169689Skan   OMP_FOR header check whether they appear on the LHS of any
275169689Skan   statement in WS_ENTRY_BB.  If so, then we cannot emit the combined
276169689Skan   call.
277169689Skan
278169689Skan   FIXME.  If we had the SSA form built at this point, we could merely
279169689Skan   hoist the code in block 3 into block 2 and be done with it.  But at
280169689Skan   this point we don't have dataflow information and though we could
281169689Skan   hack something up here, it is really not worth the aggravation.  */
282169689Skan
283169689Skanstatic bool
284169689Skanworkshare_safe_to_combine_p (basic_block par_entry_bb, basic_block ws_entry_bb)
285169689Skan{
286169689Skan  struct omp_for_data fd;
287169689Skan  tree par_stmt, ws_stmt;
288169689Skan
289169689Skan  par_stmt = last_stmt (par_entry_bb);
290169689Skan  ws_stmt = last_stmt (ws_entry_bb);
291169689Skan
292169689Skan  if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
293169689Skan    return true;
294169689Skan
295169689Skan  gcc_assert (TREE_CODE (ws_stmt) == OMP_FOR);
296169689Skan
297169689Skan  extract_omp_for_data (ws_stmt, &fd);
298169689Skan
299169689Skan  /* FIXME.  We give up too easily here.  If any of these arguments
300169689Skan     are not constants, they will likely involve variables that have
301169689Skan     been mapped into fields of .omp_data_s for sharing with the child
302169689Skan     function.  With appropriate data flow, it would be possible to
303169689Skan     see through this.  */
304169689Skan  if (!is_gimple_min_invariant (fd.n1)
305169689Skan      || !is_gimple_min_invariant (fd.n2)
306169689Skan      || !is_gimple_min_invariant (fd.step)
307169689Skan      || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
308169689Skan    return false;
309169689Skan
310169689Skan  return true;
311169689Skan}
312169689Skan
313169689Skan
314169689Skan/* Collect additional arguments needed to emit a combined
315169689Skan   parallel+workshare call.  WS_STMT is the workshare directive being
316169689Skan   expanded.  */
317169689Skan
static tree
get_ws_args_for (tree ws_stmt)
{
  tree t;

  if (TREE_CODE (ws_stmt) == OMP_FOR)
    {
      struct omp_for_data fd;
      tree ws_args;

      extract_omp_for_data (ws_stmt, &fd);

      /* Build the argument list by consing in reverse, so the final
	 order is: n1, n2, step [, chunk_size].  All values are
	 converted to long to match the libgomp entry points.  */
      ws_args = NULL_TREE;
      if (fd.chunk_size)
	{
	  t = fold_convert (long_integer_type_node, fd.chunk_size);
	  ws_args = tree_cons (NULL, t, ws_args);
	}

      t = fold_convert (long_integer_type_node, fd.step);
      ws_args = tree_cons (NULL, t, ws_args);

      t = fold_convert (long_integer_type_node, fd.n2);
      ws_args = tree_cons (NULL, t, ws_args);

      t = fold_convert (long_integer_type_node, fd.n1);
      ws_args = tree_cons (NULL, t, ws_args);

      return ws_args;
    }
  else if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
    {
      /* For sections, the only extra argument is the number of
	 sections, taken from the successor count of the OMP_SECTIONS
	 entry block.  */
      basic_block bb = bb_for_stmt (ws_stmt);
      t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs));
      t = tree_cons (NULL, t, NULL);
      return t;
    }

  gcc_unreachable ();
}
358169689Skan
359169689Skan
360169689Skan/* Discover whether REGION is a combined parallel+workshare region.  */
361169689Skan
static void
determine_parallel_type (struct omp_region *region)
{
  basic_block par_entry_bb, par_exit_bb;
  basic_block ws_entry_bb, ws_exit_bb;

  /* Need a parallel region with a single inner region, and both must
     have known exit blocks.  */
  if (region == NULL || region->inner == NULL
      || region->exit == NULL || region->inner->exit == NULL)
    return;

  /* We only support parallel+for and parallel+sections.  */
  if (region->type != OMP_PARALLEL
      || (region->inner->type != OMP_FOR
	  && region->inner->type != OMP_SECTIONS))
    return;

  /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
     WS_EXIT_BB -> PAR_EXIT_BB.  */
  par_entry_bb = region->entry;
  par_exit_bb = region->exit;
  ws_entry_bb = region->inner->entry;
  ws_exit_bb = region->inner->exit;

  /* The workshare must immediately follow the parallel entry and
     immediately precede the parallel exit, with no data dependencies
     blocking the combined call.  A directive explicitly marked as a
     combined parallel may skip the single-statement checks.  */
  if (single_succ (par_entry_bb) == ws_entry_bb
      && single_succ (ws_exit_bb) == par_exit_bb
      && workshare_safe_to_combine_p (par_entry_bb, ws_entry_bb)
      && (OMP_PARALLEL_COMBINED (last_stmt (par_entry_bb))
	  || (last_and_only_stmt (ws_entry_bb)
	      && last_and_only_stmt (par_exit_bb))))
    {
      tree ws_stmt = last_stmt (ws_entry_bb);

      if (region->inner->type == OMP_FOR)
	{
	  /* If this is a combined parallel loop, we need to determine
	     whether or not to use the combined library calls.  There
	     are two cases where we do not apply the transformation:
	     static loops and any kind of ordered loop.  In the first
	     case, we already open code the loop so there is no need
	     to do anything else.  In the latter case, the combined
	     parallel loop call would still need extra synchronization
	     to implement ordered semantics, so there would not be any
	     gain in using the combined call.  */
	  tree clauses = OMP_FOR_CLAUSES (ws_stmt);
	  tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
	  if (c == NULL
	      || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
	      || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
	    {
	      region->is_combined_parallel = false;
	      region->inner->is_combined_parallel = false;
	      return;
	    }
	}

      /* Mark both regions combined and remember the extra arguments
	 the combined libgomp call will need.  */
      region->is_combined_parallel = true;
      region->inner->is_combined_parallel = true;
      region->ws_args = get_ws_args_for (ws_stmt);
    }
}
422169689Skan
423169689Skan
424169689Skan/* Return true if EXPR is variable sized.  */
425169689Skan
426169689Skanstatic inline bool
427169689Skanis_variable_sized (tree expr)
428169689Skan{
429169689Skan  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
430169689Skan}
431169689Skan
432169689Skan/* Return true if DECL is a reference type.  */
433169689Skan
static inline bool
is_reference (tree decl)
{
  /* Delegate to the front end: whether DECL is privatized by
     reference is a language-specific decision.  */
  return lang_hooks.decls.omp_privatize_by_reference (decl);
}
439169689Skan
440169689Skan/* Lookup variables in the decl or field splay trees.  The "maybe" form
441169689Skan   allows for the variable form to not have been entered, otherwise we
442169689Skan   assert that the variable must have been entered.  */
443169689Skan
444169689Skanstatic inline tree
445169689Skanlookup_decl (tree var, omp_context *ctx)
446169689Skan{
447169689Skan  splay_tree_node n;
448169689Skan  n = splay_tree_lookup (ctx->cb.decl_map, (splay_tree_key) var);
449169689Skan  return (tree) n->value;
450169689Skan}
451169689Skan
452169689Skanstatic inline tree
453169689Skanmaybe_lookup_decl (tree var, omp_context *ctx)
454169689Skan{
455169689Skan  splay_tree_node n;
456169689Skan  n = splay_tree_lookup (ctx->cb.decl_map, (splay_tree_key) var);
457169689Skan  return n ? (tree) n->value : NULL_TREE;
458169689Skan}
459169689Skan
460169689Skanstatic inline tree
461169689Skanlookup_field (tree var, omp_context *ctx)
462169689Skan{
463169689Skan  splay_tree_node n;
464169689Skan  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
465169689Skan  return (tree) n->value;
466169689Skan}
467169689Skan
468169689Skanstatic inline tree
469169689Skanmaybe_lookup_field (tree var, omp_context *ctx)
470169689Skan{
471169689Skan  splay_tree_node n;
472169689Skan  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
473169689Skan  return n ? (tree) n->value : NULL_TREE;
474169689Skan}
475169689Skan
476169689Skan/* Return true if DECL should be copied by pointer.  SHARED_P is true
477169689Skan   if DECL is to be shared.  */
478169689Skan
479169689Skanstatic bool
480169689Skanuse_pointer_for_field (tree decl, bool shared_p)
481169689Skan{
482169689Skan  if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
483169689Skan    return true;
484169689Skan
485169689Skan  /* We can only use copy-in/copy-out semantics for shared variables
486169689Skan     when we know the value is not accessible from an outer scope.  */
487169689Skan  if (shared_p)
488169689Skan    {
489169689Skan      /* ??? Trivially accessible from anywhere.  But why would we even
490169689Skan	 be passing an address in this case?  Should we simply assert
491169689Skan	 this to be false, or should we have a cleanup pass that removes
492169689Skan	 these from the list of mappings?  */
493169689Skan      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
494169689Skan	return true;
495169689Skan
496169689Skan      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
497169689Skan	 without analyzing the expression whether or not its location
498169689Skan	 is accessible to anyone else.  In the case of nested parallel
499169689Skan	 regions it certainly may be.  */
500169689Skan      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
501169689Skan	return true;
502169689Skan
503169689Skan      /* Do not use copy-in/copy-out for variables that have their
504169689Skan	 address taken.  */
505169689Skan      if (TREE_ADDRESSABLE (decl))
506169689Skan	return true;
507169689Skan    }
508169689Skan
509169689Skan  return false;
510169689Skan}
511169689Skan
512169689Skan/* Construct a new automatic decl similar to VAR.  */
513169689Skan
514169689Skanstatic tree
515169689Skanomp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
516169689Skan{
517169689Skan  tree copy = build_decl (VAR_DECL, name, type);
518169689Skan
519169689Skan  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
520169689Skan  DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (var);
521169689Skan  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
522169689Skan  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
523169689Skan  TREE_USED (copy) = 1;
524169689Skan  DECL_CONTEXT (copy) = current_function_decl;
525169689Skan  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
526169689Skan
527169689Skan  TREE_CHAIN (copy) = ctx->block_vars;
528169689Skan  ctx->block_vars = copy;
529169689Skan
530169689Skan  return copy;
531169689Skan}
532169689Skan
533169689Skanstatic tree
534169689Skanomp_copy_decl_1 (tree var, omp_context *ctx)
535169689Skan{
536169689Skan  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
537169689Skan}
538169689Skan
539169689Skan/* Build tree nodes to access the field for VAR on the receiver side.  */
540169689Skan
541169689Skanstatic tree
542169689Skanbuild_receiver_ref (tree var, bool by_ref, omp_context *ctx)
543169689Skan{
544169689Skan  tree x, field = lookup_field (var, ctx);
545169689Skan
546169689Skan  /* If the receiver record type was remapped in the child function,
547169689Skan     remap the field into the new record type.  */
548169689Skan  x = maybe_lookup_field (field, ctx);
549169689Skan  if (x != NULL)
550169689Skan    field = x;
551169689Skan
552169689Skan  x = build_fold_indirect_ref (ctx->receiver_decl);
553169689Skan  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL);
554169689Skan  if (by_ref)
555169689Skan    x = build_fold_indirect_ref (x);
556169689Skan
557169689Skan  return x;
558169689Skan}
559169689Skan
560169689Skan/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
561169689Skan   of a parallel, this is a component reference; for workshare constructs
562169689Skan   this is some variable.  */
563169689Skan
static tree
build_outer_var_ref (tree var, omp_context *ctx)
{
  tree x;

  /* Globals need no mapping: refer to them directly.  */
  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* For VLA-like variables, the value expr is a dereference of a
	 pointer; recurse on the pointer and re-apply the indirection.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx);
      x = build_fold_indirect_ref (x);
    }
  else if (is_parallel_ctx (ctx))
    {
      /* In a parallel, the outer value lives in the .omp_data record.  */
      bool by_ref = use_pointer_for_field (var, false);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if (ctx->outer)
    /* Workshare constructs: the outer context's replacement decl.  */
    x = lookup_decl (var, ctx->outer);
  else if (is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else
    gcc_unreachable ();

  /* References get a final dereference regardless of which branch
     produced X.  */
  if (is_reference (var))
    x = build_fold_indirect_ref (x);

  return x;
}
596169689Skan
597169689Skan/* Build tree nodes to access the field for VAR on the sender side.  */
598169689Skan
599169689Skanstatic tree
600169689Skanbuild_sender_ref (tree var, omp_context *ctx)
601169689Skan{
602169689Skan  tree field = lookup_field (var, ctx);
603169689Skan  return build3 (COMPONENT_REF, TREE_TYPE (field),
604169689Skan		 ctx->sender_decl, field, NULL);
605169689Skan}
606169689Skan
607169689Skan/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
608169689Skan
609169689Skanstatic void
610169689Skaninstall_var_field (tree var, bool by_ref, omp_context *ctx)
611169689Skan{
612169689Skan  tree field, type;
613169689Skan
614169689Skan  gcc_assert (!splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
615169689Skan
616169689Skan  type = TREE_TYPE (var);
617169689Skan  if (by_ref)
618169689Skan    type = build_pointer_type (type);
619169689Skan
620169689Skan  field = build_decl (FIELD_DECL, DECL_NAME (var), type);
621169689Skan
622169689Skan  /* Remember what variable this field was created for.  This does have a
623169689Skan     side effect of making dwarf2out ignore this member, so for helpful
624169689Skan     debugging we clear it later in delete_omp_context.  */
625169689Skan  DECL_ABSTRACT_ORIGIN (field) = var;
626169689Skan
627169689Skan  insert_field_into_struct (ctx->record_type, field);
628169689Skan
629169689Skan  splay_tree_insert (ctx->field_map, (splay_tree_key) var,
630169689Skan		     (splay_tree_value) field);
631169689Skan}
632169689Skan
633169689Skanstatic tree
634169689Skaninstall_var_local (tree var, omp_context *ctx)
635169689Skan{
636169689Skan  tree new_var = omp_copy_decl_1 (var, ctx);
637169689Skan  insert_decl_map (&ctx->cb, var, new_var);
638169689Skan  return new_var;
639169689Skan}
640169689Skan
641169689Skan/* Adjust the replacement for DECL in CTX for the new context.  This means
642169689Skan   copying the DECL_VALUE_EXPR, and fixing up the type.  */
643169689Skan
static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  /* Remap the type first; the size fixups below depend on it.  */
  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  /* Copy the DECL_VALUE_EXPR into the new context, remapping any
     decls it references, when the decl is variable sized or is being
     kept for debug purposes.  */
  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  /* For variable-sized decls, remap the size expressions too; fall
     back to the remapped type's sizes if remapping fails.  */
  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
675169689Skan
676169689Skan/* The callback for remap_decl.  Search all containing contexts for a
677169689Skan   mapping of the variable; this avoids having to duplicate the splay
678169689Skan   tree ahead of time.  We know a mapping doesn't already exist in the
679169689Skan   given context.  Create new mappings to implement default semantics.  */
680169689Skan
static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the cb field of an omp_context (see the comment on
     the struct); up-cast to recover the full context.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  /* Labels always get a fresh copy in the current function.  */
  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label ();
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward through enclosing contexts up to (and including)
     the innermost parallel, returning the first existing mapping.
     Note the loop checks the context *before* stepping out, so the
     starting context itself is not searched — the caller already
     knows VAR is unmapped there.  */
  while (!is_parallel_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  /* Globals and decls from other functions are used as-is.  */
  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* Anything else should have been mapped when the parallel context
     was scanned; flag it for the caller.  */
  return error_mark_node;
}
710169689Skan
711169689Skan
/* Debugging dumps for parallel regions.  */
715169689Skanvoid dump_omp_region (FILE *, struct omp_region *, int);
716169689Skanvoid debug_omp_region (struct omp_region *);
717169689Skanvoid debug_all_omp_regions (void);
718169689Skan
719169689Skan/* Dump the parallel region tree rooted at REGION.  */
720169689Skan
721169689Skanvoid
722169689Skandump_omp_region (FILE *file, struct omp_region *region, int indent)
723169689Skan{
724169689Skan  fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
725169689Skan	   tree_code_name[region->type]);
726169689Skan
727169689Skan  if (region->inner)
728169689Skan    dump_omp_region (file, region->inner, indent + 4);
729169689Skan
730169689Skan  if (region->cont)
731169689Skan    {
732169689Skan      fprintf (file, "%*sbb %d: OMP_CONTINUE\n", indent, "",
733169689Skan	       region->cont->index);
734169689Skan    }
735169689Skan
736169689Skan  if (region->exit)
737169689Skan    fprintf (file, "%*sbb %d: OMP_RETURN\n", indent, "",
738169689Skan	     region->exit->index);
739169689Skan  else
740169689Skan    fprintf (file, "%*s[no exit marker]\n", indent, "");
741169689Skan
742169689Skan  if (region->next)
743169689Skan    dump_omp_region (file, region->next, indent);
744169689Skan}
745169689Skan
/* Dump REGION and its children to stderr (for use from a debugger).  */

void
debug_omp_region (struct omp_region *region)
{
  dump_omp_region (stderr, region, 0);
}
751169689Skan
/* Dump the entire region tree to stderr (for use from a debugger).  */

void
debug_all_omp_regions (void)
{
  dump_omp_region (stderr, root_omp_region, 0);
}
757169689Skan
758169689Skan
/* Create a new parallel region with entry block BB and type TYPE,
   nested inside region PARENT (or at toplevel when PARENT is NULL).  */
760169689Skan
761169689Skanstruct omp_region *
762169689Skannew_omp_region (basic_block bb, enum tree_code type, struct omp_region *parent)
763169689Skan{
764169689Skan  struct omp_region *region = xcalloc (1, sizeof (*region));
765169689Skan
766169689Skan  region->outer = parent;
767169689Skan  region->entry = bb;
768169689Skan  region->type = type;
769169689Skan
770169689Skan  if (parent)
771169689Skan    {
772169689Skan      /* This is a nested region.  Add it to the list of inner
773169689Skan	 regions in PARENT.  */
774169689Skan      region->next = parent->inner;
775169689Skan      parent->inner = region;
776169689Skan    }
777169689Skan  else
778169689Skan    {
779169689Skan      /* This is a toplevel region.  Add it to the list of toplevel
780169689Skan	 regions in ROOT_OMP_REGION.  */
781169689Skan      region->next = root_omp_region;
782169689Skan      root_omp_region = region;
783169689Skan    }
784169689Skan
785169689Skan  return region;
786169689Skan}
787169689Skan
788169689Skan/* Release the memory associated with the region tree rooted at REGION.  */
789169689Skan
790169689Skanstatic void
791169689Skanfree_omp_region_1 (struct omp_region *region)
792169689Skan{
793169689Skan  struct omp_region *i, *n;
794169689Skan
795169689Skan  for (i = region->inner; i ; i = n)
796169689Skan    {
797169689Skan      n = i->next;
798169689Skan      free_omp_region_1 (i);
799169689Skan    }
800169689Skan
801169689Skan  free (region);
802169689Skan}
803169689Skan
804169689Skan/* Release the memory for the entire omp region tree.  */
805169689Skan
806169689Skanvoid
807169689Skanfree_omp_regions (void)
808169689Skan{
809169689Skan  struct omp_region *r, *n;
810169689Skan  for (r = root_omp_region; r ; r = n)
811169689Skan    {
812169689Skan      n = r->next;
813169689Skan      free_omp_region_1 (r);
814169689Skan    }
815169689Skan  root_omp_region = NULL;
816169689Skan}
817169689Skan
818169689Skan
819169689Skan/* Create a new context, with OUTER_CTX being the surrounding context.  */
820169689Skan
821169689Skanstatic omp_context *
822169689Skannew_omp_context (tree stmt, omp_context *outer_ctx)
823169689Skan{
824169689Skan  omp_context *ctx = XCNEW (omp_context);
825169689Skan
826169689Skan  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
827169689Skan		     (splay_tree_value) ctx);
828169689Skan  ctx->stmt = stmt;
829169689Skan
830169689Skan  if (outer_ctx)
831169689Skan    {
832169689Skan      ctx->outer = outer_ctx;
833169689Skan      ctx->cb = outer_ctx->cb;
834169689Skan      ctx->cb.block = NULL;
835169689Skan      ctx->depth = outer_ctx->depth + 1;
836169689Skan    }
837169689Skan  else
838169689Skan    {
839169689Skan      ctx->cb.src_fn = current_function_decl;
840169689Skan      ctx->cb.dst_fn = current_function_decl;
841169689Skan      ctx->cb.src_node = cgraph_node (current_function_decl);
842169689Skan      ctx->cb.dst_node = ctx->cb.src_node;
843169689Skan      ctx->cb.src_cfun = cfun;
844169689Skan      ctx->cb.copy_decl = omp_copy_decl;
845169689Skan      ctx->cb.eh_region = -1;
846169689Skan      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
847169689Skan      ctx->depth = 1;
848169689Skan    }
849169689Skan
850169689Skan  ctx->cb.decl_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
851169689Skan
852169689Skan  return ctx;
853169689Skan}
854169689Skan
855169689Skan/* Destroy a omp_context data structures.  Called through the splay tree
856169689Skan   value delete callback.  */
857169689Skan
858169689Skanstatic void
859169689Skandelete_omp_context (splay_tree_value value)
860169689Skan{
861169689Skan  omp_context *ctx = (omp_context *) value;
862169689Skan
863169689Skan  splay_tree_delete (ctx->cb.decl_map);
864169689Skan
865169689Skan  if (ctx->field_map)
866169689Skan    splay_tree_delete (ctx->field_map);
867169689Skan
868169689Skan  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
869169689Skan     it produces corrupt debug information.  */
870169689Skan  if (ctx->record_type)
871169689Skan    {
872169689Skan      tree t;
873169689Skan      for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
874169689Skan	DECL_ABSTRACT_ORIGIN (t) = NULL;
875169689Skan    }
876169689Skan
877169689Skan  XDELETE (ctx);
878169689Skan}
879169689Skan
880169689Skan/* Fix up RECEIVER_DECL with a type that has been remapped to the child
881169689Skan   context.  */
882169689Skan
883169689Skanstatic void
884169689Skanfixup_child_record_type (omp_context *ctx)
885169689Skan{
886169689Skan  tree f, type = ctx->record_type;
887169689Skan
888169689Skan  /* ??? It isn't sufficient to just call remap_type here, because
889169689Skan     variably_modified_type_p doesn't work the way we expect for
890169689Skan     record types.  Testing each field for whether it needs remapping
891169689Skan     and creating a new record by hand works, however.  */
892169689Skan  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
893169689Skan    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
894169689Skan      break;
895169689Skan  if (f)
896169689Skan    {
897169689Skan      tree name, new_fields = NULL;
898169689Skan
899169689Skan      type = lang_hooks.types.make_type (RECORD_TYPE);
900169689Skan      name = DECL_NAME (TYPE_NAME (ctx->record_type));
901169689Skan      name = build_decl (TYPE_DECL, name, type);
902169689Skan      TYPE_NAME (type) = name;
903169689Skan
904169689Skan      for (f = TYPE_FIELDS (ctx->record_type); f ; f = TREE_CHAIN (f))
905169689Skan	{
906169689Skan	  tree new_f = copy_node (f);
907169689Skan	  DECL_CONTEXT (new_f) = type;
908169689Skan	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
909169689Skan	  TREE_CHAIN (new_f) = new_fields;
910169689Skan	  new_fields = new_f;
911169689Skan
912169689Skan	  /* Arrange to be able to look up the receiver field
913169689Skan	     given the sender field.  */
914169689Skan	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
915169689Skan			     (splay_tree_value) new_f);
916169689Skan	}
917169689Skan      TYPE_FIELDS (type) = nreverse (new_fields);
918169689Skan      layout_type (type);
919169689Skan    }
920169689Skan
921169689Skan  TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
922169689Skan}
923169689Skan
924169689Skan/* Instantiate decls as necessary in CTX to satisfy the data sharing
925169689Skan   specified by CLAUSES.  */
926169689Skan
927169689Skanstatic void
928169689Skanscan_sharing_clauses (tree clauses, omp_context *ctx)
929169689Skan{
930169689Skan  tree c, decl;
931169689Skan  bool scan_array_reductions = false;
932169689Skan
933169689Skan  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
934169689Skan    {
935169689Skan      bool by_ref;
936169689Skan
937169689Skan      switch (OMP_CLAUSE_CODE (c))
938169689Skan	{
939169689Skan	case OMP_CLAUSE_PRIVATE:
940169689Skan	  decl = OMP_CLAUSE_DECL (c);
941169689Skan	  if (!is_variable_sized (decl))
942169689Skan	    install_var_local (decl, ctx);
943169689Skan	  break;
944169689Skan
945169689Skan	case OMP_CLAUSE_SHARED:
946169689Skan	  gcc_assert (is_parallel_ctx (ctx));
947169689Skan	  decl = OMP_CLAUSE_DECL (c);
948169689Skan	  gcc_assert (!is_variable_sized (decl));
949169689Skan	  by_ref = use_pointer_for_field (decl, true);
950169689Skan	  /* Global variables don't need to be copied,
951169689Skan	     the receiver side will use them directly.  */
952169689Skan	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
953169689Skan	    break;
954169689Skan	  if (! TREE_READONLY (decl)
955169689Skan	      || TREE_ADDRESSABLE (decl)
956169689Skan	      || by_ref
957169689Skan	      || is_reference (decl))
958169689Skan	    {
959169689Skan	      install_var_field (decl, by_ref, ctx);
960169689Skan	      install_var_local (decl, ctx);
961169689Skan	      break;
962169689Skan	    }
963169689Skan	  /* We don't need to copy const scalar vars back.  */
964169689Skan	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
965169689Skan	  goto do_private;
966169689Skan
967169689Skan	case OMP_CLAUSE_LASTPRIVATE:
968169689Skan	  /* Let the corresponding firstprivate clause create
969169689Skan	     the variable.  */
970169689Skan	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
971169689Skan	    break;
972169689Skan	  /* FALLTHRU */
973169689Skan
974169689Skan	case OMP_CLAUSE_FIRSTPRIVATE:
975169689Skan	case OMP_CLAUSE_REDUCTION:
976169689Skan	  decl = OMP_CLAUSE_DECL (c);
977169689Skan	do_private:
978169689Skan	  if (is_variable_sized (decl))
979169689Skan	    break;
980169689Skan	  else if (is_parallel_ctx (ctx)
981169689Skan		   && ! is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
982169689Skan								       ctx)))
983169689Skan	    {
984169689Skan	      by_ref = use_pointer_for_field (decl, false);
985169689Skan	      install_var_field (decl, by_ref, ctx);
986169689Skan	    }
987169689Skan	  install_var_local (decl, ctx);
988169689Skan	  break;
989169689Skan
990169689Skan	case OMP_CLAUSE_COPYPRIVATE:
991169689Skan	  if (ctx->outer)
992169689Skan	    scan_omp (&OMP_CLAUSE_DECL (c), ctx->outer);
993169689Skan	  /* FALLTHRU */
994169689Skan
995169689Skan	case OMP_CLAUSE_COPYIN:
996169689Skan	  decl = OMP_CLAUSE_DECL (c);
997169689Skan	  by_ref = use_pointer_for_field (decl, false);
998169689Skan	  install_var_field (decl, by_ref, ctx);
999169689Skan	  break;
1000169689Skan
1001169689Skan	case OMP_CLAUSE_DEFAULT:
1002169689Skan	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1003169689Skan	  break;
1004169689Skan
1005169689Skan	case OMP_CLAUSE_IF:
1006169689Skan	case OMP_CLAUSE_NUM_THREADS:
1007169689Skan	case OMP_CLAUSE_SCHEDULE:
1008169689Skan	  if (ctx->outer)
1009169689Skan	    scan_omp (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1010169689Skan	  break;
1011169689Skan
1012169689Skan	case OMP_CLAUSE_NOWAIT:
1013169689Skan	case OMP_CLAUSE_ORDERED:
1014169689Skan	  break;
1015169689Skan
1016169689Skan	default:
1017169689Skan	  gcc_unreachable ();
1018169689Skan	}
1019169689Skan    }
1020169689Skan
1021169689Skan  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1022169689Skan    {
1023169689Skan      switch (OMP_CLAUSE_CODE (c))
1024169689Skan	{
1025169689Skan	case OMP_CLAUSE_LASTPRIVATE:
1026169689Skan	  /* Let the corresponding firstprivate clause create
1027169689Skan	     the variable.  */
1028169689Skan	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1029169689Skan	    break;
1030169689Skan	  /* FALLTHRU */
1031169689Skan
1032169689Skan	case OMP_CLAUSE_PRIVATE:
1033169689Skan	case OMP_CLAUSE_FIRSTPRIVATE:
1034169689Skan	case OMP_CLAUSE_REDUCTION:
1035169689Skan	  decl = OMP_CLAUSE_DECL (c);
1036169689Skan	  if (is_variable_sized (decl))
1037169689Skan	    install_var_local (decl, ctx);
1038169689Skan	  fixup_remapped_decl (decl, ctx,
1039169689Skan			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1040169689Skan			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
1041169689Skan	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1042169689Skan	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1043169689Skan	    scan_array_reductions = true;
1044169689Skan	  break;
1045169689Skan
1046169689Skan	case OMP_CLAUSE_SHARED:
1047169689Skan	  decl = OMP_CLAUSE_DECL (c);
1048169689Skan	  if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1049169689Skan	    fixup_remapped_decl (decl, ctx, false);
1050169689Skan	  break;
1051169689Skan
1052169689Skan	case OMP_CLAUSE_COPYPRIVATE:
1053169689Skan	case OMP_CLAUSE_COPYIN:
1054169689Skan	case OMP_CLAUSE_DEFAULT:
1055169689Skan	case OMP_CLAUSE_IF:
1056169689Skan	case OMP_CLAUSE_NUM_THREADS:
1057169689Skan	case OMP_CLAUSE_SCHEDULE:
1058169689Skan	case OMP_CLAUSE_NOWAIT:
1059169689Skan	case OMP_CLAUSE_ORDERED:
1060169689Skan	  break;
1061169689Skan
1062169689Skan	default:
1063169689Skan	  gcc_unreachable ();
1064169689Skan	}
1065169689Skan    }
1066169689Skan
1067169689Skan  if (scan_array_reductions)
1068169689Skan    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1069169689Skan      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1070169689Skan	  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1071169689Skan	{
1072169689Skan	  scan_omp (&OMP_CLAUSE_REDUCTION_INIT (c), ctx);
1073169689Skan	  scan_omp (&OMP_CLAUSE_REDUCTION_MERGE (c), ctx);
1074169689Skan	}
1075169689Skan}
1076169689Skan
1077169689Skan/* Create a new name for omp child function.  Returns an identifier.  */
1078169689Skan
1079169689Skanstatic GTY(()) unsigned int tmp_ompfn_id_num;
1080169689Skan
1081169689Skanstatic tree
1082169689Skancreate_omp_child_function_name (void)
1083169689Skan{
1084169689Skan  tree name = DECL_ASSEMBLER_NAME (current_function_decl);
1085169689Skan  size_t len = IDENTIFIER_LENGTH (name);
1086169689Skan  char *tmp_name, *prefix;
1087169689Skan
1088169689Skan  prefix = alloca (len + sizeof ("_omp_fn"));
1089169689Skan  memcpy (prefix, IDENTIFIER_POINTER (name), len);
1090169689Skan  strcpy (prefix + len, "_omp_fn");
1091169689Skan#ifndef NO_DOT_IN_LABEL
1092169689Skan  prefix[len] = '.';
1093169689Skan#elif !defined NO_DOLLAR_IN_LABEL
1094169689Skan  prefix[len] = '$';
1095169689Skan#endif
1096169689Skan  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, tmp_ompfn_id_num++);
1097169689Skan  return get_identifier (tmp_name);
1098169689Skan}
1099169689Skan
1100169689Skan/* Build a decl for the omp child function.  It'll not contain a body
1101169689Skan   yet, just the bare decl.  */
1102169689Skan
1103169689Skanstatic void
1104169689Skancreate_omp_child_function (omp_context *ctx)
1105169689Skan{
1106169689Skan  tree decl, type, name, t;
1107169689Skan
1108169689Skan  name = create_omp_child_function_name ();
1109169689Skan  type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1110169689Skan
1111169689Skan  decl = build_decl (FUNCTION_DECL, name, type);
1112169689Skan  decl = lang_hooks.decls.pushdecl (decl);
1113169689Skan
1114169689Skan  ctx->cb.dst_fn = decl;
1115169689Skan
1116169689Skan  TREE_STATIC (decl) = 1;
1117169689Skan  TREE_USED (decl) = 1;
1118169689Skan  DECL_ARTIFICIAL (decl) = 1;
1119169689Skan  DECL_IGNORED_P (decl) = 0;
1120169689Skan  TREE_PUBLIC (decl) = 0;
1121169689Skan  DECL_UNINLINABLE (decl) = 1;
1122169689Skan  DECL_EXTERNAL (decl) = 0;
1123169689Skan  DECL_CONTEXT (decl) = NULL_TREE;
1124169689Skan  DECL_INITIAL (decl) = make_node (BLOCK);
1125169689Skan
1126169689Skan  t = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1127169689Skan  DECL_ARTIFICIAL (t) = 1;
1128169689Skan  DECL_IGNORED_P (t) = 1;
1129169689Skan  DECL_RESULT (decl) = t;
1130169689Skan
1131169689Skan  t = build_decl (PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
1132169689Skan  DECL_ARTIFICIAL (t) = 1;
1133169689Skan  DECL_ARG_TYPE (t) = ptr_type_node;
1134169689Skan  DECL_CONTEXT (t) = current_function_decl;
1135169689Skan  TREE_USED (t) = 1;
1136169689Skan  DECL_ARGUMENTS (decl) = t;
1137169689Skan  ctx->receiver_decl = t;
1138169689Skan
1139169689Skan  /* Allocate memory for the function structure.  The call to
1140169689Skan     allocate_struct_function clobbers CFUN, so we need to restore
1141169689Skan     it afterward.  */
1142169689Skan  allocate_struct_function (decl);
1143169689Skan  DECL_SOURCE_LOCATION (decl) = EXPR_LOCATION (ctx->stmt);
1144169689Skan  cfun->function_end_locus = EXPR_LOCATION (ctx->stmt);
1145169689Skan  cfun = ctx->cb.src_cfun;
1146169689Skan}
1147169689Skan
1148169689Skan
1149169689Skan/* Scan an OpenMP parallel directive.  */
1150169689Skan
1151169689Skanstatic void
1152169689Skanscan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
1153169689Skan{
1154169689Skan  omp_context *ctx;
1155169689Skan  tree name;
1156169689Skan
1157169689Skan  /* Ignore parallel directives with empty bodies, unless there
1158169689Skan     are copyin clauses.  */
1159169689Skan  if (optimize > 0
1160169689Skan      && empty_body_p (OMP_PARALLEL_BODY (*stmt_p))
1161169689Skan      && find_omp_clause (OMP_CLAUSES (*stmt_p), OMP_CLAUSE_COPYIN) == NULL)
1162169689Skan    {
1163169689Skan      *stmt_p = build_empty_stmt ();
1164169689Skan      return;
1165169689Skan    }
1166169689Skan
1167169689Skan  ctx = new_omp_context (*stmt_p, outer_ctx);
1168169689Skan  if (parallel_nesting_level > 1)
1169169689Skan    ctx->is_nested = true;
1170169689Skan  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1171169689Skan  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1172169689Skan  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1173169689Skan  name = create_tmp_var_name (".omp_data_s");
1174169689Skan  name = build_decl (TYPE_DECL, name, ctx->record_type);
1175169689Skan  TYPE_NAME (ctx->record_type) = name;
1176169689Skan  create_omp_child_function (ctx);
1177169689Skan  OMP_PARALLEL_FN (*stmt_p) = ctx->cb.dst_fn;
1178169689Skan
1179169689Skan  scan_sharing_clauses (OMP_PARALLEL_CLAUSES (*stmt_p), ctx);
1180169689Skan  scan_omp (&OMP_PARALLEL_BODY (*stmt_p), ctx);
1181169689Skan
1182169689Skan  if (TYPE_FIELDS (ctx->record_type) == NULL)
1183169689Skan    ctx->record_type = ctx->receiver_decl = NULL;
1184169689Skan  else
1185169689Skan    {
1186169689Skan      layout_type (ctx->record_type);
1187169689Skan      fixup_child_record_type (ctx);
1188169689Skan    }
1189169689Skan}
1190169689Skan
1191169689Skan
1192169689Skan/* Scan an OpenMP loop directive.  */
1193169689Skan
1194169689Skanstatic void
1195169689Skanscan_omp_for (tree *stmt_p, omp_context *outer_ctx)
1196169689Skan{
1197169689Skan  omp_context *ctx;
1198169689Skan  tree stmt;
1199169689Skan
1200169689Skan  stmt = *stmt_p;
1201169689Skan  ctx = new_omp_context (stmt, outer_ctx);
1202169689Skan
1203169689Skan  scan_sharing_clauses (OMP_FOR_CLAUSES (stmt), ctx);
1204169689Skan
1205169689Skan  scan_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
1206169689Skan  scan_omp (&OMP_FOR_INIT (stmt), ctx);
1207169689Skan  scan_omp (&OMP_FOR_COND (stmt), ctx);
1208169689Skan  scan_omp (&OMP_FOR_INCR (stmt), ctx);
1209169689Skan  scan_omp (&OMP_FOR_BODY (stmt), ctx);
1210169689Skan}
1211169689Skan
1212169689Skan/* Scan an OpenMP sections directive.  */
1213169689Skan
1214169689Skanstatic void
1215169689Skanscan_omp_sections (tree *stmt_p, omp_context *outer_ctx)
1216169689Skan{
1217169689Skan  tree stmt;
1218169689Skan  omp_context *ctx;
1219169689Skan
1220169689Skan  stmt = *stmt_p;
1221169689Skan  ctx = new_omp_context (stmt, outer_ctx);
1222169689Skan  scan_sharing_clauses (OMP_SECTIONS_CLAUSES (stmt), ctx);
1223169689Skan  scan_omp (&OMP_SECTIONS_BODY (stmt), ctx);
1224169689Skan}
1225169689Skan
1226169689Skan/* Scan an OpenMP single directive.  */
1227169689Skan
1228169689Skanstatic void
1229169689Skanscan_omp_single (tree *stmt_p, omp_context *outer_ctx)
1230169689Skan{
1231169689Skan  tree stmt = *stmt_p;
1232169689Skan  omp_context *ctx;
1233169689Skan  tree name;
1234169689Skan
1235169689Skan  ctx = new_omp_context (stmt, outer_ctx);
1236169689Skan  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1237169689Skan  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1238169689Skan  name = create_tmp_var_name (".omp_copy_s");
1239169689Skan  name = build_decl (TYPE_DECL, name, ctx->record_type);
1240169689Skan  TYPE_NAME (ctx->record_type) = name;
1241169689Skan
1242169689Skan  scan_sharing_clauses (OMP_SINGLE_CLAUSES (stmt), ctx);
1243169689Skan  scan_omp (&OMP_SINGLE_BODY (stmt), ctx);
1244169689Skan
1245169689Skan  if (TYPE_FIELDS (ctx->record_type) == NULL)
1246169689Skan    ctx->record_type = NULL;
1247169689Skan  else
1248169689Skan    layout_type (ctx->record_type);
1249169689Skan}
1250169689Skan
1251169689Skan
1252169689Skan/* Check OpenMP nesting restrictions.  */
1253169689Skanstatic void
1254169689Skancheck_omp_nesting_restrictions (tree t, omp_context *ctx)
1255169689Skan{
1256169689Skan  switch (TREE_CODE (t))
1257169689Skan    {
1258169689Skan    case OMP_FOR:
1259169689Skan    case OMP_SECTIONS:
1260169689Skan    case OMP_SINGLE:
1261169689Skan      for (; ctx != NULL; ctx = ctx->outer)
1262169689Skan	switch (TREE_CODE (ctx->stmt))
1263169689Skan	  {
1264169689Skan	  case OMP_FOR:
1265169689Skan	  case OMP_SECTIONS:
1266169689Skan	  case OMP_SINGLE:
1267169689Skan	  case OMP_ORDERED:
1268169689Skan	  case OMP_MASTER:
1269169689Skan	    warning (0, "work-sharing region may not be closely nested inside "
1270169689Skan			"of work-sharing, critical, ordered or master region");
1271169689Skan	    return;
1272169689Skan	  case OMP_PARALLEL:
1273169689Skan	    return;
1274169689Skan	  default:
1275169689Skan	    break;
1276169689Skan	  }
1277169689Skan      break;
1278169689Skan    case OMP_MASTER:
1279169689Skan      for (; ctx != NULL; ctx = ctx->outer)
1280169689Skan	switch (TREE_CODE (ctx->stmt))
1281169689Skan	  {
1282169689Skan	  case OMP_FOR:
1283169689Skan	  case OMP_SECTIONS:
1284169689Skan	  case OMP_SINGLE:
1285169689Skan	    warning (0, "master region may not be closely nested inside "
1286169689Skan			"of work-sharing region");
1287169689Skan	    return;
1288169689Skan	  case OMP_PARALLEL:
1289169689Skan	    return;
1290169689Skan	  default:
1291169689Skan	    break;
1292169689Skan	  }
1293169689Skan      break;
1294169689Skan    case OMP_ORDERED:
1295169689Skan      for (; ctx != NULL; ctx = ctx->outer)
1296169689Skan	switch (TREE_CODE (ctx->stmt))
1297169689Skan	  {
1298169689Skan	  case OMP_CRITICAL:
1299169689Skan	    warning (0, "ordered region may not be closely nested inside "
1300169689Skan			"of critical region");
1301169689Skan	    return;
1302169689Skan	  case OMP_FOR:
1303169689Skan	    if (find_omp_clause (OMP_CLAUSES (ctx->stmt),
1304169689Skan				 OMP_CLAUSE_ORDERED) == NULL)
1305169689Skan	      warning (0, "ordered region must be closely nested inside "
1306169689Skan			  "a loop region with an ordered clause");
1307169689Skan	    return;
1308169689Skan	  case OMP_PARALLEL:
1309169689Skan	    return;
1310169689Skan	  default:
1311169689Skan	    break;
1312169689Skan	  }
1313169689Skan      break;
1314169689Skan    case OMP_CRITICAL:
1315169689Skan      for (; ctx != NULL; ctx = ctx->outer)
1316169689Skan	if (TREE_CODE (ctx->stmt) == OMP_CRITICAL
1317169689Skan	    && OMP_CRITICAL_NAME (t) == OMP_CRITICAL_NAME (ctx->stmt))
1318169689Skan	  {
1319169689Skan	    warning (0, "critical region may not be nested inside a critical "
1320169689Skan			"region with the same name");
1321169689Skan	    return;
1322169689Skan	  }
1323169689Skan      break;
1324169689Skan    default:
1325169689Skan      break;
1326169689Skan    }
1327169689Skan}
1328169689Skan
1329169689Skan
1330169689Skan/* Callback for walk_stmts used to scan for OpenMP directives at TP.  */
1331169689Skan
1332169689Skanstatic tree
1333169689Skanscan_omp_1 (tree *tp, int *walk_subtrees, void *data)
1334169689Skan{
1335169689Skan  struct walk_stmt_info *wi = data;
1336169689Skan  omp_context *ctx = wi->info;
1337169689Skan  tree t = *tp;
1338169689Skan
1339169689Skan  if (EXPR_HAS_LOCATION (t))
1340169689Skan    input_location = EXPR_LOCATION (t);
1341169689Skan
1342169689Skan  /* Check the OpenMP nesting restrictions.  */
1343169689Skan  if (OMP_DIRECTIVE_P (t) && ctx != NULL)
1344169689Skan    check_omp_nesting_restrictions (t, ctx);
1345169689Skan
1346169689Skan  *walk_subtrees = 0;
1347169689Skan  switch (TREE_CODE (t))
1348169689Skan    {
1349169689Skan    case OMP_PARALLEL:
1350169689Skan      parallel_nesting_level++;
1351169689Skan      scan_omp_parallel (tp, ctx);
1352169689Skan      parallel_nesting_level--;
1353169689Skan      break;
1354169689Skan
1355169689Skan    case OMP_FOR:
1356169689Skan      scan_omp_for (tp, ctx);
1357169689Skan      break;
1358169689Skan
1359169689Skan    case OMP_SECTIONS:
1360169689Skan      scan_omp_sections (tp, ctx);
1361169689Skan      break;
1362169689Skan
1363169689Skan    case OMP_SINGLE:
1364169689Skan      scan_omp_single (tp, ctx);
1365169689Skan      break;
1366169689Skan
1367169689Skan    case OMP_SECTION:
1368169689Skan    case OMP_MASTER:
1369169689Skan    case OMP_ORDERED:
1370169689Skan    case OMP_CRITICAL:
1371169689Skan      ctx = new_omp_context (*tp, ctx);
1372169689Skan      scan_omp (&OMP_BODY (*tp), ctx);
1373169689Skan      break;
1374169689Skan
1375169689Skan    case BIND_EXPR:
1376169689Skan      {
1377169689Skan	tree var;
1378169689Skan	*walk_subtrees = 1;
1379169689Skan
1380169689Skan	for (var = BIND_EXPR_VARS (t); var ; var = TREE_CHAIN (var))
1381169689Skan	  insert_decl_map (&ctx->cb, var, var);
1382169689Skan      }
1383169689Skan      break;
1384169689Skan
1385169689Skan    case VAR_DECL:
1386169689Skan    case PARM_DECL:
1387169689Skan    case LABEL_DECL:
1388169689Skan    case RESULT_DECL:
1389169689Skan      if (ctx)
1390169689Skan	*tp = remap_decl (t, &ctx->cb);
1391169689Skan      break;
1392169689Skan
1393169689Skan    default:
1394169689Skan      if (ctx && TYPE_P (t))
1395169689Skan	*tp = remap_type (t, &ctx->cb);
1396169689Skan      else if (!DECL_P (t))
1397169689Skan	*walk_subtrees = 1;
1398169689Skan      break;
1399169689Skan    }
1400169689Skan
1401169689Skan  return NULL_TREE;
1402169689Skan}
1403169689Skan
1404169689Skan
1405169689Skan/* Scan all the statements starting at STMT_P.  CTX contains context
1406169689Skan   information about the OpenMP directives and clauses found during
1407169689Skan   the scan.  */
1408169689Skan
1409169689Skanstatic void
1410169689Skanscan_omp (tree *stmt_p, omp_context *ctx)
1411169689Skan{
1412169689Skan  location_t saved_location;
1413169689Skan  struct walk_stmt_info wi;
1414169689Skan
1415169689Skan  memset (&wi, 0, sizeof (wi));
1416169689Skan  wi.callback = scan_omp_1;
1417169689Skan  wi.info = ctx;
1418169689Skan  wi.want_bind_expr = (ctx != NULL);
1419169689Skan  wi.want_locations = true;
1420169689Skan
1421169689Skan  saved_location = input_location;
1422169689Skan  walk_stmts (&wi, stmt_p);
1423169689Skan  input_location = saved_location;
1424169689Skan}
1425169689Skan
1426169689Skan/* Re-gimplification and code generation routines.  */
1427169689Skan
1428169689Skan/* Build a call to GOMP_barrier.  */
1429169689Skan
1430169689Skanstatic void
1431169689Skanbuild_omp_barrier (tree *stmt_list)
1432169689Skan{
1433169689Skan  tree t;
1434169689Skan
1435169689Skan  t = built_in_decls[BUILT_IN_GOMP_BARRIER];
1436169689Skan  t = build_function_call_expr (t, NULL);
1437169689Skan  gimplify_and_add (t, stmt_list);
1438169689Skan}
1439169689Skan
1440169689Skan/* If a context was created for STMT when it was scanned, return it.  */
1441169689Skan
1442169689Skanstatic omp_context *
1443169689Skanmaybe_lookup_ctx (tree stmt)
1444169689Skan{
1445169689Skan  splay_tree_node n;
1446169689Skan  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
1447169689Skan  return n ? (omp_context *) n->value : NULL;
1448169689Skan}
1449169689Skan
1450169689Skan
1451169689Skan/* Find the mapping for DECL in CTX or the immediately enclosing
1452169689Skan   context that has a mapping for DECL.
1453169689Skan
1454169689Skan   If CTX is a nested parallel directive, we may have to use the decl
1455169689Skan   mappings created in CTX's parent context.  Suppose that we have the
1456169689Skan   following parallel nesting (variable UIDs showed for clarity):
1457169689Skan
1458169689Skan	iD.1562 = 0;
1459169689Skan     	#omp parallel shared(iD.1562)		-> outer parallel
1460169689Skan	  iD.1562 = iD.1562 + 1;
1461169689Skan
1462169689Skan	  #omp parallel shared (iD.1562)	-> inner parallel
1463169689Skan	     iD.1562 = iD.1562 - 1;
1464169689Skan
1465169689Skan   Each parallel structure will create a distinct .omp_data_s structure
1466169689Skan   for copying iD.1562 in/out of the directive:
1467169689Skan
1468169689Skan  	outer parallel		.omp_data_s.1.i -> iD.1562
1469169689Skan	inner parallel		.omp_data_s.2.i -> iD.1562
1470169689Skan
1471169689Skan   A shared variable mapping will produce a copy-out operation before
1472169689Skan   the parallel directive and a copy-in operation after it.  So, in
1473169689Skan   this case we would have:
1474169689Skan
1475169689Skan  	iD.1562 = 0;
1476169689Skan	.omp_data_o.1.i = iD.1562;
1477169689Skan	#omp parallel shared(iD.1562)		-> outer parallel
1478169689Skan	  .omp_data_i.1 = &.omp_data_o.1
1479169689Skan	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
1480169689Skan
1481169689Skan	  .omp_data_o.2.i = iD.1562;		-> **
1482169689Skan	  #omp parallel shared(iD.1562)		-> inner parallel
1483169689Skan	    .omp_data_i.2 = &.omp_data_o.2
1484169689Skan	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
1485169689Skan
1486169689Skan
1487169689Skan    ** This is a problem.  The symbol iD.1562 cannot be referenced
1488169689Skan       inside the body of the outer parallel region.  But since we are
1489169689Skan       emitting this copy operation while expanding the inner parallel
1490169689Skan       directive, we need to access the CTX structure of the outer
1491169689Skan       parallel directive to get the correct mapping:
1492169689Skan
1493169689Skan	  .omp_data_o.2.i = .omp_data_i.1->i
1494169689Skan
1495169689Skan    Since there may be other workshare or parallel directives enclosing
1496169689Skan    the parallel directive, it may be necessary to walk up the context
1497169689Skan    parent chain.  This is not a problem in general because nested
1498169689Skan    parallelism happens only rarely.  */
1499169689Skan
1500169689Skanstatic tree
1501169689Skanlookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
1502169689Skan{
1503169689Skan  tree t;
1504169689Skan  omp_context *up;
1505169689Skan
1506169689Skan  gcc_assert (ctx->is_nested);
1507169689Skan
1508169689Skan  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
1509169689Skan    t = maybe_lookup_decl (decl, up);
1510169689Skan
1511171825Skan  gcc_assert (t || is_global_var (decl));
1512169689Skan
1513171825Skan  return t ? t : decl;
1514169689Skan}
1515169689Skan
1516169689Skan
1517169689Skan/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
1518169689Skan   in outer contexts.  */
1519169689Skan
1520169689Skanstatic tree
1521169689Skanmaybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
1522169689Skan{
1523169689Skan  tree t = NULL;
1524169689Skan  omp_context *up;
1525169689Skan
1526169689Skan  if (ctx->is_nested)
1527169689Skan    for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
1528169689Skan      t = maybe_lookup_decl (decl, up);
1529169689Skan
1530169689Skan  return t ? t : decl;
1531169689Skan}
1532169689Skan
1533169689Skan
1534169689Skan/* Construct the initialization value for reduction CLAUSE.  */
1535169689Skan
1536169689Skantree
1537169689Skanomp_reduction_init (tree clause, tree type)
1538169689Skan{
1539169689Skan  switch (OMP_CLAUSE_REDUCTION_CODE (clause))
1540169689Skan    {
1541169689Skan    case PLUS_EXPR:
1542169689Skan    case MINUS_EXPR:
1543169689Skan    case BIT_IOR_EXPR:
1544169689Skan    case BIT_XOR_EXPR:
1545169689Skan    case TRUTH_OR_EXPR:
1546169689Skan    case TRUTH_ORIF_EXPR:
1547169689Skan    case TRUTH_XOR_EXPR:
1548169689Skan    case NE_EXPR:
1549169689Skan      return fold_convert (type, integer_zero_node);
1550169689Skan
1551169689Skan    case MULT_EXPR:
1552169689Skan    case TRUTH_AND_EXPR:
1553169689Skan    case TRUTH_ANDIF_EXPR:
1554169689Skan    case EQ_EXPR:
1555169689Skan      return fold_convert (type, integer_one_node);
1556169689Skan
1557169689Skan    case BIT_AND_EXPR:
1558169689Skan      return fold_convert (type, integer_minus_one_node);
1559169689Skan
1560169689Skan    case MAX_EXPR:
1561169689Skan      if (SCALAR_FLOAT_TYPE_P (type))
1562169689Skan	{
1563169689Skan	  REAL_VALUE_TYPE max, min;
1564169689Skan	  if (HONOR_INFINITIES (TYPE_MODE (type)))
1565169689Skan	    {
1566169689Skan	      real_inf (&max);
1567169689Skan	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
1568169689Skan	    }
1569169689Skan	  else
1570169689Skan	    real_maxval (&min, 1, TYPE_MODE (type));
1571169689Skan	  return build_real (type, min);
1572169689Skan	}
1573169689Skan      else
1574169689Skan	{
1575169689Skan	  gcc_assert (INTEGRAL_TYPE_P (type));
1576169689Skan	  return TYPE_MIN_VALUE (type);
1577169689Skan	}
1578169689Skan
1579169689Skan    case MIN_EXPR:
1580169689Skan      if (SCALAR_FLOAT_TYPE_P (type))
1581169689Skan	{
1582169689Skan	  REAL_VALUE_TYPE max;
1583169689Skan	  if (HONOR_INFINITIES (TYPE_MODE (type)))
1584169689Skan	    real_inf (&max);
1585169689Skan	  else
1586169689Skan	    real_maxval (&max, 0, TYPE_MODE (type));
1587169689Skan	  return build_real (type, max);
1588169689Skan	}
1589169689Skan      else
1590169689Skan	{
1591169689Skan	  gcc_assert (INTEGRAL_TYPE_P (type));
1592169689Skan	  return TYPE_MAX_VALUE (type);
1593169689Skan	}
1594169689Skan
1595169689Skan    default:
1596169689Skan      gcc_unreachable ();
1597169689Skan    }
1598169689Skan}
1599169689Skan
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
			 omp_context *ctx)
{
  tree_stmt_iterator diter;
  tree c, dtor, copyin_seq, x, args, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  int pass;

  *dlist = alloc_stmt_list ();
  diter = tsi_start (*dlist);
  copyin_seq = NULL;

  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  */
  for (pass = 0; pass < 2; ++pass)
    {
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;

	  /* First filter: decide whether this clause kind needs any
	     receiver-side code at all.  */
	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
		{
		  /* Shared globals have no mapping in CTX; they are
		     accessed directly, so there is nothing to set up.  */
		  gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
		  continue;
		}
	      /* FALLTHRU */
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	    case OMP_CLAUSE_REDUCTION:
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		{
		  /* Remember that we need the barrier below, and only
		     emit the firstprivate-style copy in pass 0.  */
		  lastprivate_firstprivate = true;
		  if (pass != 0)
		    continue;
		}
	      break;
	    default:
	      continue;
	    }

	  new_var = var = OMP_CLAUSE_DECL (c);
	  if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      /* NEW_VAR's DECL_VALUE_EXPR is *PTR; recover PTR so we
		 can assign the alloca result to it.  */
	      ptr = DECL_VALUE_EXPR (new_var);
	      gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
	      ptr = TREE_OPERAND (ptr, 0);
	      gcc_assert (DECL_P (ptr));

	      /* ptr = (T *) __builtin_alloca (sizeof (T));  */
	      x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
	      args = tree_cons (NULL, x, NULL);
	      x = built_in_decls[BUILT_IN_ALLOCA];
	      x = build_function_call_expr (x, args);
	      x = fold_convert (TREE_TYPE (ptr), x);
	      x = build2 (MODIFY_EXPR, void_type_node, ptr, x);
	      gimplify_and_add (x, ilist);
	    }
	  else if (is_reference (var))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  Note that this doesn't apply to
		 C++, since reference types are disallowed in data
		 sharing clauses there, except for NRV optimized
		 return values.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (TREE_CONSTANT (x))
		{
		  /* Fixed size: back the reference with a new local
		     temporary and take its address.  */
		  const char *name = NULL;
		  if (DECL_NAME (var))
		    name = IDENTIFIER_POINTER (DECL_NAME (new_var));

		  x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					  name);
		  gimple_add_tmp_var (x);
		  x = build_fold_addr_expr_with_type (x, TREE_TYPE (new_var));
		}
	      else
		{
		  /* Variable size: allocate the backing storage with
		     alloca instead.  */
		  args = tree_cons (NULL, x, NULL);
		  x = built_in_decls[BUILT_IN_ALLOCA];
		  x = build_function_call_expr (x, args);
		  x = fold_convert (TREE_TYPE (new_var), x);
		}

	      x = build2 (MODIFY_EXPR, void_type_node, new_var, x);
	      gimplify_and_add (x, ilist);

	      /* From here on, initialize through the reference.  */
	      new_var = build_fold_indirect_ref (new_var);
	    }
	  else if (c_kind == OMP_CLAUSE_REDUCTION
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Placeholder reductions are handled in pass 1 only.  */
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;

	  /* Second switch: emit the actual initialization (and queue
	     the destructor, if any) for this clause.  */
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	      /* Shared global vars are just accessed directly.  */
	      if (is_global_var (new_var))
		break;
	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
		 needs to be delayed until after fixup_child_record_type so
		 that we get the correct type during the dereference.  */
	      by_ref = use_pointer_for_field (var, true);
	      x = build_receiver_ref (var, by_ref, ctx);
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;

	      /* ??? If VAR is not passed by reference, and the variable
		 hasn't been initialized yet, then we'll get a warning for
		 the store into the omp_data_s structure.  Ideally, we'd be
		 able to notice this and not store anything at all, but
		 we're generating code too early.  Suppress the warning.  */
	      if (!by_ref)
		TREE_NO_WARNING (var) = 1;
	      break;

	    case OMP_CLAUSE_LASTPRIVATE:
	      /* A lastprivate that is also firstprivate was already
		 initialized by the FIRSTPRIVATE case in pass 0.  */
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      /* Default-construct the private copy, if the language
		 requires it.  */
	      x = lang_hooks.decls.omp_clause_default_ctor (c, new_var);
	      if (x)
		gimplify_and_add (x, ilist);
	      /* FALLTHRU */

	    do_dtor:
	      /* Queue the destructor (if any) on DLIST, keeping
		 destruction in reverse order of construction.  */
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  dtor = x;
		  gimplify_stmt (&dtor);
		  tsi_link_before (&diter, dtor, TSI_SAME_STMT);
		}
	      break;

	    case OMP_CLAUSE_FIRSTPRIVATE:
	      /* Copy-construct from the enclosing context's value.  */
	      x = build_outer_var_ref (var, ctx);
	      x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
	      gimplify_and_add (x, ilist);
	      goto do_dtor;
	      break;

	    case OMP_CLAUSE_COPYIN:
	      /* Queue the copy on COPYIN_SEQ; it is emitted below,
		 guarded so the master thread does not self-copy.  */
	      by_ref = use_pointer_for_field (var, false);
	      x = build_receiver_ref (var, by_ref, ctx);
	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
	      append_to_statement_list (x, &copyin_seq);
	      copyin_by_ref |= by_ref;
	      break;

	    case OMP_CLAUSE_REDUCTION:
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* Language-built init sequence (e.g. for arrays).  */
		  gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), ilist);
		  OMP_CLAUSE_REDUCTION_INIT (c) = NULL;
		}
	      else
		{
		  /* Scalar reduction: initialize with the operator's
		     identity element.  */
		  x = omp_reduction_init (c, TREE_TYPE (new_var));
		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
		  x = build2 (MODIFY_EXPR, void_type_node, new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}
    }

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
  if (copyin_seq)
    {
      x = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
      x = build_function_call_expr (x, NULL);
      x = build2 (NE_EXPR, boolean_type_node, x,
		  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }

  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  */
  if (copyin_by_ref || lastprivate_firstprivate)
    build_omp_barrier (ilist);
}
1837169689Skan
1838169689Skan
1839169689Skan/* Generate code to implement the LASTPRIVATE clauses.  This is used for
1840169689Skan   both parallel and workshare constructs.  PREDICATE may be NULL if it's
1841169689Skan   always true.   */
1842169689Skan
1843169689Skanstatic void
1844169689Skanlower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
1845169689Skan			    omp_context *ctx)
1846169689Skan{
1847169689Skan  tree sub_list, x, c;
1848169689Skan
1849169689Skan  /* Early exit if there are no lastprivate clauses.  */
1850169689Skan  clauses = find_omp_clause (clauses, OMP_CLAUSE_LASTPRIVATE);
1851169689Skan  if (clauses == NULL)
1852169689Skan    {
1853169689Skan      /* If this was a workshare clause, see if it had been combined
1854169689Skan	 with its parallel.  In that case, look for the clauses on the
1855169689Skan	 parallel statement itself.  */
1856169689Skan      if (is_parallel_ctx (ctx))
1857169689Skan	return;
1858169689Skan
1859169689Skan      ctx = ctx->outer;
1860169689Skan      if (ctx == NULL || !is_parallel_ctx (ctx))
1861169689Skan	return;
1862169689Skan
1863169689Skan      clauses = find_omp_clause (OMP_PARALLEL_CLAUSES (ctx->stmt),
1864169689Skan				 OMP_CLAUSE_LASTPRIVATE);
1865169689Skan      if (clauses == NULL)
1866169689Skan	return;
1867169689Skan    }
1868169689Skan
1869169689Skan  sub_list = alloc_stmt_list ();
1870169689Skan
1871169689Skan  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1872169689Skan    {
1873169689Skan      tree var, new_var;
1874169689Skan
1875169689Skan      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE)
1876169689Skan	continue;
1877169689Skan
1878169689Skan      var = OMP_CLAUSE_DECL (c);
1879169689Skan      new_var = lookup_decl (var, ctx);
1880169689Skan
1881169689Skan      x = build_outer_var_ref (var, ctx);
1882169689Skan      if (is_reference (var))
1883169689Skan	new_var = build_fold_indirect_ref (new_var);
1884169689Skan      x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
1885169689Skan      append_to_statement_list (x, &sub_list);
1886169689Skan    }
1887169689Skan
1888169689Skan  if (predicate)
1889169689Skan    x = build3 (COND_EXPR, void_type_node, predicate, sub_list, NULL);
1890169689Skan  else
1891169689Skan    x = sub_list;
1892169689Skan
1893169689Skan  gimplify_and_add (x, stmt_list);
1894169689Skan}
1895169689Skan
1896169689Skan
/* Generate code to implement the REDUCTION clauses.  */

static void
lower_reduction_clauses (tree clauses, tree *stmt_list, omp_context *ctx)
{
  tree sub_list = NULL, x, c;
  int count = 0;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* Never use OMP_ATOMIC for array reductions.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var;
      enum tree_code code;

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
	continue;

      /* NEW_VAR is the thread-local copy; REF refers to the shared
	 variable in the enclosing context that receives the result.  */
      var = OMP_CLAUSE_DECL (c);
      new_var = lookup_decl (var, ctx);
      if (is_reference (var))
	new_var = build_fold_indirect_ref (new_var);
      ref = build_outer_var_ref (var, ctx);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
        code = PLUS_EXPR;

      if (count == 1)
	{
	  /* Exactly one scalar reduction: emit
	     *addr = *addr <op> new_var as an OMP_ATOMIC update
	     and we are done.  */
	  tree addr = build_fold_addr_expr (ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2 (code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  gimplify_and_add (x, stmt_list);
	  return;
	}

      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Language-built merge sequence (e.g. array reductions):
	     bind the placeholder decl to the shared variable via a
	     DECL_VALUE_EXPR and splice in the merge code.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (is_reference (var))
	    ref = build_fold_addr_expr (ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c), &sub_list);
	  /* The merge and placeholder have been consumed; clear them so
	     they are not processed again.  */
	  OMP_CLAUSE_REDUCTION_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Plain scalar merge: ref = ref <op> new_var.  */
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  x = build2 (MODIFY_EXPR, void_type_node, ref, x);
	  append_to_statement_list (x, &sub_list);
	}
    }

  /* Multiple (or array) reductions: protect all the merges with a
     single GOMP_atomic_start/GOMP_atomic_end critical section.  */
  x = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
  x = build_function_call_expr (x, NULL);
  gimplify_and_add (x, stmt_list);

  gimplify_and_add (sub_list, stmt_list);

  x = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
  x = build_function_call_expr (x, NULL);
  gimplify_and_add (x, stmt_list);
}
1985169689Skan
1986169689Skan
1987169689Skan/* Generate code to implement the COPYPRIVATE clauses.  */
1988169689Skan
1989169689Skanstatic void
1990169689Skanlower_copyprivate_clauses (tree clauses, tree *slist, tree *rlist,
1991169689Skan			    omp_context *ctx)
1992169689Skan{
1993169689Skan  tree c;
1994169689Skan
1995169689Skan  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1996169689Skan    {
1997169689Skan      tree var, ref, x;
1998169689Skan      bool by_ref;
1999169689Skan
2000169689Skan      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
2001169689Skan	continue;
2002169689Skan
2003169689Skan      var = OMP_CLAUSE_DECL (c);
2004169689Skan      by_ref = use_pointer_for_field (var, false);
2005169689Skan
2006169689Skan      ref = build_sender_ref (var, ctx);
2007169689Skan      x = (ctx->is_nested) ? lookup_decl_in_outer_ctx (var, ctx) : var;
2008169689Skan      x = by_ref ? build_fold_addr_expr (x) : x;
2009169689Skan      x = build2 (MODIFY_EXPR, void_type_node, ref, x);
2010169689Skan      gimplify_and_add (x, slist);
2011169689Skan
2012169689Skan      ref = build_receiver_ref (var, by_ref, ctx);
2013169689Skan      if (is_reference (var))
2014169689Skan	{
2015169689Skan	  ref = build_fold_indirect_ref (ref);
2016169689Skan	  var = build_fold_indirect_ref (var);
2017169689Skan	}
2018169689Skan      x = lang_hooks.decls.omp_clause_assign_op (c, var, ref);
2019169689Skan      gimplify_and_add (x, rlist);
2020169689Skan    }
2021169689Skan}
2022169689Skan
2023169689Skan
2024169689Skan/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
2025169689Skan   and REDUCTION from the sender (aka parent) side.  */
2026169689Skan
2027169689Skanstatic void
2028169689Skanlower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
2029169689Skan{
2030169689Skan  tree c;
2031169689Skan
2032169689Skan  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2033169689Skan    {
2034169689Skan      tree val, ref, x, var;
2035169689Skan      bool by_ref, do_in = false, do_out = false;
2036169689Skan
2037169689Skan      switch (OMP_CLAUSE_CODE (c))
2038169689Skan	{
2039169689Skan	case OMP_CLAUSE_FIRSTPRIVATE:
2040169689Skan	case OMP_CLAUSE_COPYIN:
2041169689Skan	case OMP_CLAUSE_LASTPRIVATE:
2042169689Skan	case OMP_CLAUSE_REDUCTION:
2043169689Skan	  break;
2044169689Skan	default:
2045169689Skan	  continue;
2046169689Skan	}
2047169689Skan
2048169689Skan      var = val = OMP_CLAUSE_DECL (c);
2049169689Skan      if (ctx->is_nested)
2050169689Skan	var = lookup_decl_in_outer_ctx (val, ctx);
2051169689Skan
2052169689Skan      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
2053169689Skan	  && is_global_var (var))
2054169689Skan	continue;
2055169689Skan      if (is_variable_sized (val))
2056169689Skan	continue;
2057169689Skan      by_ref = use_pointer_for_field (val, false);
2058169689Skan
2059169689Skan      switch (OMP_CLAUSE_CODE (c))
2060169689Skan	{
2061169689Skan	case OMP_CLAUSE_FIRSTPRIVATE:
2062169689Skan	case OMP_CLAUSE_COPYIN:
2063169689Skan	  do_in = true;
2064169689Skan	  break;
2065169689Skan
2066169689Skan	case OMP_CLAUSE_LASTPRIVATE:
2067169689Skan	  if (by_ref || is_reference (val))
2068169689Skan	    {
2069169689Skan	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
2070169689Skan		continue;
2071169689Skan	      do_in = true;
2072169689Skan	    }
2073169689Skan	  else
2074169689Skan	    do_out = true;
2075169689Skan	  break;
2076169689Skan
2077169689Skan	case OMP_CLAUSE_REDUCTION:
2078169689Skan	  do_in = true;
2079169689Skan	  do_out = !(by_ref || is_reference (val));
2080169689Skan	  break;
2081169689Skan
2082169689Skan	default:
2083169689Skan	  gcc_unreachable ();
2084169689Skan	}
2085169689Skan
2086169689Skan      if (do_in)
2087169689Skan	{
2088169689Skan	  ref = build_sender_ref (val, ctx);
2089169689Skan	  x = by_ref ? build_fold_addr_expr (var) : var;
2090169689Skan	  x = build2 (MODIFY_EXPR, void_type_node, ref, x);
2091169689Skan	  gimplify_and_add (x, ilist);
2092169689Skan	}
2093169689Skan
2094169689Skan      if (do_out)
2095169689Skan	{
2096169689Skan	  ref = build_sender_ref (val, ctx);
2097169689Skan	  x = build2 (MODIFY_EXPR, void_type_node, var, ref);
2098169689Skan	  gimplify_and_add (x, olist);
2099169689Skan	}
2100169689Skan    }
2101169689Skan}
2102169689Skan
2103169689Skan/* Generate code to implement SHARED from the sender (aka parent) side.
2104169689Skan   This is trickier, since OMP_PARALLEL_CLAUSES doesn't list things that
2105169689Skan   got automatically shared.  */
2106169689Skan
2107169689Skanstatic void
2108169689Skanlower_send_shared_vars (tree *ilist, tree *olist, omp_context *ctx)
2109169689Skan{
2110169689Skan  tree var, ovar, nvar, f, x;
2111169689Skan
2112169689Skan  if (ctx->record_type == NULL)
2113169689Skan    return;
2114169689Skan
2115169689Skan  for (f = TYPE_FIELDS (ctx->record_type); f ; f = TREE_CHAIN (f))
2116169689Skan    {
2117169689Skan      ovar = DECL_ABSTRACT_ORIGIN (f);
2118169689Skan      nvar = maybe_lookup_decl (ovar, ctx);
2119169689Skan      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
2120169689Skan	continue;
2121169689Skan
2122169689Skan      var = ovar;
2123169689Skan
2124169689Skan      /* If CTX is a nested parallel directive.  Find the immediately
2125169689Skan	 enclosing parallel or workshare construct that contains a
2126169689Skan	 mapping for OVAR.  */
2127169689Skan      if (ctx->is_nested)
2128169689Skan	var = lookup_decl_in_outer_ctx (ovar, ctx);
2129169689Skan
2130169689Skan      if (use_pointer_for_field (ovar, true))
2131169689Skan	{
2132169689Skan	  x = build_sender_ref (ovar, ctx);
2133169689Skan	  var = build_fold_addr_expr (var);
2134169689Skan	  x = build2 (MODIFY_EXPR, void_type_node, x, var);
2135169689Skan	  gimplify_and_add (x, ilist);
2136169689Skan	}
2137169689Skan      else
2138169689Skan	{
2139169689Skan	  x = build_sender_ref (ovar, ctx);
2140169689Skan	  x = build2 (MODIFY_EXPR, void_type_node, x, var);
2141169689Skan	  gimplify_and_add (x, ilist);
2142169689Skan
2143169689Skan	  x = build_sender_ref (ovar, ctx);
2144169689Skan	  x = build2 (MODIFY_EXPR, void_type_node, var, x);
2145169689Skan	  gimplify_and_add (x, olist);
2146169689Skan	}
2147169689Skan    }
2148169689Skan}
2149169689Skan
/* Build the function calls to GOMP_parallel_start etc to actually
   generate the parallel operation.  REGION is the parallel region
   being expanded.  BB is the block where to insert the code.  WS_ARGS
   will be set if this is a call to a combined parallel+workshare
   construct, it contains the list of additional arguments needed by
   the workshare construct.  */

static void
expand_parallel_call (struct omp_region *region, basic_block bb,
		      tree entry_stmt, tree ws_args)
{
  tree t, args, val, cond, c, list, clauses;
  block_stmt_iterator si;
  int start_ix;

  clauses = OMP_PARALLEL_CLAUSES (entry_stmt);
  push_gimplify_context ();

  /* Determine what flavor of GOMP_parallel_start we will be
     emitting.  */
  start_ix = BUILT_IN_GOMP_PARALLEL_START;
  if (is_combined_parallel (region))
    {
      switch (region->inner->type)
	{
	case OMP_FOR:
	  /* The loop-start entry points are laid out consecutively by
	     schedule kind, so the kind can be used as an offset.  */
	  start_ix = BUILT_IN_GOMP_PARALLEL_LOOP_STATIC_START
		     + region->inner->sched_kind;
	  break;
	case OMP_SECTIONS:
	  start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS_START;
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  /* By default, the value of NUM_THREADS is zero (selected at run time)
     and there is no conditional.  */
  cond = NULL_TREE;
  val = build_int_cst (unsigned_type_node, 0);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = OMP_CLAUSE_IF_EXPR (c);

  c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
  if (c)
    val = OMP_CLAUSE_NUM_THREADS_EXPR (c);

  /* Ensure 'val' is of the correct type.  */
  val = fold_convert (unsigned_type_node, val);

  /* If we found the clause 'if (cond)', build either
     (cond != 0) or (cond ? val : 1u).  */
  if (cond)
    {
      block_stmt_iterator si;

      cond = gimple_boolify (cond);

      if (integer_zerop (val))
	/* NUM_THREADS is runtime-selected: cond==0 yields the value 1
	   (serial), cond!=0 yields 0 (runtime choice).  */
	val = build2 (EQ_EXPR, unsigned_type_node, cond,
		      build_int_cst (TREE_TYPE (cond), 0));
      else
	{
	  /* Explicit NUM_THREADS: build the diamond
	     "tmp = cond ? val : 1" out of basic blocks, since we are
	     already operating on the CFG.  */
	  basic_block cond_bb, then_bb, else_bb;
	  edge e;
	  tree t, then_lab, else_lab, tmp;

	  tmp = create_tmp_var (TREE_TYPE (val), NULL);
	  /* Split BB so the condition can end its own block; the
	     fallthrough edge is rewired to the new blocks below.  */
	  e = split_block (bb, NULL);
	  cond_bb = e->src;
	  bb = e->dest;
	  remove_edge (e);

	  then_bb = create_empty_bb (cond_bb);
	  else_bb = create_empty_bb (then_bb);
	  then_lab = create_artificial_label ();
	  else_lab = create_artificial_label ();

	  t = build3 (COND_EXPR, void_type_node,
		      cond,
		      build_and_jump (&then_lab),
		      build_and_jump (&else_lab));

	  si = bsi_start (cond_bb);
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

	  /* THEN arm: tmp = val.  */
	  si = bsi_start (then_bb);
	  t = build1 (LABEL_EXPR, void_type_node, then_lab);
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
	  t = build2 (MODIFY_EXPR, void_type_node, tmp, val);
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

	  /* ELSE arm: tmp = 1 (run serially).  */
	  si = bsi_start (else_bb);
	  t = build1 (LABEL_EXPR, void_type_node, else_lab);
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
	  t = build2 (MODIFY_EXPR, void_type_node, tmp,
	              build_int_cst (unsigned_type_node, 1));
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

	  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
	  make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
	  make_edge (then_bb, bb, EDGE_FALLTHRU);
	  make_edge (else_bb, bb, EDGE_FALLTHRU);

	  val = tmp;
	}

      list = NULL_TREE;
      val = get_formal_tmp_var (val, &list);
      si = bsi_start (bb);
      bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
    }

  /* Emit GOMP_parallel_start (child_fn, &data, num_threads
     [, ws_args...]).  Arguments are consed front-to-back, so VAL ends
     up last of the three fixed arguments.  */
  list = NULL_TREE;
  args = tree_cons (NULL, val, NULL);
  t = OMP_PARALLEL_DATA_ARG (entry_stmt);
  if (t == NULL)
    t = null_pointer_node;
  else
    t = build_fold_addr_expr (t);
  args = tree_cons (NULL, t, args);
  t = build_fold_addr_expr (OMP_PARALLEL_FN (entry_stmt));
  args = tree_cons (NULL, t, args);

  if (ws_args)
    args = chainon (args, ws_args);

  t = built_in_decls[start_ix];
  t = build_function_call_expr (t, args);
  gimplify_and_add (t, &list);

  /* The spawning thread also runs the region body directly:
     child_fn (&data).  */
  t = OMP_PARALLEL_DATA_ARG (entry_stmt);
  if (t == NULL)
    t = null_pointer_node;
  else
    t = build_fold_addr_expr (t);
  args = tree_cons (NULL, t, NULL);
  t = build_function_call_expr (OMP_PARALLEL_FN (entry_stmt), args);
  gimplify_and_add (t, &list);

  /* And finally GOMP_parallel_end () to join the team.  */
  t = built_in_decls[BUILT_IN_GOMP_PARALLEL_END];
  t = build_function_call_expr (t, NULL);
  gimplify_and_add (t, &list);

  si = bsi_last (bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  pop_gimplify_context (NULL_TREE);
}
2302169689Skan
2303169689Skan
2304169689Skan/* If exceptions are enabled, wrap *STMT_P in a MUST_NOT_THROW catch
2305169689Skan   handler.  This prevents programs from violating the structured
2306169689Skan   block semantics with throws.  */
2307169689Skan
2308169689Skanstatic void
2309169689Skanmaybe_catch_exception (tree *stmt_p)
2310169689Skan{
2311169689Skan  tree f, t;
2312169689Skan
2313169689Skan  if (!flag_exceptions)
2314169689Skan    return;
2315169689Skan
2316169689Skan  if (lang_protect_cleanup_actions)
2317169689Skan    t = lang_protect_cleanup_actions ();
2318169689Skan  else
2319169689Skan    {
2320169689Skan      t = built_in_decls[BUILT_IN_TRAP];
2321169689Skan      t = build_function_call_expr (t, NULL);
2322169689Skan    }
2323169689Skan  f = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
2324169689Skan  EH_FILTER_MUST_NOT_THROW (f) = 1;
2325169689Skan  gimplify_and_add (t, &EH_FILTER_FAILURE (f));
2326169689Skan
2327169689Skan  t = build2 (TRY_CATCH_EXPR, void_type_node, *stmt_p, NULL);
2328169689Skan  append_to_statement_list (f, &TREE_OPERAND (t, 1));
2329169689Skan
2330169689Skan  *stmt_p = NULL;
2331169689Skan  append_to_statement_list (t, stmt_p);
2332169689Skan}
2333169689Skan
2334169689Skan/* Chain all the DECLs in LIST by their TREE_CHAIN fields.  */
2335169689Skan
2336169689Skanstatic tree
2337169689Skanlist2chain (tree list)
2338169689Skan{
2339169689Skan  tree t;
2340169689Skan
2341169689Skan  for (t = list; t; t = TREE_CHAIN (t))
2342169689Skan    {
2343169689Skan      tree var = TREE_VALUE (t);
2344169689Skan      if (TREE_CHAIN (t))
2345169689Skan	TREE_CHAIN (var) = TREE_VALUE (TREE_CHAIN (t));
2346169689Skan      else
2347169689Skan	TREE_CHAIN (var) = NULL_TREE;
2348169689Skan    }
2349169689Skan
2350169689Skan  return list ? TREE_VALUE (list) : NULL_TREE;
2351169689Skan}
2352169689Skan
2353169689Skan
/* Remove barriers in REGION->EXIT's block.  Note that this is only
   valid for OMP_PARALLEL regions.  Since the end of a parallel region
   is an implicit barrier, any workshare inside the OMP_PARALLEL that
   left a barrier at the end of the OMP_PARALLEL region can now be
   removed.  */

static void
remove_exit_barrier (struct omp_region *region)
{
  block_stmt_iterator si;
  basic_block exit_bb;
  edge_iterator ei;
  edge e;
  tree t;

  exit_bb = region->exit;

  /* If the parallel region doesn't return, we don't have REGION->EXIT
     block at all.  */
  if (! exit_bb)
    return;

  /* The last insn in the block will be the parallel's OMP_RETURN.  The
     workshare's OMP_RETURN will be in a preceding block.  The kinds of
     statements that can appear in between are extremely limited -- no
     memory operations at all.  Here, we allow nothing at all, so the
     only thing we allow to precede this OMP_RETURN is a label.  */
  si = bsi_last (exit_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
  bsi_prev (&si);
  if (!bsi_end_p (si) && TREE_CODE (bsi_stmt (si)) != LABEL_EXPR)
    return;

  /* Mark the OMP_RETURN of each immediately-preceding workshare as
     NOWAIT: its barrier is subsumed by the parallel's implicit one.  */
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      si = bsi_last (e->src);
      if (bsi_end_p (si))
	continue;
      t = bsi_stmt (si);
      if (TREE_CODE (t) == OMP_RETURN)
	OMP_RETURN_NOWAIT (t) = 1;
    }
}
2397169689Skan
2398169689Skanstatic void
2399169689Skanremove_exit_barriers (struct omp_region *region)
2400169689Skan{
2401169689Skan  if (region->type == OMP_PARALLEL)
2402169689Skan    remove_exit_barrier (region);
2403169689Skan
2404169689Skan  if (region->inner)
2405169689Skan    {
2406169689Skan      region = region->inner;
2407169689Skan      remove_exit_barriers (region);
2408169689Skan      while (region->next)
2409169689Skan	{
2410169689Skan	  region = region->next;
2411169689Skan	  remove_exit_barriers (region);
2412169689Skan	}
2413169689Skan    }
2414169689Skan}
2415169689Skan
/* Expand the OpenMP parallel directive starting at REGION.  The body
   of the parallel has already been given its own function decl
   (OMP_PARALLEL_FN) by the lowering pass; here we move the region's
   basic blocks into that child function and replace the directive
   with a runtime call that launches the children threads.  */

static void
expand_omp_parallel (struct omp_region *region)
{
  basic_block entry_bb, exit_bb, new_bb;
  struct function *child_cfun, *saved_cfun;
  tree child_fn, block, t, ws_args;
  block_stmt_iterator si;
  tree entry_stmt;
  edge e;
  bool do_cleanup_cfg = false;

  /* The OMP_PARALLEL statement is the last statement of the region's
     entry block.  */
  entry_stmt = last_stmt (region->entry);
  child_fn = OMP_PARALLEL_FN (entry_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  /* NOTE(review): saved_cfun is assigned but never read below --
     confirm whether it is a leftover from an earlier version.  */
  saved_cfun = cfun;

  entry_bb = region->entry;
  exit_bb = region->exit;

  /* For a combined parallel+workshare, the workshare's arguments are
     passed through to the GOMP_parallel_*_start call.  */
  if (is_combined_parallel (region))
    ws_args = region->ws_args;
  else
    ws_args = NULL_TREE;

  if (child_cfun->cfg)
    {
      /* Due to inlining, it may happen that we have already outlined
	 the region, in which case all we need to do is make the
	 sub-graph unreachable and emit the parallel call.  */
      edge entry_succ_e, exit_succ_e;
      block_stmt_iterator si;

      entry_succ_e = single_succ_edge (entry_bb);

      /* Delete the OMP_PARALLEL statement itself.  */
      si = bsi_last (entry_bb);
      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_PARALLEL);
      bsi_remove (&si, true);

      /* Disconnect the region body and fall through directly to the
	 block after the region's exit.  */
      new_bb = entry_bb;
      remove_edge (entry_succ_e);
      if (exit_bb)
	{
	  exit_succ_e = single_succ_edge (exit_bb);
	  make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
	}
      /* Remember to clean up the now-unreachable sub-graph at the
	 end.  */
      do_cleanup_cfg = true;
    }
  else
    {
      /* If the parallel region needs data sent from the parent
	 function, then the very first statement (except possible
	 tree profile counter updates) of the parallel body
	 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O.  Since
	 &.OMP_DATA_O is passed as an argument to the child function,
	 we need to replace it with the argument as seen by the child
	 function.

	 In most cases, this will end up being the identity assignment
	 .OMP_DATA_I = .OMP_DATA_I.  However, if the parallel body had
	 a function call that has been inlined, the original PARM_DECL
	 .OMP_DATA_I may have been converted into a different local
	 variable.  In which case, we need to keep the assignment.  */
      if (OMP_PARALLEL_DATA_ARG (entry_stmt))
	{
	  basic_block entry_succ_bb = single_succ (entry_bb);
	  block_stmt_iterator si;

	  /* Scan forward for the .OMP_DATA_I = &.OMP_DATA_O copy; it
	     must exist, hence the assert inside the loop.  */
	  for (si = bsi_start (entry_succ_bb); ; bsi_next (&si))
	    {
	      tree stmt, arg;

	      gcc_assert (!bsi_end_p (si));
	      stmt = bsi_stmt (si);
	      if (TREE_CODE (stmt) != MODIFY_EXPR)
		continue;

	      arg = TREE_OPERAND (stmt, 1);
	      STRIP_NOPS (arg);
	      if (TREE_CODE (arg) == ADDR_EXPR
		  && TREE_OPERAND (arg, 0)
		     == OMP_PARALLEL_DATA_ARG (entry_stmt))
		{
		  /* Identity copy: drop it.  Otherwise rewrite the
		     RHS to the child function's first parameter.  */
		  if (TREE_OPERAND (stmt, 0) == DECL_ARGUMENTS (child_fn))
		    bsi_remove (&si, true);
		  else
		    TREE_OPERAND (stmt, 1) = DECL_ARGUMENTS (child_fn);
		  break;
		}
	    }
	}

      /* Declare local variables needed in CHILD_CFUN.  */
      block = DECL_INITIAL (child_fn);
      BLOCK_VARS (block) = list2chain (child_cfun->unexpanded_var_list);
      DECL_SAVED_TREE (child_fn) = single_succ (entry_bb)->stmt_list;

      /* Reset DECL_CONTEXT on locals and function arguments.  */
      for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      /* Split ENTRY_BB at OMP_PARALLEL so that it can be moved to the
	 child function.  */
      si = bsi_last (entry_bb);
      t = bsi_stmt (si);
      gcc_assert (t && TREE_CODE (t) == OMP_PARALLEL);
      bsi_remove (&si, true);
      e = split_block (entry_bb, t);
      entry_bb = e->dest;
      single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

      /* Move the parallel region into CHILD_CFUN.  We need to reset
	 dominance information because the expansion of the inner
	 regions has invalidated it.  */
      free_dominance_info (CDI_DOMINATORS);
      new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
      if (exit_bb)
	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
      /* Let the cgraph know about the freshly populated child so it
	 gets compiled.  */
      cgraph_add_new_function (child_fn);

      /* Convert OMP_RETURN into a RETURN_EXPR.  */
      if (exit_bb)
	{
	  si = bsi_last (exit_bb);
	  gcc_assert (!bsi_end_p (si)
		      && TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
	  t = build1 (RETURN_EXPR, void_type_node, NULL);
	  bsi_insert_after (&si, t, BSI_SAME_STMT);
	  bsi_remove (&si, true);
	}
    }

  /* Emit a library call to launch the children threads.  */
  expand_parallel_call (region, new_bb, entry_stmt, ws_args);

  if (do_cleanup_cfg)
    {
      /* Clean up the unreachable sub-graph we created above.  */
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
      cleanup_tree_cfg ();
    }
}
2563169689Skan
2564169689Skan
/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with any schedule.  Given parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
	if (more) goto L0; else goto L3;
    L0:
	V = istart0;
	iend = iend0;
    L1:
	BODY;
	V += STEP;
	if (V cond iend) goto L1; else goto L2;
    L2:
	if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
    L3:

    If this is a combined omp parallel loop, instead of the call to
    GOMP_loop_foo_start, we emit 'goto L3'.

    START_FN and NEXT_FN select the schedule-specific libgomp entry
    points (e.g. the dynamic or guided variants).  */

static void
expand_omp_for_generic (struct omp_region *region,
			struct omp_for_data *fd,
			enum built_in_function start_fn,
			enum built_in_function next_fn)
{
  tree l0, l1, l2 = NULL, l3 = NULL;
  tree type, istart0, iend0, iend;
  tree t, args, list;
  basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb;
  basic_block l2_bb = NULL, l3_bb = NULL;
  block_stmt_iterator si;
  bool in_combined_parallel = is_combined_parallel (region);

  type = TREE_TYPE (fd->v);

  /* ISTART0/IEND0 are passed by address to the runtime, hence
     addressable.  IEND is a local copy in the loop variable's type.  */
  istart0 = create_tmp_var (long_integer_type_node, ".istart0");
  iend0 = create_tmp_var (long_integer_type_node, ".iend0");
  iend = create_tmp_var (type, NULL);
  TREE_ADDRESSABLE (istart0) = 1;
  TREE_ADDRESSABLE (iend0) = 1;

  /* CONT and EXIT are either both present or both absent (the latter
     when BODY never returns).  */
  gcc_assert ((region->cont != NULL) ^ (region->exit == NULL));

  entry_bb = region->entry;
  l0_bb = create_empty_bb (entry_bb);
  l1_bb = single_succ (entry_bb);

  l0 = tree_block_label (l0_bb);
  l1 = tree_block_label (l1_bb);

  cont_bb = region->cont;
  exit_bb = region->exit;
  if (cont_bb)
    {
      l2_bb = create_empty_bb (cont_bb);
      l3_bb = single_succ (cont_bb);

      l2 = tree_block_label (l2_bb);
      l3 = tree_block_label (l3_bb);
    }

  si = bsi_last (entry_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
  if (!in_combined_parallel)
    {
      /* If this is not a combined parallel loop, emit a call to
	 GOMP_loop_foo_start in ENTRY_BB.  Arguments are built in
	 reverse with tree_cons, so the final order is
	 (n1, n2, step, [chunk], &istart0, &iend0).  */
      list = alloc_stmt_list ();
      t = build_fold_addr_expr (iend0);
      args = tree_cons (NULL, t, NULL);
      t = build_fold_addr_expr (istart0);
      args = tree_cons (NULL, t, args);
      if (fd->chunk_size)
	{
	  t = fold_convert (long_integer_type_node, fd->chunk_size);
	  args = tree_cons (NULL, t, args);
	}
      t = fold_convert (long_integer_type_node, fd->step);
      args = tree_cons (NULL, t, args);
      t = fold_convert (long_integer_type_node, fd->n2);
      args = tree_cons (NULL, t, args);
      t = fold_convert (long_integer_type_node, fd->n1);
      args = tree_cons (NULL, t, args);
      t = build_function_call_expr (built_in_decls[start_fn], args);
      t = get_formal_tmp_var (t, &list);
      if (cont_bb)
	{
	  /* Branch on the start function's result: iterations to do
	     or not.  */
	  t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l0),
		      build_and_jump (&l3));
	  append_to_statement_list (t, &list);
	}
      bsi_insert_after (&si, list, BSI_SAME_STMT);
    }
  /* In all cases the OMP_FOR statement itself is deleted.  */
  bsi_remove (&si, true);

  /* Iteration setup for sequential loop goes in L0_BB.  */
  list = alloc_stmt_list ();
  t = fold_convert (type, istart0);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = fold_convert (type, iend0);
  t = build2 (MODIFY_EXPR, void_type_node, iend, t);
  gimplify_and_add (t, &list);

  si = bsi_start (l0_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* Handle the rare case where BODY doesn't ever return.  */
  if (cont_bb == NULL)
    {
      remove_edge (single_succ_edge (entry_bb));
      make_edge (entry_bb, l0_bb, EDGE_FALLTHRU);
      make_edge (l0_bb, l1_bb, EDGE_FALLTHRU);
      return;
    }

  /* Code to control the increment and predicate for the sequential
     loop goes in the first half of EXIT_BB (we split EXIT_BB so
     that we can inherit all the edges going out of the loop
     body).  */
  list = alloc_stmt_list ();

  t = build2 (PLUS_EXPR, type, fd->v, fd->step);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = build2 (fd->cond_code, boolean_type_node, fd->v, iend);
  t = get_formal_tmp_var (t, &list);
  t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l1),
	      build_and_jump (&l2));
  append_to_statement_list (t, &list);

  /* The new control code replaces the OMP_CONTINUE statement.  */
  si = bsi_last (cont_bb);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
  bsi_remove (&si, true);

  /* Emit code to get the next parallel iteration in L2_BB.  */
  list = alloc_stmt_list ();

  t = build_fold_addr_expr (iend0);
  args = tree_cons (NULL, t, NULL);
  t = build_fold_addr_expr (istart0);
  args = tree_cons (NULL, t, args);
  t = build_function_call_expr (built_in_decls[next_fn], args);
  t = get_formal_tmp_var (t, &list);
  t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l0),
	      build_and_jump (&l3));
  append_to_statement_list (t, &list);

  si = bsi_start (l2_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* Add the loop cleanup function.  The OMP_RETURN's nowait flag
     selects the non-synchronizing variant.  */
  si = bsi_last (exit_bb);
  if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
    t = built_in_decls[BUILT_IN_GOMP_LOOP_END_NOWAIT];
  else
    t = built_in_decls[BUILT_IN_GOMP_LOOP_END];
  t = build_function_call_expr (t, NULL);
  bsi_insert_after (&si, t, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Connect the new blocks.  */
  remove_edge (single_succ_edge (entry_bb));
  if (in_combined_parallel)
    make_edge (entry_bb, l2_bb, EDGE_FALLTHRU);
  else
    {
      make_edge (entry_bb, l0_bb, EDGE_TRUE_VALUE);
      make_edge (entry_bb, l3_bb, EDGE_FALSE_VALUE);
    }

  make_edge (l0_bb, l1_bb, EDGE_FALLTHRU);

  remove_edge (single_succ_edge (cont_bb));
  make_edge (cont_bb, l1_bb, EDGE_TRUE_VALUE);
  make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);

  make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
  make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
}
2752169689Skan
2753169689Skan
/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with static schedule and no specified chunk size.  Given
   parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	if (cond is <)
	  adj = STEP - 1;
	else
	  adj = STEP + 1;
	n = (adj + N2 - N1) / STEP;
	q = n / nthreads;
	q += (q * nthreads != n);
	s0 = q * threadid;
	e0 = min(s0 + q, n);
	if (s0 >= e0) goto L2; else goto L0;
    L0:
	V = s0 * STEP + N1;
	e = e0 * STEP + N1;
    L1:
	BODY;
	V += STEP;
	if (V cond e) goto L1;
    L2:

   i.e. each thread computes its own contiguous [s0, e0) slice of the
   logical iteration space directly, with no runtime calls beyond the
   thread-number queries and the final barrier.  */

static void
expand_omp_for_static_nochunk (struct omp_region *region,
			       struct omp_for_data *fd)
{
  tree l0, l1, l2, n, q, s0, e0, e, t, nthreads, threadid;
  tree type, list;
  basic_block entry_bb, exit_bb, seq_start_bb, body_bb, cont_bb;
  basic_block fin_bb;
  block_stmt_iterator si;

  type = TREE_TYPE (fd->v);

  entry_bb = region->entry;
  seq_start_bb = create_empty_bb (entry_bb);
  body_bb = single_succ (entry_bb);
  cont_bb = region->cont;
  fin_bb = single_succ (cont_bb);
  exit_bb = region->exit;

  l0 = tree_block_label (seq_start_bb);
  l1 = tree_block_label (body_bb);
  l2 = tree_block_label (fin_bb);

  /* Iteration space partitioning goes in ENTRY_BB.  */
  list = alloc_stmt_list ();

  t = built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS];
  t = build_function_call_expr (t, NULL);
  t = fold_convert (type, t);
  nthreads = get_formal_tmp_var (t, &list);

  t = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
  t = build_function_call_expr (t, NULL);
  t = fold_convert (type, t);
  threadid = get_formal_tmp_var (t, &list);

  /* Force the loop bounds and step into gimple values so they can be
     used repeatedly below.  */
  fd->n1 = fold_convert (type, fd->n1);
  if (!is_gimple_val (fd->n1))
    fd->n1 = get_formal_tmp_var (fd->n1, &list);

  fd->n2 = fold_convert (type, fd->n2);
  if (!is_gimple_val (fd->n2))
    fd->n2 = get_formal_tmp_var (fd->n2, &list);

  fd->step = fold_convert (type, fd->step);
  if (!is_gimple_val (fd->step))
    fd->step = get_formal_tmp_var (fd->step, &list);

  /* n = (adj + N2 - N1) / STEP, the total iteration count, where
     adj = STEP -/+ 1 rounds the division toward the iteration count.  */
  t = build_int_cst (type, (fd->cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2 (PLUS_EXPR, type, fd->step, t);
  t = fold_build2 (PLUS_EXPR, type, t, fd->n2);
  t = fold_build2 (MINUS_EXPR, type, t, fd->n1);
  t = fold_build2 (TRUNC_DIV_EXPR, type, t, fd->step);
  t = fold_convert (type, t);
  if (is_gimple_val (t))
    n = t;
  else
    n = get_formal_tmp_var (t, &list);

  /* q = ceil(n / nthreads): iterations per thread.  */
  t = build2 (TRUNC_DIV_EXPR, type, n, nthreads);
  q = get_formal_tmp_var (t, &list);

  t = build2 (MULT_EXPR, type, q, nthreads);
  t = build2 (NE_EXPR, type, t, n);
  t = build2 (PLUS_EXPR, type, q, t);
  q = get_formal_tmp_var (t, &list);

  /* This thread's slice is [s0, e0).  */
  t = build2 (MULT_EXPR, type, q, threadid);
  s0 = get_formal_tmp_var (t, &list);

  t = build2 (PLUS_EXPR, type, s0, q);
  t = build2 (MIN_EXPR, type, t, n);
  e0 = get_formal_tmp_var (t, &list);

  /* Skip the loop entirely if this thread got an empty slice.  */
  t = build2 (GE_EXPR, boolean_type_node, s0, e0);
  t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l2),
	      build_and_jump (&l0));
  append_to_statement_list (t, &list);

  /* The partitioning code replaces the OMP_FOR statement.  */
  si = bsi_last (entry_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Setup code for sequential iteration goes in SEQ_START_BB.  */
  list = alloc_stmt_list ();

  /* V = s0 * STEP + N1; e = e0 * STEP + N1: map logical iteration
     numbers back to loop-variable values.  */
  t = fold_convert (type, s0);
  t = build2 (MULT_EXPR, type, t, fd->step);
  t = build2 (PLUS_EXPR, type, t, fd->n1);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = fold_convert (type, e0);
  t = build2 (MULT_EXPR, type, t, fd->step);
  t = build2 (PLUS_EXPR, type, t, fd->n1);
  e = get_formal_tmp_var (t, &list);

  si = bsi_start (seq_start_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* The code controlling the sequential loop replaces the OMP_CONTINUE.  */
  list = alloc_stmt_list ();

  t = build2 (PLUS_EXPR, type, fd->v, fd->step);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = build2 (fd->cond_code, boolean_type_node, fd->v, e);
  t = get_formal_tmp_var (t, &list);
  t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l1),
	      build_and_jump (&l2));
  append_to_statement_list (t, &list);

  si = bsi_last (cont_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Replace the OMP_RETURN with a barrier, or nothing.  */
  si = bsi_last (exit_bb);
  if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
    {
      list = alloc_stmt_list ();
      build_omp_barrier (&list);
      bsi_insert_after (&si, list, BSI_SAME_STMT);
    }
  bsi_remove (&si, true);

  /* Connect all the blocks.  */
  make_edge (seq_start_bb, body_bb, EDGE_FALLTHRU);

  remove_edge (single_succ_edge (entry_bb));
  make_edge (entry_bb, fin_bb, EDGE_TRUE_VALUE);
  make_edge (entry_bb, seq_start_bb, EDGE_FALSE_VALUE);

  make_edge (cont_bb, body_bb, EDGE_TRUE_VALUE);
  find_edge (cont_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
}
2921169689Skan
2922169689Skan
/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with static schedule and a specified chunk size.  Given
   parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	if (cond is <)
	  adj = STEP - 1;
	else
	  adj = STEP + 1;
	n = (adj + N2 - N1) / STEP;
	trip = 0;
    L0:
	s0 = (trip * nthreads + threadid) * CHUNK;
	e0 = min(s0 + CHUNK, n);
	if (s0 < n) goto L1; else goto L4;
    L1:
	V = s0 * STEP + N1;
	e = e0 * STEP + N1;
    L2:
	BODY;
	V += STEP;
	if (V cond e) goto L2; else goto L3;
    L3:
	trip += 1;
	goto L0;
    L4:

   i.e. chunks of CHUNK iterations are dealt out round-robin: on trip
   number TRIP, thread THREADID works on chunk TRIP*nthreads+THREADID,
   until its chunk start falls past the total iteration count.  */

static void
expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
{
  tree l0, l1, l2, l3, l4, n, s0, e0, e, t;
  tree trip, nthreads, threadid;
  tree type;
  basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
  basic_block trip_update_bb, cont_bb, fin_bb;
  tree list;
  block_stmt_iterator si;

  type = TREE_TYPE (fd->v);

  entry_bb = region->entry;
  iter_part_bb = create_empty_bb (entry_bb);
  seq_start_bb = create_empty_bb (iter_part_bb);
  body_bb = single_succ (entry_bb);
  cont_bb = region->cont;
  trip_update_bb = create_empty_bb (cont_bb);
  fin_bb = single_succ (cont_bb);
  exit_bb = region->exit;

  /* Labels L0..L4 from the pseudocode above.  */
  l0 = tree_block_label (iter_part_bb);
  l1 = tree_block_label (seq_start_bb);
  l2 = tree_block_label (body_bb);
  l3 = tree_block_label (trip_update_bb);
  l4 = tree_block_label (fin_bb);

  /* Trip and adjustment setup goes in ENTRY_BB.  */
  list = alloc_stmt_list ();

  t = built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS];
  t = build_function_call_expr (t, NULL);
  t = fold_convert (type, t);
  nthreads = get_formal_tmp_var (t, &list);

  t = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
  t = build_function_call_expr (t, NULL);
  t = fold_convert (type, t);
  threadid = get_formal_tmp_var (t, &list);

  /* Force bounds, step and chunk size into gimple values for reuse
     below.  */
  fd->n1 = fold_convert (type, fd->n1);
  if (!is_gimple_val (fd->n1))
    fd->n1 = get_formal_tmp_var (fd->n1, &list);

  fd->n2 = fold_convert (type, fd->n2);
  if (!is_gimple_val (fd->n2))
    fd->n2 = get_formal_tmp_var (fd->n2, &list);

  fd->step = fold_convert (type, fd->step);
  if (!is_gimple_val (fd->step))
    fd->step = get_formal_tmp_var (fd->step, &list);

  fd->chunk_size = fold_convert (type, fd->chunk_size);
  if (!is_gimple_val (fd->chunk_size))
    fd->chunk_size = get_formal_tmp_var (fd->chunk_size, &list);

  /* n = (adj + N2 - N1) / STEP, the total iteration count.  */
  t = build_int_cst (type, (fd->cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2 (PLUS_EXPR, type, fd->step, t);
  t = fold_build2 (PLUS_EXPR, type, t, fd->n2);
  t = fold_build2 (MINUS_EXPR, type, t, fd->n1);
  t = fold_build2 (TRUNC_DIV_EXPR, type, t, fd->step);
  t = fold_convert (type, t);
  if (is_gimple_val (t))
    n = t;
  else
    n = get_formal_tmp_var (t, &list);

  /* trip = 0; TRIP must be a real (addressable-style) temporary since
     it is updated in the loop, hence get_initialized_tmp_var.  */
  t = build_int_cst (type, 0);
  trip = get_initialized_tmp_var (t, &list, NULL);

  /* The setup code replaces the OMP_FOR statement.  */
  si = bsi_last (entry_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Iteration space partitioning goes in ITER_PART_BB.  */
  list = alloc_stmt_list ();

  /* s0 = (trip * nthreads + threadid) * CHUNK.  */
  t = build2 (MULT_EXPR, type, trip, nthreads);
  t = build2 (PLUS_EXPR, type, t, threadid);
  t = build2 (MULT_EXPR, type, t, fd->chunk_size);
  s0 = get_formal_tmp_var (t, &list);

  /* e0 = min(s0 + CHUNK, n).  */
  t = build2 (PLUS_EXPR, type, s0, fd->chunk_size);
  t = build2 (MIN_EXPR, type, t, n);
  e0 = get_formal_tmp_var (t, &list);

  /* Exit the whole construct once this thread's chunk start runs past
     the iteration count.  */
  t = build2 (LT_EXPR, boolean_type_node, s0, n);
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l1), build_and_jump (&l4));
  append_to_statement_list (t, &list);

  si = bsi_start (iter_part_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* Setup code for sequential iteration goes in SEQ_START_BB.  */
  list = alloc_stmt_list ();

  /* V = s0 * STEP + N1; e = e0 * STEP + N1.  */
  t = fold_convert (type, s0);
  t = build2 (MULT_EXPR, type, t, fd->step);
  t = build2 (PLUS_EXPR, type, t, fd->n1);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = fold_convert (type, e0);
  t = build2 (MULT_EXPR, type, t, fd->step);
  t = build2 (PLUS_EXPR, type, t, fd->n1);
  e = get_formal_tmp_var (t, &list);

  si = bsi_start (seq_start_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* The code controlling the sequential loop goes in CONT_BB,
     replacing the OMP_CONTINUE.  */
  list = alloc_stmt_list ();

  t = build2 (PLUS_EXPR, type, fd->v, fd->step);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = build2 (fd->cond_code, boolean_type_node, fd->v, e);
  t = get_formal_tmp_var (t, &list);
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l2), build_and_jump (&l3));
  append_to_statement_list (t, &list);

  si = bsi_last (cont_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Trip update code goes into TRIP_UPDATE_BB.  */
  list = alloc_stmt_list ();

  t = build_int_cst (type, 1);
  t = build2 (PLUS_EXPR, type, trip, t);
  t = build2 (MODIFY_EXPR, void_type_node, trip, t);
  gimplify_and_add (t, &list);

  si = bsi_start (trip_update_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* Replace the OMP_RETURN with a barrier, or nothing.  */
  si = bsi_last (exit_bb);
  if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
    {
      list = alloc_stmt_list ();
      build_omp_barrier (&list);
      bsi_insert_after (&si, list, BSI_SAME_STMT);
    }
  bsi_remove (&si, true);

  /* Connect the new blocks.  */
  remove_edge (single_succ_edge (entry_bb));
  make_edge (entry_bb, iter_part_bb, EDGE_FALLTHRU);

  make_edge (iter_part_bb, seq_start_bb, EDGE_TRUE_VALUE);
  make_edge (iter_part_bb, fin_bb, EDGE_FALSE_VALUE);

  make_edge (seq_start_bb, body_bb, EDGE_FALLTHRU);

  remove_edge (single_succ_edge (cont_bb));
  make_edge (cont_bb, body_bb, EDGE_TRUE_VALUE);
  make_edge (cont_bb, trip_update_bb, EDGE_FALSE_VALUE);

  make_edge (trip_update_bb, iter_part_bb, EDGE_FALLTHRU);
}
3122169689Skan
3123169689Skan
3124169689Skan/* Expand the OpenMP loop defined by REGION.  */
3125169689Skan
3126169689Skanstatic void
3127169689Skanexpand_omp_for (struct omp_region *region)
3128169689Skan{
3129169689Skan  struct omp_for_data fd;
3130169689Skan
3131169689Skan  push_gimplify_context ();
3132169689Skan
3133169689Skan  extract_omp_for_data (last_stmt (region->entry), &fd);
3134169689Skan  region->sched_kind = fd.sched_kind;
3135169689Skan
3136169689Skan  if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
3137169689Skan      && !fd.have_ordered
3138169689Skan      && region->cont
3139169689Skan      && region->exit)
3140169689Skan    {
3141169689Skan      if (fd.chunk_size == NULL)
3142169689Skan	expand_omp_for_static_nochunk (region, &fd);
3143169689Skan      else
3144169689Skan	expand_omp_for_static_chunk (region, &fd);
3145169689Skan    }
3146169689Skan  else
3147169689Skan    {
3148169689Skan      int fn_index = fd.sched_kind + fd.have_ordered * 4;
3149169689Skan      int start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
3150169689Skan      int next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
3151169689Skan      expand_omp_for_generic (region, &fd, start_ix, next_ix);
3152169689Skan    }
3153169689Skan
3154169689Skan  pop_gimplify_context (NULL);
3155169689Skan}
3156169689Skan
3157169689Skan
3158169689Skan/* Expand code for an OpenMP sections directive.  In pseudo code, we generate
3159169689Skan
3160169689Skan	v = GOMP_sections_start (n);
3161169689Skan    L0:
3162169689Skan	switch (v)
3163169689Skan	  {
3164169689Skan	  case 0:
3165169689Skan	    goto L2;
3166169689Skan	  case 1:
3167169689Skan	    section 1;
3168169689Skan	    goto L1;
3169169689Skan	  case 2:
3170169689Skan	    ...
3171169689Skan	  case n:
3172169689Skan	    ...
3173169689Skan	  default:
3174169689Skan	    abort ();
3175169689Skan	  }
3176169689Skan    L1:
3177169689Skan	v = GOMP_sections_next ();
3178169689Skan	goto L0;
3179169689Skan    L2:
3180169689Skan	reduction;
3181169689Skan
3182169689Skan    If this is a combined parallel sections, replace the call to
3183169689Skan    GOMP_sections_start with 'goto L1'.  */
3184169689Skan
static void
expand_omp_sections (struct omp_region *region)
{
  tree label_vec, l0, l1, l2, t, u, v, sections_stmt;
  unsigned i, len;
  basic_block entry_bb, exit_bb, l0_bb, l1_bb, l2_bb, default_bb;
  block_stmt_iterator si;
  struct omp_region *inner;
  edge e;

  entry_bb = region->entry;
  /* L0 will hold the switch that dispatches to the individual sections.  */
  l0_bb = create_empty_bb (entry_bb);
  l0 = tree_block_label (l0_bb);

  /* A region either has both a continue and an exit block or neither.  */
  gcc_assert ((region->cont != NULL) ^ (region->exit == NULL));
  l1_bb = region->cont;
  if (l1_bb)
    {
      l2_bb = single_succ (l1_bb);
      default_bb = create_empty_bb (l1_bb->prev_bb);

      l1 = tree_block_label (l1_bb);
    }
  else
    {
      /* No continue/exit markers; L2 doubles as the default block.  */
      l2_bb = create_empty_bb (l0_bb);
      default_bb = l2_bb;

      l1 = NULL;
    }
  l2 = tree_block_label (l2_bb);

  exit_bb = region->exit;

  /* V receives the section number handed back by the runtime.  */
  v = create_tmp_var (unsigned_type_node, ".section");

  /* We will build a switch() with enough cases for all the
     OMP_SECTION regions, a '0' case to handle the end of more work
     and a default case to abort if something goes wrong.  */
  len = EDGE_COUNT (entry_bb->succs);
  label_vec = make_tree_vec (len + 2);

  /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
     OMP_SECTIONS statement.  */
  si = bsi_last (entry_bb);
  sections_stmt = bsi_stmt (si);
  gcc_assert (TREE_CODE (sections_stmt) == OMP_SECTIONS);
  if (!is_combined_parallel (region))
    {
      /* If we are not inside a combined parallel+sections region,
	 call GOMP_sections_start.  */
      t = build_int_cst (unsigned_type_node, len);
      t = tree_cons (NULL, t, NULL);
      u = built_in_decls[BUILT_IN_GOMP_SECTIONS_START];
      t = build_function_call_expr (u, t);
      t = build2 (MODIFY_EXPR, void_type_node, v, t);
      bsi_insert_after (&si, t, BSI_SAME_STMT);
    }
  bsi_remove (&si, true);

  /* The switch() statement replacing OMP_SECTIONS goes in L0_BB.  */
  si = bsi_start (l0_bb);

  t = build3 (SWITCH_EXPR, void_type_node, v, NULL, label_vec);
  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

  /* Case 0 means there is no more work: branch straight to L2.  */
  t = build3 (CASE_LABEL_EXPR, void_type_node,
	      build_int_cst (unsigned_type_node, 0), NULL, l2);
  TREE_VEC_ELT (label_vec, 0) = t;
  make_edge (l0_bb, l2_bb, 0);

  /* Convert each OMP_SECTION into a CASE_LABEL_EXPR.  */
  for (inner = region->inner, i = 1; inner; inner = inner->next, ++i)
    {
      basic_block s_entry_bb, s_exit_bb;

      s_entry_bb = inner->entry;
      s_exit_bb = inner->exit;

      t = tree_block_label (s_entry_bb);
      u = build_int_cst (unsigned_type_node, i);
      u = build3 (CASE_LABEL_EXPR, void_type_node, u, NULL, t);
      TREE_VEC_ELT (label_vec, i) = u;

      si = bsi_last (s_entry_bb);
      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SECTION);
      gcc_assert (i < len || OMP_SECTION_LAST (bsi_stmt (si)));
      bsi_remove (&si, true);

      /* Reroute the section's incoming edge so that it now comes
	 from the dispatching switch in L0_BB.  */
      e = single_pred_edge (s_entry_bb);
      e->flags = 0;
      redirect_edge_pred (e, l0_bb);

      single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;

      /* A section with no OMP_RETURN (e.g. one ending in a jump out of
	 the construct) has no exit block to clean up.  */
      if (s_exit_bb == NULL)
	continue;

      si = bsi_last (s_exit_bb);
      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
      bsi_remove (&si, true);

      single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
    }

  /* Error handling code goes in DEFAULT_BB.  */
  t = tree_block_label (default_bb);
  u = build3 (CASE_LABEL_EXPR, void_type_node, NULL, NULL, t);
  TREE_VEC_ELT (label_vec, len + 1) = u;
  make_edge (l0_bb, default_bb, 0);

  /* A section number out of range means the runtime and the compiled
     switch disagree; trap rather than fall through silently.  */
  si = bsi_start (default_bb);
  t = built_in_decls[BUILT_IN_TRAP];
  t = build_function_call_expr (t, NULL);
  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

  /* Code to get the next section goes in L1_BB.  */
  if (l1_bb)
    {
      si = bsi_last (l1_bb);
      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);

      t = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
      t = build_function_call_expr (t, NULL);
      t = build2 (MODIFY_EXPR, void_type_node, v, t);
      bsi_insert_after (&si, t, BSI_SAME_STMT);
      bsi_remove (&si, true);
    }

  /* Cleanup function replaces OMP_RETURN in EXIT_BB.  */
  if (exit_bb)
    {
      si = bsi_last (exit_bb);
      if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
	t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END_NOWAIT];
      else
	t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END];
      t = build_function_call_expr (t, NULL);
      bsi_insert_after (&si, t, BSI_SAME_STMT);
      bsi_remove (&si, true);
    }

  /* Connect the new blocks.  */
  if (is_combined_parallel (region))
    {
      /* If this was a combined parallel+sections region, we did not
	 emit a GOMP_sections_start in the entry block, so we just
	 need to jump to L1_BB to get the next section.  */
      make_edge (entry_bb, l1_bb, EDGE_FALLTHRU);
    }
  else
    make_edge (entry_bb, l0_bb, EDGE_FALLTHRU);

  if (l1_bb)
    {
      e = single_succ_edge (l1_bb);
      redirect_edge_succ (e, l0_bb);
      e->flags = EDGE_FALLTHRU;
    }
}
3345169689Skan
3346169689Skan
3347169689Skan/* Expand code for an OpenMP single directive.  We've already expanded
3348169689Skan   much of the code, here we simply place the GOMP_barrier call.  */
3349169689Skan
3350169689Skanstatic void
3351169689Skanexpand_omp_single (struct omp_region *region)
3352169689Skan{
3353169689Skan  basic_block entry_bb, exit_bb;
3354169689Skan  block_stmt_iterator si;
3355169689Skan  bool need_barrier = false;
3356169689Skan
3357169689Skan  entry_bb = region->entry;
3358169689Skan  exit_bb = region->exit;
3359169689Skan
3360169689Skan  si = bsi_last (entry_bb);
3361169689Skan  /* The terminal barrier at the end of a GOMP_single_copy sequence cannot
3362169689Skan     be removed.  We need to ensure that the thread that entered the single
3363169689Skan     does not exit before the data is copied out by the other threads.  */
3364169689Skan  if (find_omp_clause (OMP_SINGLE_CLAUSES (bsi_stmt (si)),
3365169689Skan		       OMP_CLAUSE_COPYPRIVATE))
3366169689Skan    need_barrier = true;
3367169689Skan  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE);
3368169689Skan  bsi_remove (&si, true);
3369169689Skan  single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
3370169689Skan
3371169689Skan  si = bsi_last (exit_bb);
3372169689Skan  if (!OMP_RETURN_NOWAIT (bsi_stmt (si)) || need_barrier)
3373169689Skan    {
3374169689Skan      tree t = alloc_stmt_list ();
3375169689Skan      build_omp_barrier (&t);
3376169689Skan      bsi_insert_after (&si, t, BSI_SAME_STMT);
3377169689Skan    }
3378169689Skan  bsi_remove (&si, true);
3379169689Skan  single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
3380169689Skan}
3381169689Skan
3382169689Skan
3383169689Skan/* Generic expansion for OpenMP synchronization directives: master,
3384169689Skan   ordered and critical.  All we need to do here is remove the entry
3385169689Skan   and exit markers for REGION.  */
3386169689Skan
3387169689Skanstatic void
3388169689Skanexpand_omp_synch (struct omp_region *region)
3389169689Skan{
3390169689Skan  basic_block entry_bb, exit_bb;
3391169689Skan  block_stmt_iterator si;
3392169689Skan
3393169689Skan  entry_bb = region->entry;
3394169689Skan  exit_bb = region->exit;
3395169689Skan
3396169689Skan  si = bsi_last (entry_bb);
3397169689Skan  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE
3398169689Skan	      || TREE_CODE (bsi_stmt (si)) == OMP_MASTER
3399169689Skan	      || TREE_CODE (bsi_stmt (si)) == OMP_ORDERED
3400169689Skan	      || TREE_CODE (bsi_stmt (si)) == OMP_CRITICAL);
3401169689Skan  bsi_remove (&si, true);
3402169689Skan  single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
3403169689Skan
3404169689Skan  if (exit_bb)
3405169689Skan    {
3406169689Skan      si = bsi_last (exit_bb);
3407169689Skan      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
3408169689Skan      bsi_remove (&si, true);
3409169689Skan      single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
3410169689Skan    }
3411169689Skan}
3412169689Skan
3413169689Skan
3414169689Skan/* Expand the parallel region tree rooted at REGION.  Expansion
3415169689Skan   proceeds in depth-first order.  Innermost regions are expanded
3416169689Skan   first.  This way, parallel regions that require a new function to
3417169689Skan   be created (e.g., OMP_PARALLEL) can be expanded without having any
3418169689Skan   internal dependencies in their body.  */
3419169689Skan
3420169689Skanstatic void
3421169689Skanexpand_omp (struct omp_region *region)
3422169689Skan{
3423169689Skan  while (region)
3424169689Skan    {
3425169689Skan      if (region->inner)
3426169689Skan	expand_omp (region->inner);
3427169689Skan
3428169689Skan      switch (region->type)
3429169689Skan	{
3430169689Skan	case OMP_PARALLEL:
3431169689Skan	  expand_omp_parallel (region);
3432169689Skan	  break;
3433169689Skan
3434169689Skan	case OMP_FOR:
3435169689Skan	  expand_omp_for (region);
3436169689Skan	  break;
3437169689Skan
3438169689Skan	case OMP_SECTIONS:
3439169689Skan	  expand_omp_sections (region);
3440169689Skan	  break;
3441169689Skan
3442169689Skan	case OMP_SECTION:
3443169689Skan	  /* Individual omp sections are handled together with their
3444169689Skan	     parent OMP_SECTIONS region.  */
3445169689Skan	  break;
3446169689Skan
3447169689Skan	case OMP_SINGLE:
3448169689Skan	  expand_omp_single (region);
3449169689Skan	  break;
3450169689Skan
3451169689Skan	case OMP_MASTER:
3452169689Skan	case OMP_ORDERED:
3453169689Skan	case OMP_CRITICAL:
3454169689Skan	  expand_omp_synch (region);
3455169689Skan	  break;
3456169689Skan
3457169689Skan	default:
3458169689Skan	  gcc_unreachable ();
3459169689Skan	}
3460169689Skan
3461169689Skan      region = region->next;
3462169689Skan    }
3463169689Skan}
3464169689Skan
3465169689Skan
3466169689Skan/* Helper for build_omp_regions.  Scan the dominator tree starting at
3467169689Skan   block BB.  PARENT is the region that contains BB.  */
3468169689Skan
3469169689Skanstatic void
3470169689Skanbuild_omp_regions_1 (basic_block bb, struct omp_region *parent)
3471169689Skan{
3472169689Skan  block_stmt_iterator si;
3473169689Skan  tree stmt;
3474169689Skan  basic_block son;
3475169689Skan
3476169689Skan  si = bsi_last (bb);
3477169689Skan  if (!bsi_end_p (si) && OMP_DIRECTIVE_P (bsi_stmt (si)))
3478169689Skan    {
3479169689Skan      struct omp_region *region;
3480169689Skan      enum tree_code code;
3481169689Skan
3482169689Skan      stmt = bsi_stmt (si);
3483169689Skan      code = TREE_CODE (stmt);
3484169689Skan
3485169689Skan      if (code == OMP_RETURN)
3486169689Skan	{
3487169689Skan	  /* STMT is the return point out of region PARENT.  Mark it
3488169689Skan	     as the exit point and make PARENT the immediately
3489169689Skan	     enclosing region.  */
3490169689Skan	  gcc_assert (parent);
3491169689Skan	  region = parent;
3492169689Skan	  region->exit = bb;
3493169689Skan	  parent = parent->outer;
3494169689Skan
3495169689Skan	  /* If REGION is a parallel region, determine whether it is
3496169689Skan	     a combined parallel+workshare region.  */
3497169689Skan	  if (region->type == OMP_PARALLEL)
3498169689Skan	    determine_parallel_type (region);
3499169689Skan	}
3500169689Skan      else if (code == OMP_CONTINUE)
3501169689Skan	{
3502169689Skan	  gcc_assert (parent);
3503169689Skan	  parent->cont = bb;
3504169689Skan	}
3505169689Skan      else
3506169689Skan	{
3507169689Skan	  /* Otherwise, this directive becomes the parent for a new
3508169689Skan	     region.  */
3509169689Skan	  region = new_omp_region (bb, code, parent);
3510169689Skan	  parent = region;
3511169689Skan	}
3512169689Skan    }
3513169689Skan
3514169689Skan  for (son = first_dom_son (CDI_DOMINATORS, bb);
3515169689Skan       son;
3516169689Skan       son = next_dom_son (CDI_DOMINATORS, son))
3517169689Skan    build_omp_regions_1 (son, parent);
3518169689Skan}
3519169689Skan
3520169689Skan
3521169689Skan/* Scan the CFG and build a tree of OMP regions.  Return the root of
3522169689Skan   the OMP region tree.  */
3523169689Skan
3524169689Skanstatic void
3525169689Skanbuild_omp_regions (void)
3526169689Skan{
3527169689Skan  gcc_assert (root_omp_region == NULL);
3528169689Skan  calculate_dominance_info (CDI_DOMINATORS);
3529169689Skan  build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL);
3530169689Skan}
3531169689Skan
3532169689Skan
3533169689Skan/* Main entry point for expanding OMP-GIMPLE into runtime calls.  */
3534169689Skan
3535169689Skanstatic unsigned int
3536169689Skanexecute_expand_omp (void)
3537169689Skan{
3538169689Skan  build_omp_regions ();
3539169689Skan
3540169689Skan  if (!root_omp_region)
3541169689Skan    return 0;
3542169689Skan
3543169689Skan  if (dump_file)
3544169689Skan    {
3545169689Skan      fprintf (dump_file, "\nOMP region tree\n\n");
3546169689Skan      dump_omp_region (dump_file, root_omp_region, 0);
3547169689Skan      fprintf (dump_file, "\n");
3548169689Skan    }
3549169689Skan
3550169689Skan  remove_exit_barriers (root_omp_region);
3551169689Skan
3552169689Skan  expand_omp (root_omp_region);
3553169689Skan
3554169689Skan  free_dominance_info (CDI_DOMINATORS);
3555169689Skan  free_dominance_info (CDI_POST_DOMINATORS);
3556169689Skan  cleanup_tree_cfg ();
3557169689Skan
3558169689Skan  free_omp_regions ();
3559169689Skan
3560169689Skan  return 0;
3561169689Skan}
3562169689Skan
3563169689Skanstatic bool
3564169689Skangate_expand_omp (void)
3565169689Skan{
3566169689Skan  return flag_openmp != 0 && errorcount == 0;
3567169689Skan}
3568169689Skan
/* Pass descriptor for the OpenMP expansion pass.  Requires lowered
   GIMPLE (it consumes the OMP_* directives produced by the lowering
   routines below) and provides PROP_gimple_lomp.  */
struct tree_opt_pass pass_expand_omp =
{
  "ompexp",				/* name */
  gate_expand_omp,			/* gate */
  execute_expand_omp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lomp,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
3585169689Skan
3586169689Skan/* Routines to lower OpenMP directives into OMP-GIMPLE.  */
3587169689Skan
/* Lower the OpenMP sections directive in *STMT_P.  Each OMP_SECTION
   body is lowered in its own context and terminated with an
   OMP_RETURN marker; the whole construct is then wrapped with the
   data-sharing setup/teardown code and the OMP_CONTINUE/OMP_RETURN
   markers that pass_expand_omp relies on.  */

static void
lower_omp_sections (tree *stmt_p, omp_context *ctx)
{
  tree new_stmt, stmt, body, bind, block, ilist, olist, new_body;
  tree t, dlist;
  tree_stmt_iterator tsi;
  unsigned i, len;

  stmt = *stmt_p;

  push_gimplify_context ();

  /* ILIST receives the private-variable setup code, DLIST the
     corresponding teardown code.  */
  dlist = NULL;
  ilist = NULL;
  lower_rec_input_clauses (OMP_SECTIONS_CLAUSES (stmt), &ilist, &dlist, ctx);

  /* Count the sections in the construct's body.  */
  tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
  for (len = 0; !tsi_end_p (tsi); len++, tsi_next (&tsi))
    continue;

  tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
  body = alloc_stmt_list ();
  for (i = 0; i < len; i++, tsi_next (&tsi))
    {
      omp_context *sctx;
      tree sec_start, sec_end;

      sec_start = tsi_stmt (tsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      append_to_statement_list (sec_start, &body);

      /* Lower the section body in its own context, then splice it in
	 after the OMP_SECTION marker.  */
      lower_omp (&OMP_SECTION_BODY (sec_start), sctx);
      append_to_statement_list (OMP_SECTION_BODY (sec_start), &body);
      OMP_SECTION_BODY (sec_start) = NULL;

      if (i == len - 1)
	{
	  /* Lastprivate copy-out runs at the end of the final
	     section, which is flagged so pass_expand_omp can check
	     its assumptions.  */
	  tree l = alloc_stmt_list ();
	  lower_lastprivate_clauses (OMP_SECTIONS_CLAUSES (stmt), NULL,
				     &l, ctx);
	  append_to_statement_list (l, &body);
	  OMP_SECTION_LAST (sec_start) = 1;
	}

      sec_end = make_node (OMP_RETURN);
      append_to_statement_list (sec_end, &body);
    }

  block = make_node (BLOCK);
  bind = build3 (BIND_EXPR, void_type_node, NULL, body, block);

  /* OLIST receives the reduction epilogue, executed after the last
     section has run.  */
  olist = NULL_TREE;
  lower_reduction_clauses (OMP_SECTIONS_CLAUSES (stmt), &olist, ctx);

  pop_gimplify_context (NULL_TREE);
  record_vars_into (ctx->block_vars, ctx->cb.dst_fn);

  new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (new_stmt) = 1;

  /* Assemble: setup; OMP_SECTIONS; sections; OMP_CONTINUE;
     reductions; teardown; OMP_RETURN.  */
  new_body = alloc_stmt_list ();
  append_to_statement_list (ilist, &new_body);
  append_to_statement_list (stmt, &new_body);
  append_to_statement_list (bind, &new_body);

  t = make_node (OMP_CONTINUE);
  append_to_statement_list (t, &new_body);

  append_to_statement_list (olist, &new_body);
  append_to_statement_list (dlist, &new_body);

  maybe_catch_exception (&new_body);

  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SECTIONS_CLAUSES (stmt),
					     OMP_CLAUSE_NOWAIT);
  append_to_statement_list (t, &new_body);

  BIND_EXPR_BODY (new_stmt) = new_body;
  OMP_SECTIONS_BODY (stmt) = NULL;

  *stmt_p = new_stmt;
}
3675169689Skan
3676169689Skan
3677169689Skan/* A subroutine of lower_omp_single.  Expand the simple form of
3678169689Skan   an OMP_SINGLE, without a copyprivate clause:
3679169689Skan
3680169689Skan     	if (GOMP_single_start ())
3681169689Skan	  BODY;
3682169689Skan	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
3683169689Skan
3684169689Skan  FIXME.  It may be better to delay expanding the logic of this until
3685169689Skan  pass_expand_omp.  The expanded logic may make the job more difficult
3686169689Skan  to a synchronization analysis pass.  */
3687169689Skan
3688169689Skanstatic void
3689169689Skanlower_omp_single_simple (tree single_stmt, tree *pre_p)
3690169689Skan{
3691169689Skan  tree t;
3692169689Skan
3693169689Skan  t = built_in_decls[BUILT_IN_GOMP_SINGLE_START];
3694169689Skan  t = build_function_call_expr (t, NULL);
3695169689Skan  t = build3 (COND_EXPR, void_type_node, t,
3696169689Skan	      OMP_SINGLE_BODY (single_stmt), NULL);
3697169689Skan  gimplify_and_add (t, pre_p);
3698169689Skan}
3699169689Skan
3700169689Skan
/* A subroutine of lower_omp_single.  Expand the simple form of
   an OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

      {
	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	  {
	    BODY;
	    copyout.a = a;
	    copyout.b = b;
	    copyout.c = c;
	    GOMP_single_copy_end (&copyout);
	  }
	else
	  {
	    a = copyout_p->a;
	    b = copyout_p->b;
	    c = copyout_p->c;
	  }
	GOMP_barrier ();
      }

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (tree single_stmt, tree *pre_p, omp_context *ctx)
{
  tree ptr_type, t, args, l0, l1, l2, copyin_seq;

  /* SENDER_DECL is the copyout record filled in by the thread that
     executes the single; RECEIVER_DECL points at it in the others.  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  /* L0: body + copy-out; L1: copy-in; L2: join point.  */
  l0 = create_artificial_label ();
  l1 = create_artificial_label ();
  l2 = create_artificial_label ();

  /* copyout_p = GOMP_single_copy_start ();  */
  t = built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START];
  t = build_function_call_expr (t, NULL);
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, void_type_node, ctx->receiver_decl, t);
  gimplify_and_add (t, pre_p);

  /* A NULL result marks the thread selected to run the body.  */
  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, l0);
  gimplify_and_add (t, pre_p);

  append_to_statement_list (OMP_SINGLE_BODY (single_stmt), pre_p);

  /* Emit the copy-out assignments into PRE_P and collect the matching
     copy-in assignments in COPYIN_SEQ for the else branch.  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (OMP_SINGLE_CLAUSES (single_stmt), pre_p,
			      &copyin_seq, ctx);

  /* GOMP_single_copy_end (&copyout);  */
  t = build_fold_addr_expr (ctx->sender_decl);
  args = tree_cons (NULL, t, NULL);
  t = built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_END];
  t = build_function_call_expr (t, args);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, l1);
  gimplify_and_add (t, pre_p);

  append_to_statement_list (copyin_seq, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, l2);
  gimplify_and_add (t, pre_p);
}
3782169689Skan
3783169689Skan
3784169689Skan/* Expand code for an OpenMP single directive.  */
3785169689Skan
3786169689Skanstatic void
3787169689Skanlower_omp_single (tree *stmt_p, omp_context *ctx)
3788169689Skan{
3789169689Skan  tree t, bind, block, single_stmt = *stmt_p, dlist;
3790169689Skan
3791169689Skan  push_gimplify_context ();
3792169689Skan
3793169689Skan  block = make_node (BLOCK);
3794169689Skan  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
3795169689Skan  TREE_SIDE_EFFECTS (bind) = 1;
3796169689Skan
3797169689Skan  lower_rec_input_clauses (OMP_SINGLE_CLAUSES (single_stmt),
3798169689Skan			   &BIND_EXPR_BODY (bind), &dlist, ctx);
3799169689Skan  lower_omp (&OMP_SINGLE_BODY (single_stmt), ctx);
3800169689Skan
3801169689Skan  append_to_statement_list (single_stmt, &BIND_EXPR_BODY (bind));
3802169689Skan
3803169689Skan  if (ctx->record_type)
3804169689Skan    lower_omp_single_copy (single_stmt, &BIND_EXPR_BODY (bind), ctx);
3805169689Skan  else
3806169689Skan    lower_omp_single_simple (single_stmt, &BIND_EXPR_BODY (bind));
3807169689Skan
3808169689Skan  OMP_SINGLE_BODY (single_stmt) = NULL;
3809169689Skan
3810169689Skan  append_to_statement_list (dlist, &BIND_EXPR_BODY (bind));
3811169689Skan
3812169689Skan  maybe_catch_exception (&BIND_EXPR_BODY (bind));
3813169689Skan
3814169689Skan  t = make_node (OMP_RETURN);
3815169689Skan  OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SINGLE_CLAUSES (single_stmt),
3816169689Skan					     OMP_CLAUSE_NOWAIT);
3817169689Skan  append_to_statement_list (t, &BIND_EXPR_BODY (bind));
3818169689Skan
3819169689Skan  pop_gimplify_context (bind);
3820169689Skan
3821169689Skan  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
3822169689Skan  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
3823169689Skan}
3824169689Skan
3825169689Skan
/* Expand code for an OpenMP master directive.  */

static void
lower_omp_master (tree *stmt_p, omp_context *ctx)
{
  tree bind, block, stmt = *stmt_p, lab = NULL, x;

  push_gimplify_context ();

  /* Wrap the lowered directive in a new BIND_EXPR so the temporaries
     created during gimplification get a scope.  */
  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));

  /* Emit "if (omp_get_thread_num () != 0) goto lab;" so that only the
     master thread (thread number 0) executes the region body.  */
  x = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
  x = build_function_call_expr (x, NULL);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  lower_omp (&OMP_MASTER_BODY (stmt), ctx);
  maybe_catch_exception (&OMP_MASTER_BODY (stmt));
  append_to_statement_list (OMP_MASTER_BODY (stmt), &BIND_EXPR_BODY (bind));
  OMP_MASTER_BODY (stmt) = NULL;

  /* "lab:" -- the target for the non-master threads' jump above.  */
  x = build1 (LABEL_EXPR, void_type_node, lab);
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  /* Close the region.  A master construct has no implied barrier, so
     mark the OMP_RETURN as nowait.  */
  x = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (x) = 1;
  append_to_statement_list (x, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);

  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}
3864169689Skan
3865169689Skan
/* Expand code for an OpenMP ordered directive.  The region body is
   bracketed with calls to GOMP_ordered_start and GOMP_ordered_end.  */

static void
lower_omp_ordered (tree *stmt_p, omp_context *ctx)
{
  tree bind, block, stmt = *stmt_p, x;

  push_gimplify_context ();

  /* Wrap everything in a new BIND_EXPR so gimplification temporaries
     have a scope.  */
  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));

  /* GOMP_ordered_start ();  */
  x = built_in_decls[BUILT_IN_GOMP_ORDERED_START];
  x = build_function_call_expr (x, NULL);
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  lower_omp (&OMP_ORDERED_BODY (stmt), ctx);
  maybe_catch_exception (&OMP_ORDERED_BODY (stmt));
  append_to_statement_list (OMP_ORDERED_BODY (stmt), &BIND_EXPR_BODY (bind));
  OMP_ORDERED_BODY (stmt) = NULL;

  /* GOMP_ordered_end ();  */
  x = built_in_decls[BUILT_IN_GOMP_ORDERED_END];
  x = build_function_call_expr (x, NULL);
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  /* An ordered construct has no implied barrier of its own.  */
  x = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (x) = 1;
  append_to_statement_list (x, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);

  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}
3903169689Skan
3904169689Skan
/* Gimplify an OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Cache mapping a critical-section name (IDENTIFIER_NODE) to the
   VAR_DECL of the mutex synthesized for it, so that every critical
   directive with the same name uses the same lock variable.  GC-rooted
   via GTY because it holds trees across collections.  */
static GTY((param1_is (tree), param2_is (tree)))
  splay_tree critical_name_mutexes;

static void
lower_omp_critical (tree *stmt_p, omp_context *ctx)
{
  tree bind, block, stmt = *stmt_p;
  tree t, lock, unlock, name;

  name = OMP_CRITICAL_NAME (stmt);
  if (name)
    {
      tree decl, args;
      splay_tree_node n;

      if (!critical_name_mutexes)
	critical_name_mutexes
	  = splay_tree_new_ggc (splay_tree_compare_pointers);

      n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node, NULL);

	  /* Give the mutex a well-known assembler name derived from the
	     user's critical-section name; it is public and common so
	     that, presumably, uses of the same name in other translation
	     units resolve to a single object.  */
	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  cgraph_varpool_finalize_decl (decl);

	  splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
			     (splay_tree_value) decl);
	}
      else
	decl = (tree) n->value;

      /* GOMP_critical_name_start (&mutex); ... GOMP_critical_name_end
	 (&mutex);  */
      args = tree_cons (NULL, build_fold_addr_expr (decl), NULL);
      lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_START];
      lock = build_function_call_expr (lock, args);

      args = tree_cons (NULL, build_fold_addr_expr (decl), NULL);
      unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_END];
      unlock = build_function_call_expr (unlock, args);
    }
  else
    {
      /* Unnamed critical sections all share the runtime's default lock.  */
      lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_START];
      lock = build_function_call_expr (lock, NULL);

      unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_END];
      unlock = build_function_call_expr (unlock, NULL);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));

  gimplify_and_add (lock, &BIND_EXPR_BODY (bind));

  lower_omp (&OMP_CRITICAL_BODY (stmt), ctx);
  maybe_catch_exception (&OMP_CRITICAL_BODY (stmt));
  append_to_statement_list (OMP_CRITICAL_BODY (stmt), &BIND_EXPR_BODY (bind));
  OMP_CRITICAL_BODY (stmt) = NULL;

  gimplify_and_add (unlock, &BIND_EXPR_BODY (bind));

  /* A critical construct has no implied barrier.  */
  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = 1;
  append_to_statement_list (t, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);
  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}
3994169689Skan
3995169689Skan
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
			   tree *dlist, struct omp_context *ctx)
{
  tree clauses, cond, stmts, vinit, t;
  enum tree_code cond_code;

  /* Negate the loop condition: lastprivate copy-out runs only in the
     thread whose iterator has gone past the bound.  */
  cond_code = fd->cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (host_integerp (fd->step, 0))
    {
      HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->step);
      /* With a unit step the final iterator value is exactly the bound,
	 so >= / <= can be strengthened to ==.  */
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  cond = build2 (cond_code, boolean_type_node, fd->v, fd->n2);

  clauses = OMP_FOR_CLAUSES (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
  if (stmts != NULL)
    {
      append_to_statement_list (stmts, dlist);

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->n1;
      if (cond_code == EQ_EXPR
	  && host_integerp (fd->n2, 0)
	  && ! integer_zerop (fd->n2))
	vinit = build_int_cst (TREE_TYPE (fd->v), 0);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      t = build2 (MODIFY_EXPR, void_type_node, fd->v, vinit);
      gimplify_and_add (t, body_p);
    }
}
4043169689Skan
4044169689Skan
/* Lower code for an OpenMP loop directive.  Replaces *STMT_P with a
   BIND_EXPR containing the pre-body, data-sharing setup, the OMP_FOR
   itself, its body, an OMP_CONTINUE marker, reduction/copy-out code
   and the closing OMP_RETURN.  CTX is the directive's context.  */

static void
lower_omp_for (tree *stmt_p, omp_context *ctx)
{
  tree t, stmt, ilist, dlist, new_stmt, *body_p, *rhs_p;
  struct omp_for_data fd;

  stmt = *stmt_p;

  push_gimplify_context ();

  lower_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
  lower_omp (&OMP_FOR_BODY (stmt), ctx);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  if (TREE_CODE (OMP_FOR_BODY (stmt)) == BIND_EXPR)
    record_vars_into (BIND_EXPR_VARS (OMP_FOR_BODY (stmt)), ctx->cb.dst_fn);

  new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (new_stmt) = 1;
  body_p = &BIND_EXPR_BODY (new_stmt);

  /* The pre-body and input clauses go before the lowered OMP_FOR.  */
  ilist = NULL;
  dlist = NULL;
  append_to_statement_list (OMP_FOR_PRE_BODY (stmt), body_p);
  lower_rec_input_clauses (OMP_FOR_CLAUSES (stmt), body_p, &dlist, ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

     	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  rhs_p = &TREE_OPERAND (OMP_FOR_INIT (stmt), 1);
  if (!is_gimple_min_invariant (*rhs_p))
    *rhs_p = get_formal_tmp_var (*rhs_p, body_p);

  rhs_p = &TREE_OPERAND (OMP_FOR_COND (stmt), 1);
  if (!is_gimple_min_invariant (*rhs_p))
    *rhs_p = get_formal_tmp_var (*rhs_p, body_p);

  rhs_p = &TREE_OPERAND (TREE_OPERAND (OMP_FOR_INCR (stmt), 1), 1);
  if (!is_gimple_min_invariant (*rhs_p))
    *rhs_p = get_formal_tmp_var (*rhs_p, body_p);

  /* Once lowered, extract the bounds and clauses.  */
  extract_omp_for_data (stmt, &fd);

  lower_omp_for_lastprivate (&fd, body_p, &dlist, ctx);

  append_to_statement_list (stmt, body_p);

  append_to_statement_list (OMP_FOR_BODY (stmt), body_p);

  /* Marks the point where the thread asks the runtime for the next
     chunk of iterations (consumed later by the expansion pass).  */
  t = make_node (OMP_CONTINUE);
  append_to_statement_list (t, body_p);

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (OMP_FOR_CLAUSES (stmt), body_p, ctx);
  append_to_statement_list (dlist, body_p);

  maybe_catch_exception (body_p);

  /* Region exit marker goes at the end of the loop body.  */
  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = fd.have_nowait;
  append_to_statement_list (t, body_p);

  pop_gimplify_context (NULL_TREE);
  record_vars_into (ctx->block_vars, ctx->cb.dst_fn);

  OMP_FOR_BODY (stmt) = NULL_TREE;
  OMP_FOR_PRE_BODY (stmt) = NULL_TREE;
  *stmt_p = new_stmt;
}
4124169689Skan
4125171825Skan/* Callback for walk_stmts.  Check if *TP only contains OMP_FOR
4126171825Skan   or OMP_PARALLEL.  */
4127169689Skan
4128171825Skanstatic tree
4129171825Skancheck_combined_parallel (tree *tp, int *walk_subtrees, void *data)
4130171825Skan{
4131171825Skan  struct walk_stmt_info *wi = data;
4132171825Skan  int *info = wi->info;
4133171825Skan
4134171825Skan  *walk_subtrees = 0;
4135171825Skan  switch (TREE_CODE (*tp))
4136171825Skan    {
4137171825Skan    case OMP_FOR:
4138171825Skan    case OMP_SECTIONS:
4139171825Skan      *info = *info == 0 ? 1 : -1;
4140171825Skan      break;
4141171825Skan    default:
4142171825Skan      *info = -1;
4143171825Skan      break;
4144171825Skan    }
4145171825Skan  return NULL;
4146171825Skan}
4147171825Skan
/* Lower the OpenMP parallel directive in *STMT_P.  CTX holds context
   information for the directive.  */

static void
lower_omp_parallel (tree *stmt_p, omp_context *ctx)
{
  tree clauses, par_bind, par_body, new_body, bind;
  tree olist, ilist, par_olist, par_ilist;
  tree stmt, child_fn, t;

  stmt = *stmt_p;

  clauses = OMP_PARALLEL_CLAUSES (stmt);
  par_bind = OMP_PARALLEL_BODY (stmt);
  par_body = BIND_EXPR_BODY (par_bind);
  child_fn = ctx->cb.dst_fn;
  /* If the front end did not mark this as a combined parallel
     workshare, detect the case here: a body consisting of exactly one
     OMP_FOR or OMP_SECTIONS and nothing else.  */
  if (!OMP_PARALLEL_COMBINED (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.callback = check_combined_parallel;
      wi.info = &ws_num;
      wi.val_only = true;
      walk_stmts (&wi, &par_bind);
      if (ws_num == 1)
	OMP_PARALLEL_COMBINED (stmt) = 1;
    }

  push_gimplify_context ();

  par_olist = NULL_TREE;
  par_ilist = NULL_TREE;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx);
  lower_omp (&par_body, ctx);
  lower_reduction_clauses (clauses, &par_olist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  record_vars_into (BIND_EXPR_VARS (par_bind), child_fn);

  /* If any clause maps data, create the sender-side block that is
     passed to the child function.  */
  if (ctx->record_type)
    {
      ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_data_o");
      OMP_PARALLEL_DATA_ARG (stmt) = ctx->sender_decl;
    }

  olist = NULL_TREE;
  ilist = NULL_TREE;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  /* Once all the expansions are done, sequence all the different
     fragments inside OMP_PARALLEL_BODY.  */
  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  append_to_statement_list (ilist, &BIND_EXPR_BODY (bind));

  new_body = alloc_stmt_list ();

  if (ctx->record_type)
    {
      t = build_fold_addr_expr (ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert (TREE_TYPE (ctx->receiver_decl), t);
      t = build2 (MODIFY_EXPR, void_type_node, ctx->receiver_decl, t);
      append_to_statement_list (t, &new_body);
    }

  append_to_statement_list (par_ilist, &new_body);
  append_to_statement_list (par_body, &new_body);
  append_to_statement_list (par_olist, &new_body);
  maybe_catch_exception (&new_body);
  /* Region exit marker; parallel regions keep the implicit barrier
     (no OMP_RETURN_NOWAIT here).  */
  t = make_node (OMP_RETURN);
  append_to_statement_list (t, &new_body);
  OMP_PARALLEL_BODY (stmt) = new_body;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
  append_to_statement_list (olist, &BIND_EXPR_BODY (bind));

  *stmt_p = bind;

  pop_gimplify_context (NULL_TREE);
}
4233169689Skan
4234169689Skan
4235169689Skan/* Pass *TP back through the gimplifier within the context determined by WI.
4236169689Skan   This handles replacement of DECL_VALUE_EXPR, as well as adjusting the
4237169689Skan   flags on ADDR_EXPR.  */
4238169689Skan
4239169689Skanstatic void
4240169689Skanlower_regimplify (tree *tp, struct walk_stmt_info *wi)
4241169689Skan{
4242169689Skan  enum gimplify_status gs;
4243169689Skan  tree pre = NULL;
4244169689Skan
4245169689Skan  if (wi->is_lhs)
4246169689Skan    gs = gimplify_expr (tp, &pre, NULL, is_gimple_lvalue, fb_lvalue);
4247169689Skan  else if (wi->val_only)
4248169689Skan    gs = gimplify_expr (tp, &pre, NULL, is_gimple_val, fb_rvalue);
4249169689Skan  else
4250169689Skan    gs = gimplify_expr (tp, &pre, NULL, is_gimple_formal_tmp_var, fb_rvalue);
4251169689Skan  gcc_assert (gs == GS_ALL_DONE);
4252169689Skan
4253169689Skan  if (pre)
4254169689Skan    tsi_link_before (&wi->tsi, pre, TSI_SAME_STMT);
4255169689Skan}
4256169689Skan
4257169689Skan/* Copy EXP into a temporary.  Insert the initialization statement before TSI.  */
4258169689Skan
4259169689Skanstatic tree
4260169689Skaninit_tmp_var (tree exp, tree_stmt_iterator *tsi)
4261169689Skan{
4262169689Skan  tree t, stmt;
4263169689Skan
4264169689Skan  t = create_tmp_var (TREE_TYPE (exp), NULL);
4265169689Skan  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
4266169689Skan    DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
4267169689Skan  stmt = build2 (MODIFY_EXPR, TREE_TYPE (t), t, exp);
4268169689Skan  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
4269169689Skan  tsi_link_before (tsi, stmt, TSI_SAME_STMT);
4270169689Skan
4271169689Skan  return t;
4272169689Skan}
4273169689Skan
4274169689Skan/* Similarly, but copy from the temporary and insert the statement
4275169689Skan   after the iterator.  */
4276169689Skan
4277169689Skanstatic tree
4278169689Skansave_tmp_var (tree exp, tree_stmt_iterator *tsi)
4279169689Skan{
4280169689Skan  tree t, stmt;
4281169689Skan
4282169689Skan  t = create_tmp_var (TREE_TYPE (exp), NULL);
4283169689Skan  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
4284169689Skan    DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
4285169689Skan  stmt = build2 (MODIFY_EXPR, TREE_TYPE (t), exp, t);
4286169689Skan  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
4287169689Skan  tsi_link_after (tsi, stmt, TSI_SAME_STMT);
4288169689Skan
4289169689Skan  return t;
4290169689Skan}
4291169689Skan
/* Callback for walk_stmts.  Lower the OpenMP directive pointed by TP.
   Non-directive trees are regimplified as needed so that references
   rewritten through DECL_VALUE_EXPR remain valid GIMPLE.  */

static tree
lower_omp_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  omp_context *ctx = wi->info;
  tree t = *tp;

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OpenMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (errorcount && OMP_DIRECTIVE_P (*tp))
    {
      *tp = build_empty_stmt ();
      return NULL_TREE;
    }

  /* Each case below walks (or deliberately skips) its own subtrees.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_PARALLEL:
      ctx = maybe_lookup_ctx (t);
      lower_omp_parallel (tp, ctx);
      break;

    case OMP_FOR:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_for (tp, ctx);
      break;

    case OMP_SECTIONS:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_sections (tp, ctx);
      break;

    case OMP_SINGLE:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_single (tp, ctx);
      break;

    case OMP_MASTER:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_master (tp, ctx);
      break;

    case OMP_ORDERED:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_ordered (tp, ctx);
      break;

    case OMP_CRITICAL:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_critical (tp, ctx);
      break;

    case VAR_DECL:
      /* A variable with a DECL_VALUE_EXPR expands to an expression,
	 which may need to be gimplified into a temporary depending on
	 the position it occupies.  */
      if (ctx && DECL_HAS_VALUE_EXPR_P (t))
	{
	  lower_regimplify (&t, wi);
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		t = save_tmp_var (t, &wi->tsi);
	      else
		t = init_tmp_var (t, &wi->tsi);
	    }
	  *tp = t;
	}
      break;

    case ADDR_EXPR:
      if (ctx)
	lower_regimplify (tp, wi);
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case VIEW_CONVERT_EXPR:
      if (ctx)
	lower_regimplify (tp, wi);
      break;

    case INDIRECT_REF:
      /* Only the address operand needs regimplification, and it is
	 always an rvalue regardless of the position of the whole
	 INDIRECT_REF.  */
      if (ctx)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	  lower_regimplify (&TREE_OPERAND (t, 0), wi);
	}
      break;

    default:
      /* Recurse into everything else except types and decls.  */
      if (!TYPE_P (t) && !DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
4401169689Skan
4402169689Skanstatic void
4403169689Skanlower_omp (tree *stmt_p, omp_context *ctx)
4404169689Skan{
4405169689Skan  struct walk_stmt_info wi;
4406169689Skan
4407169689Skan  memset (&wi, 0, sizeof (wi));
4408169689Skan  wi.callback = lower_omp_1;
4409169689Skan  wi.info = ctx;
4410169689Skan  wi.val_only = true;
4411169689Skan  wi.want_locations = true;
4412169689Skan
4413169689Skan  walk_stmts (&wi, stmt_p);
4414169689Skan}
4415169689Skan
/* Main entry point.  Scans the function body for OpenMP directives,
   building a context for each, then lowers them.  Returns 0 (no
   additional TODO flags).  */

static unsigned int
execute_lower_omp (void)
{
  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  scan_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
  gcc_assert (parallel_nesting_level == 0);

  /* Only bother lowering if the scan found at least one directive.  */
  if (all_contexts->root)
    lower_omp (&DECL_SAVED_TREE (current_function_decl), NULL);

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  return 0;
}
4437169689Skan
4438169689Skanstatic bool
4439169689Skangate_lower_omp (void)
4440169689Skan{
4441169689Skan  return flag_openmp != 0;
4442169689Skan}
4443169689Skan
/* Pass descriptor for the OpenMP lowering pass.  */

struct tree_opt_pass pass_lower_omp =
{
  "omplower",				/* name */
  gate_lower_omp,			/* gate */
  execute_lower_omp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lomp,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
4460169689Skan
/* The following is a utility to diagnose OpenMP structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Map from LABEL_DECL to the OpenMP context (a TREE_LIST chain of
   enclosing directives) in which the label appears.  Built by pass 1
   (diagnose_sb_1), consumed by pass 2 (diagnose_sb_2).  */
static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX and LABEL_CTX are the
   directive chains of the branch site and the branch target; on error,
   *STMT_P is replaced with an empty statement.  */

static bool
diagnose_sb_0 (tree *stmt_p, tree branch_ctx, tree label_ctx)
{
  bool exit_p = true;

  /* Matching innermost contexts means the branch stays within one
     structured block -- nothing to diagnose.  */
  if ((label_ctx ? TREE_VALUE (label_ctx) : NULL) == branch_ctx)
    return false;

  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from OpenMP structured block");
  else
    error ("invalid entry to OpenMP structured block");

  /* Remove the offending statement so later passes do not see the
     invalid control flow.  */
  *stmt_p = build_empty_stmt ();
  return true;
}
4504169689Skan
/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
   where in the tree each label is found.  WI->info holds the chain of
   directives enclosing the current statement.  */

static tree
diagnose_sb_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  tree context = (tree) wi->info;
  tree inner_context;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_SINGLE:
      /* Clauses may contain labels too (e.g. in statement expressions),
	 and belong to the *enclosing* context.  */
      walk_tree (&OMP_CLAUSES (t), diagnose_sb_1, wi, NULL);
      /* FALLTHRU */
    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      /* The minimal context here is just a tree of statements.  */
      inner_context = tree_cons (NULL, t, context);
      wi->info = inner_context;
      walk_stmts (wi, &OMP_BODY (t));
      wi->info = context;
      break;

    case OMP_FOR:
      walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_1, wi, NULL);
      inner_context = tree_cons (NULL, t, context);
      wi->info = inner_context;
      /* The loop header expressions and pre-body count as inside the
	 directive.  */
      walk_tree (&OMP_FOR_INIT (t), diagnose_sb_1, wi, NULL);
      walk_tree (&OMP_FOR_COND (t), diagnose_sb_1, wi, NULL);
      walk_tree (&OMP_FOR_INCR (t), diagnose_sb_1, wi, NULL);
      walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
      walk_stmts (wi, &OMP_FOR_BODY (t));
      wi->info = context;
      break;

    case LABEL_EXPR:
      /* Remember the directive chain in effect at each label.  */
      splay_tree_insert (all_labels, (splay_tree_key) LABEL_EXPR_LABEL (t),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
4558169689Skan
4559169689Skan/* Pass 2: Check each branch and see if its context differs from that of
4560169689Skan   the destination label's context.  */
4561169689Skan
4562169689Skanstatic tree
4563169689Skandiagnose_sb_2 (tree *tp, int *walk_subtrees, void *data)
4564169689Skan{
4565169689Skan  struct walk_stmt_info *wi = data;
4566169689Skan  tree context = (tree) wi->info;
4567169689Skan  splay_tree_node n;
4568169689Skan  tree t = *tp;
4569169689Skan
4570169689Skan  *walk_subtrees = 0;
4571169689Skan  switch (TREE_CODE (t))
4572169689Skan    {
4573169689Skan    case OMP_PARALLEL:
4574169689Skan    case OMP_SECTIONS:
4575169689Skan    case OMP_SINGLE:
4576169689Skan      walk_tree (&OMP_CLAUSES (t), diagnose_sb_2, wi, NULL);
4577169689Skan      /* FALLTHRU */
4578169689Skan    case OMP_SECTION:
4579169689Skan    case OMP_MASTER:
4580169689Skan    case OMP_ORDERED:
4581169689Skan    case OMP_CRITICAL:
4582169689Skan      wi->info = t;
4583169689Skan      walk_stmts (wi, &OMP_BODY (t));
4584169689Skan      wi->info = context;
4585169689Skan      break;
4586169689Skan
4587169689Skan    case OMP_FOR:
4588169689Skan      walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_2, wi, NULL);
4589169689Skan      wi->info = t;
4590169689Skan      walk_tree (&OMP_FOR_INIT (t), diagnose_sb_2, wi, NULL);
4591169689Skan      walk_tree (&OMP_FOR_COND (t), diagnose_sb_2, wi, NULL);
4592169689Skan      walk_tree (&OMP_FOR_INCR (t), diagnose_sb_2, wi, NULL);
4593169689Skan      walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
4594169689Skan      walk_stmts (wi, &OMP_FOR_BODY (t));
4595169689Skan      wi->info = context;
4596169689Skan      break;
4597169689Skan
4598169689Skan    case GOTO_EXPR:
4599169689Skan      {
4600169689Skan	tree lab = GOTO_DESTINATION (t);
4601169689Skan	if (TREE_CODE (lab) != LABEL_DECL)
4602169689Skan	  break;
4603169689Skan
4604169689Skan	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
4605169689Skan	diagnose_sb_0 (tp, context, n ? (tree) n->value : NULL_TREE);
4606169689Skan      }
4607169689Skan      break;
4608169689Skan
4609169689Skan    case SWITCH_EXPR:
4610169689Skan      {
4611169689Skan	tree vec = SWITCH_LABELS (t);
4612169689Skan	int i, len = TREE_VEC_LENGTH (vec);
4613169689Skan	for (i = 0; i < len; ++i)
4614169689Skan	  {
4615169689Skan	    tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4616169689Skan	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
4617169689Skan	    if (diagnose_sb_0 (tp, context, (tree) n->value))
4618169689Skan	      break;
4619169689Skan	  }
4620169689Skan      }
4621169689Skan      break;
4622169689Skan
4623169689Skan    case RETURN_EXPR:
4624169689Skan      diagnose_sb_0 (tp, context, NULL_TREE);
4625169689Skan      break;
4626169689Skan
4627169689Skan    default:
4628169689Skan      break;
4629169689Skan    }
4630169689Skan
4631169689Skan  return NULL_TREE;
4632169689Skan}
4633169689Skan
4634169689Skanvoid
4635169689Skandiagnose_omp_structured_block_errors (tree fndecl)
4636169689Skan{
4637169689Skan  tree save_current = current_function_decl;
4638169689Skan  struct walk_stmt_info wi;
4639169689Skan
4640169689Skan  current_function_decl = fndecl;
4641169689Skan
4642169689Skan  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
4643169689Skan
4644169689Skan  memset (&wi, 0, sizeof (wi));
4645169689Skan  wi.callback = diagnose_sb_1;
4646169689Skan  walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));
4647169689Skan
4648169689Skan  memset (&wi, 0, sizeof (wi));
4649169689Skan  wi.callback = diagnose_sb_2;
4650169689Skan  wi.want_locations = true;
4651169689Skan  wi.want_return_expr = true;
4652169689Skan  walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));
4653169689Skan
4654169689Skan  splay_tree_delete (all_labels);
4655169689Skan  all_labels = NULL;
4656169689Skan
4657169689Skan  current_function_decl = save_current;
4658169689Skan}
4659169689Skan
4660169689Skan#include "gt-omp-low.h"
4661