1 /* General types and functions that are useful for processing of OpenMP,
2 OpenACC and similar directives at various stages of compilation.
4 Copyright (C) 2005-2019 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Find an OMP clause of type KIND within CLAUSES. */
26 #include "coretypes.h"
32 #include "diagnostic-core.h"
33 #include "fold-const.h"
34 #include "langhooks.h"
35 #include "omp-general.h"
36 #include "stringpool.h"
/* NOTE(review): global bitmask recording which "omp requires" clauses have
   been seen in the translation unit; defined here with static initialization
   to zero, set/read elsewhere — confirm flag values against omp-general.h.  */
39 enum omp_requires omp_requires_mask
;
42 omp_find_clause (tree clauses
, enum omp_clause_code kind
)
44 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
45 if (OMP_CLAUSE_CODE (clauses
) == kind
)
51 /* Return true if DECL is a reference type. */
54 omp_is_reference (tree decl
)
56 return lang_hooks
.decls
.omp_privatize_by_reference (decl
);
59 /* Adjust *COND_CODE and *N2 so that the former is either LT_EXPR or GT_EXPR,
60 given that V is the loop index variable and STEP is loop step. */
63 omp_adjust_for_condition (location_t loc
, enum tree_code
*cond_code
, tree
*n2
,
73 gcc_assert (TREE_CODE (step
) == INTEGER_CST
);
74 if (TREE_CODE (TREE_TYPE (v
)) == INTEGER_TYPE
)
76 if (integer_onep (step
))
80 gcc_assert (integer_minus_onep (step
));
86 tree unit
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (v
)));
87 gcc_assert (TREE_CODE (unit
) == INTEGER_CST
);
88 if (tree_int_cst_equal (unit
, step
))
92 gcc_assert (wi::neg (wi::to_widest (unit
))
93 == wi::to_widest (step
));
101 if (POINTER_TYPE_P (TREE_TYPE (*n2
)))
102 *n2
= fold_build_pointer_plus_hwi_loc (loc
, *n2
, 1);
104 *n2
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (*n2
), *n2
,
105 build_int_cst (TREE_TYPE (*n2
), 1));
106 *cond_code
= LT_EXPR
;
109 if (POINTER_TYPE_P (TREE_TYPE (*n2
)))
110 *n2
= fold_build_pointer_plus_hwi_loc (loc
, *n2
, -1);
112 *n2
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (*n2
), *n2
,
113 build_int_cst (TREE_TYPE (*n2
), 1));
114 *cond_code
= GT_EXPR
;
121 /* Return the looping step from INCR, extracted from the step of a gimple omp
125 omp_get_for_step_from_incr (location_t loc
, tree incr
)
128 switch (TREE_CODE (incr
))
131 step
= TREE_OPERAND (incr
, 1);
133 case POINTER_PLUS_EXPR
:
134 step
= fold_convert (ssizetype
, TREE_OPERAND (incr
, 1));
137 step
= TREE_OPERAND (incr
, 1);
138 step
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (step
), step
);
146 /* Extract the header elements of parallel loop FOR_STMT and store
150 omp_extract_for_data (gomp_for
*for_stmt
, struct omp_for_data
*fd
,
151 struct omp_for_data_loop
*loops
)
153 tree t
, var
, *collapse_iter
, *collapse_count
;
154 tree count
= NULL_TREE
, iter_type
= long_integer_type_node
;
155 struct omp_for_data_loop
*loop
;
157 struct omp_for_data_loop dummy_loop
;
158 location_t loc
= gimple_location (for_stmt
);
159 bool simd
= gimple_omp_for_kind (for_stmt
) & GF_OMP_FOR_SIMD
;
160 bool distribute
= gimple_omp_for_kind (for_stmt
)
161 == GF_OMP_FOR_KIND_DISTRIBUTE
;
162 bool taskloop
= gimple_omp_for_kind (for_stmt
)
163 == GF_OMP_FOR_KIND_TASKLOOP
;
166 fd
->for_stmt
= for_stmt
;
168 fd
->have_nowait
= distribute
|| simd
;
169 fd
->have_ordered
= false;
170 fd
->have_reductemp
= false;
171 fd
->tiling
= NULL_TREE
;
174 fd
->sched_kind
= OMP_CLAUSE_SCHEDULE_STATIC
;
175 fd
->sched_modifiers
= 0;
176 fd
->chunk_size
= NULL_TREE
;
177 fd
->simd_schedule
= false;
178 collapse_iter
= NULL
;
179 collapse_count
= NULL
;
181 for (t
= gimple_omp_for_clauses (for_stmt
); t
; t
= OMP_CLAUSE_CHAIN (t
))
182 switch (OMP_CLAUSE_CODE (t
))
184 case OMP_CLAUSE_NOWAIT
:
185 fd
->have_nowait
= true;
187 case OMP_CLAUSE_ORDERED
:
188 fd
->have_ordered
= true;
189 if (OMP_CLAUSE_ORDERED_EXPR (t
))
190 fd
->ordered
= tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (t
));
192 case OMP_CLAUSE_SCHEDULE
:
193 gcc_assert (!distribute
&& !taskloop
);
195 = (enum omp_clause_schedule_kind
)
196 (OMP_CLAUSE_SCHEDULE_KIND (t
) & OMP_CLAUSE_SCHEDULE_MASK
);
197 fd
->sched_modifiers
= (OMP_CLAUSE_SCHEDULE_KIND (t
)
198 & ~OMP_CLAUSE_SCHEDULE_MASK
);
199 fd
->chunk_size
= OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t
);
200 fd
->simd_schedule
= OMP_CLAUSE_SCHEDULE_SIMD (t
);
202 case OMP_CLAUSE_DIST_SCHEDULE
:
203 gcc_assert (distribute
);
204 fd
->chunk_size
= OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (t
);
206 case OMP_CLAUSE_COLLAPSE
:
207 fd
->collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (t
));
208 if (fd
->collapse
> 1)
210 collapse_iter
= &OMP_CLAUSE_COLLAPSE_ITERVAR (t
);
211 collapse_count
= &OMP_CLAUSE_COLLAPSE_COUNT (t
);
214 case OMP_CLAUSE_TILE
:
215 fd
->tiling
= OMP_CLAUSE_TILE_LIST (t
);
216 fd
->collapse
= list_length (fd
->tiling
);
217 gcc_assert (fd
->collapse
);
218 collapse_iter
= &OMP_CLAUSE_TILE_ITERVAR (t
);
219 collapse_count
= &OMP_CLAUSE_TILE_COUNT (t
);
221 case OMP_CLAUSE__REDUCTEMP_
:
222 fd
->have_reductemp
= true;
227 if (fd
->collapse
> 1 || fd
->tiling
)
230 fd
->loops
= &fd
->loop
;
232 if (fd
->ordered
&& fd
->collapse
== 1 && loops
!= NULL
)
237 collapse_iter
= &iterv
;
238 collapse_count
= &countv
;
241 /* FIXME: for now map schedule(auto) to schedule(static).
242 There should be analysis to determine whether all iterations
243 are approximately the same amount of work (then schedule(static)
244 is best) or if it varies (then schedule(dynamic,N) is better). */
245 if (fd
->sched_kind
== OMP_CLAUSE_SCHEDULE_AUTO
)
247 fd
->sched_kind
= OMP_CLAUSE_SCHEDULE_STATIC
;
248 gcc_assert (fd
->chunk_size
== NULL
);
250 gcc_assert ((fd
->collapse
== 1 && !fd
->tiling
) || collapse_iter
!= NULL
);
252 fd
->sched_kind
= OMP_CLAUSE_SCHEDULE_RUNTIME
;
253 if (fd
->sched_kind
== OMP_CLAUSE_SCHEDULE_RUNTIME
)
254 gcc_assert (fd
->chunk_size
== NULL
);
255 else if (fd
->chunk_size
== NULL
)
257 /* We only need to compute a default chunk size for ordered
258 static loops and dynamic loops. */
259 if (fd
->sched_kind
!= OMP_CLAUSE_SCHEDULE_STATIC
261 fd
->chunk_size
= (fd
->sched_kind
== OMP_CLAUSE_SCHEDULE_STATIC
)
262 ? integer_zero_node
: integer_one_node
;
265 int cnt
= fd
->ordered
? fd
->ordered
: fd
->collapse
;
266 for (i
= 0; i
< cnt
; i
++)
271 && (fd
->ordered
== 0 || loops
== NULL
))
273 else if (loops
!= NULL
)
278 loop
->v
= gimple_omp_for_index (for_stmt
, i
);
279 gcc_assert (SSA_VAR_P (loop
->v
));
280 gcc_assert (TREE_CODE (TREE_TYPE (loop
->v
)) == INTEGER_TYPE
281 || TREE_CODE (TREE_TYPE (loop
->v
)) == POINTER_TYPE
);
282 var
= TREE_CODE (loop
->v
) == SSA_NAME
? SSA_NAME_VAR (loop
->v
) : loop
->v
;
283 loop
->n1
= gimple_omp_for_initial (for_stmt
, i
);
285 loop
->cond_code
= gimple_omp_for_cond (for_stmt
, i
);
286 loop
->n2
= gimple_omp_for_final (for_stmt
, i
);
287 gcc_assert (loop
->cond_code
!= NE_EXPR
288 || (gimple_omp_for_kind (for_stmt
)
289 != GF_OMP_FOR_KIND_OACC_LOOP
));
291 t
= gimple_omp_for_incr (for_stmt
, i
);
292 gcc_assert (TREE_OPERAND (t
, 0) == var
);
293 loop
->step
= omp_get_for_step_from_incr (loc
, t
);
295 omp_adjust_for_condition (loc
, &loop
->cond_code
, &loop
->n2
, loop
->v
,
299 || (fd
->sched_kind
== OMP_CLAUSE_SCHEDULE_STATIC
300 && !fd
->have_ordered
))
302 if (fd
->collapse
== 1 && !fd
->tiling
)
303 iter_type
= TREE_TYPE (loop
->v
);
305 || TYPE_PRECISION (iter_type
)
306 < TYPE_PRECISION (TREE_TYPE (loop
->v
)))
308 = build_nonstandard_integer_type
309 (TYPE_PRECISION (TREE_TYPE (loop
->v
)), 1);
311 else if (iter_type
!= long_long_unsigned_type_node
)
313 if (POINTER_TYPE_P (TREE_TYPE (loop
->v
)))
314 iter_type
= long_long_unsigned_type_node
;
315 else if (TYPE_UNSIGNED (TREE_TYPE (loop
->v
))
316 && TYPE_PRECISION (TREE_TYPE (loop
->v
))
317 >= TYPE_PRECISION (iter_type
))
321 if (loop
->cond_code
== LT_EXPR
)
322 n
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (loop
->v
),
323 loop
->n2
, loop
->step
);
326 if (TREE_CODE (n
) != INTEGER_CST
327 || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type
), n
))
328 iter_type
= long_long_unsigned_type_node
;
330 else if (TYPE_PRECISION (TREE_TYPE (loop
->v
))
331 > TYPE_PRECISION (iter_type
))
335 if (loop
->cond_code
== LT_EXPR
)
338 n2
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (loop
->v
),
339 loop
->n2
, loop
->step
);
343 n1
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (loop
->v
),
344 loop
->n2
, loop
->step
);
347 if (TREE_CODE (n1
) != INTEGER_CST
348 || TREE_CODE (n2
) != INTEGER_CST
349 || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type
), n1
)
350 || !tree_int_cst_lt (n2
, TYPE_MAX_VALUE (iter_type
)))
351 iter_type
= long_long_unsigned_type_node
;
355 if (i
>= fd
->collapse
)
358 if (collapse_count
&& *collapse_count
== NULL
)
360 t
= fold_binary (loop
->cond_code
, boolean_type_node
,
361 fold_convert (TREE_TYPE (loop
->v
), loop
->n1
),
362 fold_convert (TREE_TYPE (loop
->v
), loop
->n2
));
363 if (t
&& integer_zerop (t
))
364 count
= build_zero_cst (long_long_unsigned_type_node
);
365 else if ((i
== 0 || count
!= NULL_TREE
)
366 && TREE_CODE (TREE_TYPE (loop
->v
)) == INTEGER_TYPE
367 && TREE_CONSTANT (loop
->n1
)
368 && TREE_CONSTANT (loop
->n2
)
369 && TREE_CODE (loop
->step
) == INTEGER_CST
)
371 tree itype
= TREE_TYPE (loop
->v
);
373 if (POINTER_TYPE_P (itype
))
374 itype
= signed_type_for (itype
);
375 t
= build_int_cst (itype
, (loop
->cond_code
== LT_EXPR
? -1 : 1));
376 t
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
377 fold_convert_loc (loc
, itype
, loop
->step
),
379 t
= fold_build2_loc (loc
, PLUS_EXPR
, itype
, t
,
380 fold_convert_loc (loc
, itype
, loop
->n2
));
381 t
= fold_build2_loc (loc
, MINUS_EXPR
, itype
, t
,
382 fold_convert_loc (loc
, itype
, loop
->n1
));
383 if (TYPE_UNSIGNED (itype
) && loop
->cond_code
== GT_EXPR
)
385 tree step
= fold_convert_loc (loc
, itype
, loop
->step
);
386 t
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, itype
,
387 fold_build1_loc (loc
, NEGATE_EXPR
,
389 fold_build1_loc (loc
, NEGATE_EXPR
,
393 t
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, itype
, t
,
394 fold_convert_loc (loc
, itype
,
396 t
= fold_convert_loc (loc
, long_long_unsigned_type_node
, t
);
397 if (count
!= NULL_TREE
)
398 count
= fold_build2_loc (loc
, MULT_EXPR
,
399 long_long_unsigned_type_node
,
403 if (TREE_CODE (count
) != INTEGER_CST
)
406 else if (count
&& !integer_zerop (count
))
413 && (fd
->sched_kind
!= OMP_CLAUSE_SCHEDULE_STATIC
414 || fd
->have_ordered
))
416 if (!tree_int_cst_lt (count
, TYPE_MAX_VALUE (long_integer_type_node
)))
417 iter_type
= long_long_unsigned_type_node
;
419 iter_type
= long_integer_type_node
;
421 else if (collapse_iter
&& *collapse_iter
!= NULL
)
422 iter_type
= TREE_TYPE (*collapse_iter
);
423 fd
->iter_type
= iter_type
;
424 if (collapse_iter
&& *collapse_iter
== NULL
)
425 *collapse_iter
= create_tmp_var (iter_type
, ".iter");
426 if (collapse_count
&& *collapse_count
== NULL
)
429 *collapse_count
= fold_convert_loc (loc
, iter_type
, count
);
431 *collapse_count
= create_tmp_var (iter_type
, ".count");
434 if (fd
->collapse
> 1 || fd
->tiling
|| (fd
->ordered
&& loops
))
436 fd
->loop
.v
= *collapse_iter
;
437 fd
->loop
.n1
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
438 fd
->loop
.n2
= *collapse_count
;
439 fd
->loop
.step
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 1);
440 fd
->loop
.cond_code
= LT_EXPR
;
446 /* Build a call to GOMP_barrier. */
449 omp_build_barrier (tree lhs
)
451 tree fndecl
= builtin_decl_explicit (lhs
? BUILT_IN_GOMP_BARRIER_CANCEL
452 : BUILT_IN_GOMP_BARRIER
);
453 gcall
*g
= gimple_build_call (fndecl
, 0);
455 gimple_call_set_lhs (g
, lhs
);
459 /* Return maximum possible vectorization factor for the target. */
466 || !flag_tree_loop_optimize
467 || (!flag_tree_loop_vectorize
468 && global_options_set
.x_flag_tree_loop_vectorize
))
471 auto_vector_sizes sizes
;
472 targetm
.vectorize
.autovectorize_vector_sizes (&sizes
);
473 if (!sizes
.is_empty ())
476 for (unsigned int i
= 0; i
< sizes
.length (); ++i
)
477 vf
= ordered_max (vf
, sizes
[i
]);
481 machine_mode vqimode
= targetm
.vectorize
.preferred_simd_mode (QImode
);
482 if (GET_MODE_CLASS (vqimode
) == MODE_VECTOR_INT
)
483 return GET_MODE_NUNITS (vqimode
);
488 /* Return maximum SIMT width if offloading may target SIMT hardware. */
491 omp_max_simt_vf (void)
495 if (ENABLE_OFFLOADING
)
496 for (const char *c
= getenv ("OFFLOAD_TARGET_NAMES"); c
;)
498 if (!strncmp (c
, "nvptx", strlen ("nvptx")))
500 else if ((c
= strchr (c
, ',')))
506 /* Encode an oacc launch argument. This matches the GOMP_LAUNCH_PACK
507 macro on gomp-constants.h. We do not check for overflow. */
510 oacc_launch_pack (unsigned code
, tree device
, unsigned op
)
514 res
= build_int_cst (unsigned_type_node
, GOMP_LAUNCH_PACK (code
, 0, op
));
517 device
= fold_build2 (LSHIFT_EXPR
, unsigned_type_node
,
518 device
, build_int_cst (unsigned_type_node
,
519 GOMP_LAUNCH_DEVICE_SHIFT
));
520 res
= fold_build2 (BIT_IOR_EXPR
, unsigned_type_node
, res
, device
);
525 /* FIXME: What is the following comment for? */
526 /* Look for compute grid dimension clauses and convert to an attribute
527 attached to FN. This permits the target-side code to (a) massage
528 the dimensions, (b) emit that data and (c) optimize. Non-constant
529 dimensions are pushed onto ARGS.
531 The attribute value is a TREE_LIST. A set of dimensions is
532 represented as a list of INTEGER_CST. Those that are runtime
533 exprs are represented as an INTEGER_CST of zero.
535 TODO: Normally the attribute will just contain a single such list. If
536 however it contains a list of lists, this will represent the use of
537 device_type. Each member of the outer list is an assoc list of
538 dimensions, keyed by the device type. The first entry will be the
539 default. Well, that's the plan. */
541 /* Replace any existing oacc fn attribute with updated dimensions. */
544 oacc_replace_fn_attrib (tree fn
, tree dims
)
546 tree ident
= get_identifier (OACC_FN_ATTRIB
);
547 tree attribs
= DECL_ATTRIBUTES (fn
);
549 /* If we happen to be present as the first attrib, drop it. */
550 if (attribs
&& TREE_PURPOSE (attribs
) == ident
)
551 attribs
= TREE_CHAIN (attribs
);
552 DECL_ATTRIBUTES (fn
) = tree_cons (ident
, dims
, attribs
);
555 /* Scan CLAUSES for launch dimensions and attach them to the oacc
556 function attribute. Push any that are non-constant onto the ARGS
557 list, along with an appropriate GOMP_LAUNCH_DIM tag. */
560 oacc_set_fn_attrib (tree fn
, tree clauses
, vec
<tree
> *args
)
562 /* Must match GOMP_DIM ordering. */
563 static const omp_clause_code ids
[]
564 = { OMP_CLAUSE_NUM_GANGS
, OMP_CLAUSE_NUM_WORKERS
,
565 OMP_CLAUSE_VECTOR_LENGTH
};
567 tree dims
[GOMP_DIM_MAX
];
569 tree attr
= NULL_TREE
;
570 unsigned non_const
= 0;
572 for (ix
= GOMP_DIM_MAX
; ix
--;)
574 tree clause
= omp_find_clause (clauses
, ids
[ix
]);
575 tree dim
= NULL_TREE
;
578 dim
= OMP_CLAUSE_EXPR (clause
, ids
[ix
]);
580 if (dim
&& TREE_CODE (dim
) != INTEGER_CST
)
582 dim
= integer_zero_node
;
583 non_const
|= GOMP_DIM_MASK (ix
);
585 attr
= tree_cons (NULL_TREE
, dim
, attr
);
588 oacc_replace_fn_attrib (fn
, attr
);
592 /* Push a dynamic argument set. */
593 args
->safe_push (oacc_launch_pack (GOMP_LAUNCH_DIM
,
594 NULL_TREE
, non_const
));
595 for (unsigned ix
= 0; ix
!= GOMP_DIM_MAX
; ix
++)
596 if (non_const
& GOMP_DIM_MASK (ix
))
597 args
->safe_push (dims
[ix
]);
601 /* Process the routine's dimension clauess to generate an attribute
602 value. Issue diagnostics as appropriate. We default to SEQ
603 (OpenACC 2.5 clarifies this). All dimensions have a size of zero
604 (dynamic). TREE_PURPOSE is set to indicate whether that dimension
605 can have a loop partitioned on it. non-zero indicates
606 yes, zero indicates no. By construction once a non-zero has been
607 reached, further inner dimensions must also be non-zero. We set
608 TREE_VALUE to zero for the dimensions that may be partitioned and
609 1 for the other ones -- if a loop is (erroneously) spawned at
610 an outer level, we don't want to try and partition it. */
613 oacc_build_routine_dims (tree clauses
)
615 /* Must match GOMP_DIM ordering. */
616 static const omp_clause_code ids
[]
617 = {OMP_CLAUSE_GANG
, OMP_CLAUSE_WORKER
, OMP_CLAUSE_VECTOR
, OMP_CLAUSE_SEQ
};
621 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
622 for (ix
= GOMP_DIM_MAX
+ 1; ix
--;)
623 if (OMP_CLAUSE_CODE (clauses
) == ids
[ix
])
626 error_at (OMP_CLAUSE_LOCATION (clauses
),
627 "multiple loop axes specified for routine");
632 /* Default to SEQ. */
634 level
= GOMP_DIM_MAX
;
636 tree dims
= NULL_TREE
;
638 for (ix
= GOMP_DIM_MAX
; ix
--;)
639 dims
= tree_cons (build_int_cst (boolean_type_node
, ix
>= level
),
640 build_int_cst (integer_type_node
, ix
< level
), dims
);
645 /* Retrieve the oacc function attrib and return it. Non-oacc
646 functions will return NULL. */
649 oacc_get_fn_attrib (tree fn
)
651 return lookup_attribute (OACC_FN_ATTRIB
, DECL_ATTRIBUTES (fn
));
654 /* Return true if FN is an OpenMP or OpenACC offloading function. */
657 offloading_function_p (tree fn
)
659 tree attrs
= DECL_ATTRIBUTES (fn
);
660 return (lookup_attribute ("omp declare target", attrs
)
661 || lookup_attribute ("omp target entrypoint", attrs
));
664 /* Extract an oacc execution dimension from FN. FN must be an
665 offloaded function or routine that has already had its execution
666 dimensions lowered to the target-specific values. */
669 oacc_get_fn_dim_size (tree fn
, int axis
)
671 tree attrs
= oacc_get_fn_attrib (fn
);
673 gcc_assert (axis
< GOMP_DIM_MAX
);
675 tree dims
= TREE_VALUE (attrs
);
677 dims
= TREE_CHAIN (dims
);
679 int size
= TREE_INT_CST_LOW (TREE_VALUE (dims
));
684 /* Extract the dimension axis from an IFN_GOACC_DIM_POS or
685 IFN_GOACC_DIM_SIZE call. */
688 oacc_get_ifn_dim_arg (const gimple
*stmt
)
690 gcc_checking_assert (gimple_call_internal_fn (stmt
) == IFN_GOACC_DIM_SIZE
691 || gimple_call_internal_fn (stmt
) == IFN_GOACC_DIM_POS
);
692 tree arg
= gimple_call_arg (stmt
, 0);
693 HOST_WIDE_INT axis
= TREE_INT_CST_LOW (arg
);
695 gcc_checking_assert (axis
>= 0 && axis
< GOMP_DIM_MAX
);