2014-11-01  Andrew MacLeod  <amacleod@redhat.com>
[gcc.git] / gcc / internal-fn.c
/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
42 #include "internal-fn.h"
43 #include "gimple-expr.h"
44 #include "is-a.h"
45 #include "gimple.h"
46 #include "ubsan.h"
47 #include "target.h"
48 #include "stringpool.h"
49 #include "tree-ssanames.h"
50 #include "diagnostic-core.h"
51
/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}
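
/* For illustration only: an entry in internal-fn.def shaped like

     DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF, NULL)

   (the flags shown here are a plausible sketch, not necessarily the
   exact definition) expands to the string "LOAD_LANES" in
   internal_fn_name_array, to ECF_CONST | ECF_LEAF in
   internal_fn_flags_array, and leaves
   internal_fn_fnspec_array[IFN_LOAD_LANES] null because FNSPEC is
   NULL.  */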

/* ARRAY_TYPE is an array of vectors.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  machine_mode imode;
  machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
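
/* A concrete, target-dependent illustration: on AArch64 an array of
   two V4SI vectors has the 256-bit integer mode OImode, so the lookup
   above would ask vec_load_lanes_optab for the (OImode, V4SImode)
   handler, which corresponds to the ld2 instruction pattern.  */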

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}
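
/* In gimple these two calls look roughly like (a sketch, using the
   internal-call spelling of the dumps):

     vect_array = .LOAD_LANES (MEM_REF);    // lhs is the array value
     MEM_REF = .STORE_LANES (vect_array);   // lhs is the memory

   which is why expand_LOAD_LANES takes the type from the lhs while
   expand_STORE_LANES takes it from the stored operand.  */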

/* ANNOTATE is consumed earlier in the compilation pipeline; it should
   never survive to expansion.  */

static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is PLUS_EXPR or MINUS_EXPR.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can do just one comparison
         and conditional jump at run time instead of two (three are
         present in the emitted code).  If one of the arguments is
         CONST_INT, all we need is to make sure it is op1; then the first
         emit_cmp_and_jump_insns will be just folded.  Otherwise try
         to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          wide_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 1;
              else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          wide_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 1;
              else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 2;
            }
        }

      /* If op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
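
/* A sketch of the generic fallback emitted above for lhs = a + b when
   nothing is known about the sign of either operand (pos_neg == 3):

       res = (unsigned) a + (unsigned) b;
       if (b < 0) goto sub_check;
       if (res >= a) goto done;     // b >= 0: overflow iff res < a
       goto do_error;
     sub_check:
       if (res <= a) goto done;     // b < 0: overflow iff res > a
     do_error:
       __ubsan_handle_add_overflow (...);
     done:
       lhs = res;

   For example, with 8-bit operands, 100 + 50 gives res == 150, which
   read as a signed value is -106 < 100, so the b >= 0 comparison falls
   through to do_error and the signed overflow is reported.  */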

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
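
/* The fallback above reduces to a single comparison: in two's
   complement the only operand whose negation overflows is the most
   negative value, so for lhs = -a the emitted code is essentially

       res = -(unsigned) a;
       if (a != TYPE_MIN_VALUE) goto done;
       __ubsan_handle_negate_overflow (...);
     done:
       lhs = res;  */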

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is the low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx_code_label *large_op0 = gen_label_rtx ();
          rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
          rtx_code_label *one_small_one_large = gen_label_rtx ();
          rtx_code_label *both_ops_large = gen_label_rtx ();
          rtx_code_label *after_hipart_neg = gen_label_rtx ();
          rtx_code_label *after_lopart_neg = gen_label_rtx ();
          rtx_code_label *do_overflow = gen_label_rtx ();
          rtx_code_label *hipart_different = gen_label_rtx ();

          unsigned int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;

          if (TREE_CODE (arg0) == SSA_NAME)
            {
              wide_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op0_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op0_medium_p = true;
                  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = 0;
                  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              wide_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op1_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op1_medium_p = true;
                  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = 0;
                  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = -1;
                }
            }

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }

          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the
             high part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);

          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          /* At this point hipart{0,1} are both in [-1, 0].  If they are
             the same, overflow happened if res is negative; if they are
             different, overflow happened if res is positive.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
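
/* In outline, the half-precision fallback above works by splitting each
   operand into high and low hprec-bit halves.  When one operand sign
   extends from hmode (the "small" one), the product reduces to the two
   widening multiplications lo0 * lo1 and lopart * hipart, plus sign
   corrections applied when hipart or lopart is negative.  The result
   fits, and hence no overflow occurred, exactly when the accumulated
   high part (loxhi) still sign extends from hmode, which is what the
   signbitloxhi/hipartloxhi comparison checks.  */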

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Expand MASK_LOAD call STMT.  */

static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}

/* Expand MASK_STORE call STMT.  */

static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
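
/* In gimple dumps these calls look roughly like (a sketch; the second
   argument carries the alignment/offset value used above to rebuild
   the MEM_REF):

     vect_x = .MASK_LOAD (ptr, align, mask);
     .MASK_STORE (ptr, align, mask, vect_y);

   which lets the vectorizer conditionalize loads and stores on a
   per-lane mask without introducing control flow.  */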

/* ABNORMAL_DISPATCHER only models abnormal control flow for the CFG;
   there is nothing to emit for it.  */

static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}

static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target,
                         VOIDmode, EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}
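
/* By this point any __builtin_expect hint has already been consumed by
   branch prediction, so the expansion above just forwards the value:
   for lhs = .BUILTIN_EXPECT (cond, 1) it simply emits lhs = cond.  */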

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to an internal function.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}