Implement -fsanitize=signed-integer-overflow.
gcc/internal-fn.c
/* Internal functions.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "internal-fn.h"
#include "tree.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
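
/* Illustrative sketch (not part of the build): the two tables above are
   generated with the X-macro technique.  Assuming internal-fn.def contains
   an entry such as

     DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF)

   the #define/#include/#undef sequences above expand roughly to the
   following.  */
#if 0
const char *const internal_fn_name_array[] = {
  "LOAD_LANES",
  /* ... one string per DEF_INTERNAL_FN entry ...  */
  "<invalid-fn>"
};

const int internal_fn_flags_array[] = {
  ECF_CONST | ECF_LEAF,
  /* ... one flag word per DEF_INTERNAL_FN entry ...  */
  0
};
#endif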

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
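
/* For example, on a target with 128-bit vectors and load/store-lanes
   support (such as AArch64), an ARRAY_TYPE holding two V4SI vectors is
   given a wide integer mode (OImode, 256 bits), so the lookup above is
   roughly convert_optab_handler (vec_load_lanes_optab, OImode, V4SImode),
   which resolves to an ld2-style instruction pattern.  (Illustrative;
   the exact modes and patterns are target-dependent.)  */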

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

/* ANNOTATE internal calls are removed earlier in the compilation, so
   reaching expansion here is a bug.  */

static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();

      /* Compute the operation.  On RTL level, the addition/subtraction
         is always performed unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If op1 is negative, we have to use a different check.  */
      emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                               false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                               NULL_RTX, mode, false, done_label,
                               PROB_VERY_LIKELY);
      /* If we get here, we have to print the error.  */
      emit_jump (do_error);

      emit_label (sub_check);
      /* For addition we have k = a + b with b < 0 here, so k <= a must
         hold; for subtraction k = a - b with b < 0, so k >= a must hold.  */
      emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                               NULL_RTX, mode, false, done_label,
                               PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  expand_normal (fn);
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
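
/* Illustrative sketch (not part of the build): the fallback above relies on
   the fact that with wraparound arithmetic "a + b overflowed" reduces to a
   single ordered comparison of the result against one operand, split on the
   sign of b.  A standalone C version of the same addition check, using
   unsigned arithmetic so the probe itself has no undefined behaviour, might
   look like this (the conversion back to int relies on GCC's wrapping
   semantics, just as the RTL-level computation does).  */
#if 0
#include <stdbool.h>

static bool
add_overflows_int (int a, int b)
{
  /* Compute the wraparound sum, as the RTL-level addition does.  */
  unsigned int ur = (unsigned int) a + (unsigned int) b;
  int r = (int) ur;
  /* b >= 0: overflow iff r < a.  b < 0: overflow iff r > a.  */
  return b >= 0 ? r < a : r > a;
}
#endif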

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  expand_normal (fn);
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
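
/* Illustrative sketch (not part of the build): negation of a two's
   complement value overflows only for the most negative value of the type,
   e.g. -INT_MIN for int.  The fallback above encodes exactly that single
   comparison; a standalone C equivalent might look like this.  */
#if 0
#include <limits.h>
#include <stdbool.h>

static bool
neg_overflows_int (int a)
{
  /* -a is representable for every value except INT_MIN.  */
  return a == INT_MIN;
}
#endif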

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
      else
        {
          /* For now we don't instrument this.  See __mulvDI3 in libgcc2.c
             for what could be done.  */
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  expand_normal (fn);
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
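
/* Illustrative sketch (not part of the build): the widening-multiply path
   above detects overflow by computing the product in a mode twice as wide
   and checking that the high half is just the sign extension of the low
   half.  A standalone C version for 32-bit operands, assuming a 64-bit
   "long long" and GCC's wrapping conversions and arithmetic right shift,
   might look like this.  */
#if 0
#include <stdbool.h>

static bool
mul_overflows_int (int a, int b)
{
  long long wide = (long long) a * (long long) b; /* WIDEN_MULT_EXPR  */
  int lo = (int) wide;                            /* low half (RES)  */
  int hi = (int) (wide >> 32);                    /* high half (HIPART)  */
  /* No overflow iff HIPART equals RES < 0 ? -1 : 0, i.e. the sign bit of
     the low half replicated (SIGNBIT in the code above).  */
  return hi != (lo < 0 ? -1 : 0);
}
#endif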

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}
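
/* For example, with -fsanitize=signed-integer-overflow a negation such as

     y = -x;

   is instrumented as something like

     y = UBSAN_CHECK_SUB (0, x);

   (illustrative GIMPLE, simplified), so a literal-zero first operand
   identifies a negation and is routed to the dedicated expander above.  */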

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call. */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}