1 /* Utility routines for data type conversion for GCC.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1997, 1998,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22
23 /* These routines are somewhat language-independent utility functions
24 intended to be called by the language-specific convert () functions. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "convert.h"
33 #include "toplev.h"
34 #include "langhooks.h"
35 #include "real.h"
36
37 /* Convert EXPR to some pointer or reference type TYPE.
38 EXPR must be pointer, reference, integer, enumeral, or literal zero;
39 in other cases error is called. */
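/* Illustrative sketch (not part of the original comment): with a 32-bit
   int `i' on a target whose POINTER_SIZE is 64, converting `i' to `char *'
   first widens `i' to a 64-bit integer type and then converts that value
   to the pointer type; the literal 0 simply becomes a null pointer
   constant of TYPE.  */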
40
41 tree
42 convert_to_pointer (tree type, tree expr)
43 {
44 if (TREE_TYPE (expr) == type)
45 return expr;
46
47 if (integer_zerop (expr))
48 {
49 tree t = build_int_cst (type, 0);
50 if (TREE_OVERFLOW (expr) || TREE_CONSTANT_OVERFLOW (expr))
51 t = force_fit_type (t, 0, TREE_OVERFLOW (expr),
52 TREE_CONSTANT_OVERFLOW (expr));
53 return t;
54 }
55
56 switch (TREE_CODE (TREE_TYPE (expr)))
57 {
58 case POINTER_TYPE:
59 case REFERENCE_TYPE:
60 return fold_build1 (NOP_EXPR, type, expr);
61
62 case INTEGER_TYPE:
63 case ENUMERAL_TYPE:
64 case BOOLEAN_TYPE:
65 if (TYPE_PRECISION (TREE_TYPE (expr)) != POINTER_SIZE)
66 expr = fold_build1 (NOP_EXPR,
67 lang_hooks.types.type_for_size (POINTER_SIZE, 0),
68 expr);
69 return fold_build1 (CONVERT_EXPR, type, expr);
70
71
72 default:
73 error ("cannot convert to a pointer type");
74 return convert_to_pointer (type, integer_zero_node);
75 }
76 }
77
78 /* Strip any floating point extensions from EXP and return the narrower value. */
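/* Worked example (illustrative): in `float f; ... f + 2.0', the operand
   `(double) f' strips back down to `f', and the constant 2.0, which is
   exactly representable as a float, is rebuilt as a float constant, so the
   whole addition can later be carried out in single precision.  */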
79 tree
80 strip_float_extensions (tree exp)
81 {
82 tree sub, expt, subt;
83
84 /* For a floating point constant, look up the narrowest type that can hold
85 it properly and handle it like (type)(narrowest_type)constant.
86 This way we can optimize, for instance, a=a*2.0 where "a" is float
87 but 2.0 is a double constant. */
88 if (TREE_CODE (exp) == REAL_CST)
89 {
90 REAL_VALUE_TYPE orig;
91 tree type = NULL;
92
93 orig = TREE_REAL_CST (exp);
94 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
95 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
96 type = float_type_node;
97 else if (TYPE_PRECISION (TREE_TYPE (exp))
98 > TYPE_PRECISION (double_type_node)
99 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
100 type = double_type_node;
101 if (type)
102 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
103 }
104
105 if (TREE_CODE (exp) != NOP_EXPR
106 && TREE_CODE (exp) != CONVERT_EXPR)
107 return exp;
108
109 sub = TREE_OPERAND (exp, 0);
110 subt = TREE_TYPE (sub);
111 expt = TREE_TYPE (exp);
112
113 if (!FLOAT_TYPE_P (subt))
114 return exp;
115
116 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
117 return exp;
118
119 return strip_float_extensions (sub);
120 }
121
122
123 /* Convert EXPR to some floating-point type TYPE.
124
125 EXPR must be float, integer, or enumeral;
126 in other cases error is called. */
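/* A few illustrative cases of what follows when optimizing (assuming the
   narrower math builtins exist in the runtime):

     (float) sqrt ((double) f)  ->  sqrtf (f)        where f is a float
     (float) floor (d)          ->  left alone, since rewriting it to
                                    floorf ((float) d) could round d up

   The precision checks below are what reject the second case.  */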
127
128 tree
129 convert_to_real (tree type, tree expr)
130 {
131 enum built_in_function fcode = builtin_mathfn_code (expr);
132 tree itype = TREE_TYPE (expr);
133
134 /* Disable until we figure out how to decide whether the functions are
135 present at run time. */
136 /* Convert (float)sqrt((double)x) where x is float into sqrtf(x). */
137 if (optimize
138 && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
139 || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
140 {
141 switch (fcode)
142 {
143 #define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
144 CASE_MATHFN (ACOS)
145 CASE_MATHFN (ACOSH)
146 CASE_MATHFN (ASIN)
147 CASE_MATHFN (ASINH)
148 CASE_MATHFN (ATAN)
149 CASE_MATHFN (ATANH)
150 CASE_MATHFN (CBRT)
151 CASE_MATHFN (COS)
152 CASE_MATHFN (COSH)
153 CASE_MATHFN (ERF)
154 CASE_MATHFN (ERFC)
155 CASE_MATHFN (EXP)
156 CASE_MATHFN (EXP10)
157 CASE_MATHFN (EXP2)
158 CASE_MATHFN (EXPM1)
159 CASE_MATHFN (FABS)
160 CASE_MATHFN (GAMMA)
161 CASE_MATHFN (J0)
162 CASE_MATHFN (J1)
163 CASE_MATHFN (LGAMMA)
164 CASE_MATHFN (LOG)
165 CASE_MATHFN (LOG10)
166 CASE_MATHFN (LOG1P)
167 CASE_MATHFN (LOG2)
168 CASE_MATHFN (LOGB)
169 CASE_MATHFN (POW10)
170 CASE_MATHFN (SIN)
171 CASE_MATHFN (SINH)
172 CASE_MATHFN (SQRT)
173 CASE_MATHFN (TAN)
174 CASE_MATHFN (TANH)
175 CASE_MATHFN (TGAMMA)
176 CASE_MATHFN (Y0)
177 CASE_MATHFN (Y1)
178 #undef CASE_MATHFN
179 {
180 tree arg0 = strip_float_extensions (TREE_VALUE (TREE_OPERAND (expr, 1)));
181 tree newtype = type;
182
183 /* We have (outertype)sqrt((innertype)x). Choose the wider type of
184 the two as the safe type for the operation. */
185 if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
186 newtype = TREE_TYPE (arg0);
187
188 /* Be careful about integer to fp conversions.
189 These may still overflow. */
190 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
191 && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
192 && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
193 || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
194 {
195 tree arglist;
196 tree fn = mathfn_built_in (newtype, fcode);
197
198 if (fn)
199 {
200 arglist = build_tree_list (NULL_TREE, fold (convert_to_real (newtype, arg0)));
201 expr = build_function_call_expr (fn, arglist);
202 if (newtype == type)
203 return expr;
204 }
205 }
206 }
207 default:
208 break;
209 }
210 }
211 if (optimize
212 && (((fcode == BUILT_IN_FLOORL
213 || fcode == BUILT_IN_CEILL
214 || fcode == BUILT_IN_ROUNDL
215 || fcode == BUILT_IN_RINTL
216 || fcode == BUILT_IN_TRUNCL
217 || fcode == BUILT_IN_NEARBYINTL)
218 && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
219 || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
220 || ((fcode == BUILT_IN_FLOOR
221 || fcode == BUILT_IN_CEIL
222 || fcode == BUILT_IN_ROUND
223 || fcode == BUILT_IN_RINT
224 || fcode == BUILT_IN_TRUNC
225 || fcode == BUILT_IN_NEARBYINT)
226 && (TYPE_MODE (type) == TYPE_MODE (float_type_node)))))
227 {
228 tree fn = mathfn_built_in (type, fcode);
229
230 if (fn)
231 {
232 tree arg
233 = strip_float_extensions (TREE_VALUE (TREE_OPERAND (expr, 1)));
234
235 /* Make sure (type)arg0 is an extension, otherwise we could end up
236 changing (float)floor(double d) into floorf((float)d), which is
237 incorrect because (float)d uses round-to-nearest and can round
238 up to the next integer. */
239 if (TYPE_PRECISION (type) >= TYPE_PRECISION (TREE_TYPE (arg)))
240 return
241 build_function_call_expr (fn,
242 build_tree_list (NULL_TREE,
243 fold (convert_to_real (type, arg))));
244 }
245 }
246
247 /* Propagate the cast into the operation. */
248 if (itype != type && FLOAT_TYPE_P (type))
249 switch (TREE_CODE (expr))
250 {
251 /* Convert (float)-x into -(float)x. This is always safe. */
252 case ABS_EXPR:
253 case NEGATE_EXPR:
254 if (TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (expr)))
255 return build1 (TREE_CODE (expr), type,
256 fold (convert_to_real (type,
257 TREE_OPERAND (expr, 0))));
258 break;
259 /* Convert (outertype)((innertype0)a+(innertype1)b)
260 into ((newtype)a+(newtype)b) where newtype
261 is the widest of these types. */
262 case PLUS_EXPR:
263 case MINUS_EXPR:
264 case MULT_EXPR:
265 case RDIV_EXPR:
266 {
267 tree arg0 = strip_float_extensions (TREE_OPERAND (expr, 0));
268 tree arg1 = strip_float_extensions (TREE_OPERAND (expr, 1));
269
270 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
271 && FLOAT_TYPE_P (TREE_TYPE (arg1)))
272 {
273 tree newtype = type;
274
275 if (TYPE_MODE (TREE_TYPE (arg0)) == SDmode
276 || TYPE_MODE (TREE_TYPE (arg1)) == SDmode)
277 newtype = dfloat32_type_node;
278 if (TYPE_MODE (TREE_TYPE (arg0)) == DDmode
279 || TYPE_MODE (TREE_TYPE (arg1)) == DDmode)
280 newtype = dfloat64_type_node;
281 if (TYPE_MODE (TREE_TYPE (arg0)) == TDmode
282 || TYPE_MODE (TREE_TYPE (arg1)) == TDmode)
283 newtype = dfloat128_type_node;
284 if (newtype == dfloat32_type_node
285 || newtype == dfloat64_type_node
286 || newtype == dfloat128_type_node)
287 {
288 expr = build2 (TREE_CODE (expr), newtype,
289 fold (convert_to_real (newtype, arg0)),
290 fold (convert_to_real (newtype, arg1)));
291 if (newtype == type)
292 return expr;
293 break;
294 }
295
296 if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (newtype))
297 newtype = TREE_TYPE (arg0);
298 if (TYPE_PRECISION (TREE_TYPE (arg1)) > TYPE_PRECISION (newtype))
299 newtype = TREE_TYPE (arg1);
300 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (itype))
301 {
302 expr = build2 (TREE_CODE (expr), newtype,
303 fold (convert_to_real (newtype, arg0)),
304 fold (convert_to_real (newtype, arg1)));
305 if (newtype == type)
306 return expr;
307 }
308 }
309 }
310 break;
311 default:
312 break;
313 }
314
315 switch (TREE_CODE (TREE_TYPE (expr)))
316 {
317 case REAL_TYPE:
318 /* Ignore the conversion if we don't need to store intermediate
319 results and neither type is a decimal float. */
320 return build1 ((flag_float_store
321 || DECIMAL_FLOAT_TYPE_P (type)
322 || DECIMAL_FLOAT_TYPE_P (itype))
323 ? CONVERT_EXPR : NOP_EXPR, type, expr);
324
325 case INTEGER_TYPE:
326 case ENUMERAL_TYPE:
327 case BOOLEAN_TYPE:
328 return build1 (FLOAT_EXPR, type, expr);
329
330 case COMPLEX_TYPE:
331 return convert (type,
332 fold_build1 (REALPART_EXPR,
333 TREE_TYPE (TREE_TYPE (expr)), expr));
334
335 case POINTER_TYPE:
336 case REFERENCE_TYPE:
337 error ("pointer value used where a floating point value was expected");
338 return convert_to_real (type, integer_zero_node);
339
340 default:
341 error ("aggregate value used where a float was expected");
342 return convert_to_real (type, integer_zero_node);
343 }
344 }
345
346 /* Convert EXPR to some integer (or enum) type TYPE.
347
348 EXPR must be pointer, integer, discrete (enum, char, or bool), float, or
349 vector; in other cases error is called.
350
351 The result of this is always supposed to be a newly created tree node
352 not in use in any existing structure. */
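/* Illustrative cases of the conversions handled below (the first one only
   when optimizing):

     (long) round (d)             ->  lround (d)
     (int) ((long long) a << 32)  ->  0, keeping any side effects of a
     (int) ((long) a + (long) b)  ->  the addition done directly in int,
                                      when that truncation is safe  */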
353
354 tree
355 convert_to_integer (tree type, tree expr)
356 {
357 enum tree_code ex_form = TREE_CODE (expr);
358 tree intype = TREE_TYPE (expr);
359 unsigned int inprec = TYPE_PRECISION (intype);
360 unsigned int outprec = TYPE_PRECISION (type);
361
362 /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
363 be. Consider `enum E { a, b = (enum E) 3 };'. */
364 if (!COMPLETE_TYPE_P (type))
365 {
366 error ("conversion to incomplete type");
367 return error_mark_node;
368 }
369
370 /* Convert e.g. (long)round(d) -> lround(d). */
371 /* If we're converting to char, we may encounter differing behavior
372 between converting from double->char vs double->long->char.
373 We're in "undefined" territory but we prefer to be conservative,
374 so only proceed in "unsafe" math mode. */
375 if (optimize
376 && (flag_unsafe_math_optimizations
377 || (long_integer_type_node
378 && outprec >= TYPE_PRECISION (long_integer_type_node))))
379 {
380 tree s_expr = strip_float_extensions (expr);
381 tree s_intype = TREE_TYPE (s_expr);
382 const enum built_in_function fcode = builtin_mathfn_code (s_expr);
383 tree fn = 0;
384
385 switch (fcode)
386 {
387 CASE_FLT_FN (BUILT_IN_CEIL):
388 /* Only convert in ISO C99 mode. */
389 if (!TARGET_C99_FUNCTIONS)
390 break;
391 if (outprec < TYPE_PRECISION (long_integer_type_node)
392 || (outprec == TYPE_PRECISION (long_integer_type_node)
393 && !TYPE_UNSIGNED (type)))
394 fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
395 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
396 && !TYPE_UNSIGNED (type))
397 fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
398 break;
399
400 CASE_FLT_FN (BUILT_IN_FLOOR):
401 /* Only convert in ISO C99 mode. */
402 if (!TARGET_C99_FUNCTIONS)
403 break;
404 if (outprec < TYPE_PRECISION (long_integer_type_node)
405 || (outprec == TYPE_PRECISION (long_integer_type_node)
406 && !TYPE_UNSIGNED (type)))
407 fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
408 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
409 && !TYPE_UNSIGNED (type))
410 fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
411 break;
412
413 CASE_FLT_FN (BUILT_IN_ROUND):
414 if (outprec < TYPE_PRECISION (long_integer_type_node)
415 || (outprec == TYPE_PRECISION (long_integer_type_node)
416 && !TYPE_UNSIGNED (type)))
417 fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
418 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
419 && !TYPE_UNSIGNED (type))
420 fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
421 break;
422
423 CASE_FLT_FN (BUILT_IN_RINT):
424 /* Only convert rint* if we can ignore math exceptions. */
425 if (flag_trapping_math)
426 break;
427 /* ... Fall through ... */
428 CASE_FLT_FN (BUILT_IN_NEARBYINT):
429 if (outprec < TYPE_PRECISION (long_integer_type_node)
430 || (outprec == TYPE_PRECISION (long_integer_type_node)
431 && !TYPE_UNSIGNED (type)))
432 fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
433 else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
434 && !TYPE_UNSIGNED (type))
435 fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
436 break;
437
438 CASE_FLT_FN (BUILT_IN_TRUNC):
439 {
440 tree arglist = TREE_OPERAND (s_expr, 1);
441 return convert_to_integer (type, TREE_VALUE (arglist));
442 }
443
444 default:
445 break;
446 }
447
448 if (fn)
449 {
450 tree arglist = TREE_OPERAND (s_expr, 1);
451 tree newexpr = build_function_call_expr (fn, arglist);
452 return convert_to_integer (type, newexpr);
453 }
454 }
455
456 switch (TREE_CODE (intype))
457 {
458 case POINTER_TYPE:
459 case REFERENCE_TYPE:
460 if (integer_zerop (expr))
461 return build_int_cst (type, 0);
462
463 /* Convert to an unsigned integer of the correct width first,
464 and from there widen/truncate to the required type. */
465 expr = fold_build1 (CONVERT_EXPR,
466 lang_hooks.types.type_for_size (POINTER_SIZE, 0),
467 expr);
468 return fold_convert (type, expr);
469
470 case INTEGER_TYPE:
471 case ENUMERAL_TYPE:
472 case BOOLEAN_TYPE:
473 /* If this is a logical operation, which just returns 0 or 1, we can
474 change the type of the expression. */
475
476 if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
477 {
478 expr = copy_node (expr);
479 TREE_TYPE (expr) = type;
480 return expr;
481 }
482
483 /* If we are widening the type, put in an explicit conversion.
484 Similarly if we are not changing the width. After this, we know
485 we are truncating EXPR. */
486
487 else if (outprec >= inprec)
488 {
489 enum tree_code code;
490
491 /* If the precision of the EXPR's type is K bits and the
492 destination mode has more bits, and the sign is changing,
493 it is not safe to use a NOP_EXPR. For example, suppose
494 that EXPR's type is a 3-bit unsigned integer type, the
495 TYPE is a 3-bit signed integer type, and the machine mode
496 for the types is 8-bit QImode. In that case, the
497 conversion necessitates an explicit sign-extension. In
498 the signed-to-unsigned case the high-order bits have to
499 be cleared. */
500 if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
501 && (TYPE_PRECISION (TREE_TYPE (expr))
502 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr)))))
503 code = CONVERT_EXPR;
504 else
505 code = NOP_EXPR;
506
507 return fold_build1 (code, type, expr);
508 }
509
510 /* If TYPE is an enumeral type or a type with a precision less
511 than the number of bits in its mode, do the conversion to the
512 type corresponding to its mode, then do a nop conversion
513 to TYPE. */
514 else if (TREE_CODE (type) == ENUMERAL_TYPE
515 || outprec != GET_MODE_BITSIZE (TYPE_MODE (type)))
516 return build1 (NOP_EXPR, type,
517 convert (lang_hooks.types.type_for_mode
518 (TYPE_MODE (type), TYPE_UNSIGNED (type)),
519 expr));
520
521 /* Here detect when we can distribute the truncation down past some
522 arithmetic. For example, if adding two longs and converting to an
523 int, we can equally well convert both to ints and then add.
524 For the operations handled here, such truncation distribution
525 is always safe.
526 It is desirable in these cases:
527 1) when truncating down to full-word from a larger size
528 2) when truncating takes no work.
529 3) when at least one operand of the arithmetic has been extended
530 (as by C's default conversions). In this case we need two conversions
531 if we do the arithmetic as already requested, so we might as well
532 truncate both and then combine. Perhaps that way we need only one.
533
534 Note that in general we cannot do the arithmetic in a type
535 shorter than the desired result of conversion, even if the operands
536 are both extended from a shorter type, because they might overflow
537 if combined in that type. The exceptions to this--the times when
538 two narrow values can be combined in their narrow type even to
539 make a wider result--are handled by "shorten" in build_binary_op. */
540
541 switch (ex_form)
542 {
543 case RSHIFT_EXPR:
544 /* We can pass truncation down through right shifting
545 when the shift count is a nonpositive constant. */
546 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
547 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
548 goto trunc1;
549 break;
550
551 case LSHIFT_EXPR:
552 /* We can pass truncation down through left shifting
553 when the shift count is a nonnegative constant and
554 the target type is unsigned. */
555 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
556 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
557 && TYPE_UNSIGNED (type)
558 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
559 {
560 /* If shift count is less than the width of the truncated type,
561 really shift. */
562 if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
563 /* In this case, shifting is like multiplication. */
564 goto trunc1;
565 else
566 {
567 /* If it is >= that width, result is zero.
568 Handling this with trunc1 would give the wrong result:
569 (int) ((long long) a << 32) is well defined (as 0)
570 but (int) a << 32 is undefined and would get a
571 warning. */
572
573 tree t = build_int_cst (type, 0);
574
575 /* If the original expression had side-effects, we must
576 preserve it. */
577 if (TREE_SIDE_EFFECTS (expr))
578 return build2 (COMPOUND_EXPR, type, expr, t);
579 else
580 return t;
581 }
582 }
583 break;
584
585 case MAX_EXPR:
586 case MIN_EXPR:
587 case MULT_EXPR:
588 {
589 tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
590 tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);
591
592 /* Don't distribute unless the output precision is at least as big
593 as the actual inputs. Otherwise, the comparison of the
594 truncated values will be wrong. */
595 if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
596 && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
597 /* If signedness of arg0 and arg1 don't match,
598 we can't necessarily find a type to compare them in. */
599 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
600 == TYPE_UNSIGNED (TREE_TYPE (arg1))))
601 goto trunc1;
602 break;
603 }
604
605 case PLUS_EXPR:
606 case MINUS_EXPR:
607 case BIT_AND_EXPR:
608 case BIT_IOR_EXPR:
609 case BIT_XOR_EXPR:
610 trunc1:
611 {
612 tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
613 tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);
614
615 if (outprec >= BITS_PER_WORD
616 || TRULY_NOOP_TRUNCATION (outprec, inprec)
617 || inprec > TYPE_PRECISION (TREE_TYPE (arg0))
618 || inprec > TYPE_PRECISION (TREE_TYPE (arg1)))
619 {
620 /* Do the arithmetic in type TYPEX,
621 then convert result to TYPE. */
622 tree typex = type;
623
624 /* Can't do arithmetic in enumeral types
625 so use an integer type that will hold the values. */
626 if (TREE_CODE (typex) == ENUMERAL_TYPE)
627 typex = lang_hooks.types.type_for_size
628 (TYPE_PRECISION (typex), TYPE_UNSIGNED (typex));
629
630 /* But now perhaps TYPEX is as wide as INPREC.
631 In that case, do nothing special here.
632 (Otherwise we would recurse infinitely in convert.) */
633 if (TYPE_PRECISION (typex) != inprec)
634 {
635 /* Don't do unsigned arithmetic where signed was wanted,
636 or vice versa.
637 Exception: if both of the original operands were
638 unsigned then we can safely do the work as unsigned.
639 Exception: shift operations take their type solely
640 from the first argument.
641 Exception: the LSHIFT_EXPR case above requires that
642 we perform this operation unsigned lest we produce
643 signed-overflow undefinedness.
644 And we may need to do it as unsigned
645 if we truncate to the original size. */
646 if (TYPE_UNSIGNED (TREE_TYPE (expr))
647 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
648 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
649 || ex_form == LSHIFT_EXPR
650 || ex_form == RSHIFT_EXPR
651 || ex_form == LROTATE_EXPR
652 || ex_form == RROTATE_EXPR))
653 || ex_form == LSHIFT_EXPR
654 /* If we have !flag_wrapv, and either ARG0 or
655 ARG1 is of a signed type, we have to do
656 PLUS_EXPR or MINUS_EXPR in an unsigned
657 type. Otherwise, we would introduce
658 signed-overflow undefinedness. */
659 || (!flag_wrapv
660 && (ex_form == PLUS_EXPR
661 || ex_form == MINUS_EXPR)
662 && (!TYPE_UNSIGNED (TREE_TYPE (arg0))
663 || !TYPE_UNSIGNED (TREE_TYPE (arg1)))))
664 typex = lang_hooks.types.unsigned_type (typex);
665 else
666 typex = lang_hooks.types.signed_type (typex);
667 return convert (type,
668 fold_build2 (ex_form, typex,
669 convert (typex, arg0),
670 convert (typex, arg1)));
671 }
672 }
673 }
674 break;
675
676 case NEGATE_EXPR:
677 case BIT_NOT_EXPR:
678 /* This is not correct for ABS_EXPR,
679 since we must test the sign before truncation. */
680 {
681 tree typex;
682
683 /* Don't do unsigned arithmetic where signed was wanted,
684 or vice versa. */
685 if (TYPE_UNSIGNED (TREE_TYPE (expr)))
686 typex = lang_hooks.types.unsigned_type (type);
687 else
688 typex = lang_hooks.types.signed_type (type);
689 return convert (type,
690 fold_build1 (ex_form, typex,
691 convert (typex,
692 TREE_OPERAND (expr, 0))));
693 }
694
695 case NOP_EXPR:
696 /* Don't introduce a
697 "can't convert between vector values of different size" error. */
698 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == VECTOR_TYPE
699 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (expr, 0))))
700 != GET_MODE_SIZE (TYPE_MODE (type))))
701 break;
702 /* If truncating after truncating, might as well do all at once.
703 If truncating after extending, we may get rid of wasted work. */
704 return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));
705
706 case COND_EXPR:
707 /* It is sometimes worthwhile to push the narrowing down through
708 the conditional, and it never loses. */
709 return fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
710 convert (type, TREE_OPERAND (expr, 1)),
711 convert (type, TREE_OPERAND (expr, 2)));
712
713 default:
714 break;
715 }
716
717 return build1 (CONVERT_EXPR, type, expr);
718
719 case REAL_TYPE:
720 return build1 (FIX_TRUNC_EXPR, type, expr);
721
722 case COMPLEX_TYPE:
723 return convert (type,
724 fold_build1 (REALPART_EXPR,
725 TREE_TYPE (TREE_TYPE (expr)), expr));
726
727 case VECTOR_TYPE:
728 if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
729 {
730 error ("can't convert between vector values of different size");
731 return error_mark_node;
732 }
733 return build1 (VIEW_CONVERT_EXPR, type, expr);
734
735 default:
736 error ("aggregate value used where an integer was expected");
737 return convert (type, integer_zero_node);
738 }
739 }
740
741 /* Convert EXPR to the complex type TYPE in the usual ways. */
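/* For instance (illustrative): converting a double to _Complex float builds
   the pair (value converted to float, 0.0f); converting between two distinct
   complex types in the general case wraps the operand in save_expr so its
   real and imaginary parts can be taken without evaluating it twice.  */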
742
743 tree
744 convert_to_complex (tree type, tree expr)
745 {
746 tree subtype = TREE_TYPE (type);
747
748 switch (TREE_CODE (TREE_TYPE (expr)))
749 {
750 case REAL_TYPE:
751 case INTEGER_TYPE:
752 case ENUMERAL_TYPE:
753 case BOOLEAN_TYPE:
754 return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
755 convert (subtype, integer_zero_node));
756
757 case COMPLEX_TYPE:
758 {
759 tree elt_type = TREE_TYPE (TREE_TYPE (expr));
760
761 if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
762 return expr;
763 else if (TREE_CODE (expr) == COMPLEX_EXPR)
764 return fold_build2 (COMPLEX_EXPR, type,
765 convert (subtype, TREE_OPERAND (expr, 0)),
766 convert (subtype, TREE_OPERAND (expr, 1)));
767 else
768 {
769 expr = save_expr (expr);
770 return
771 fold_build2 (COMPLEX_EXPR, type,
772 convert (subtype,
773 fold_build1 (REALPART_EXPR,
774 TREE_TYPE (TREE_TYPE (expr)),
775 expr)),
776 convert (subtype,
777 fold_build1 (IMAGPART_EXPR,
778 TREE_TYPE (TREE_TYPE (expr)),
779 expr)));
780 }
781 }
782
783 case POINTER_TYPE:
784 case REFERENCE_TYPE:
785 error ("pointer value used where a complex was expected");
786 return convert_to_complex (type, integer_zero_node);
787
788 default:
789 error ("aggregate value used where a complex was expected");
790 return convert_to_complex (type, integer_zero_node);
791 }
792 }
793
794 /* Convert EXPR to the vector type TYPE in the usual ways. */
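/* For instance (illustrative): a 64-bit integer can be reinterpreted as a
   vector of eight QImode elements through a VIEW_CONVERT_EXPR, because the
   two types occupy the same number of bits; vectors of differing total
   size are rejected with an error.  */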
795
796 tree
797 convert_to_vector (tree type, tree expr)
798 {
799 switch (TREE_CODE (TREE_TYPE (expr)))
800 {
801 case INTEGER_TYPE:
802 case VECTOR_TYPE:
803 if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
804 {
805 error ("can't convert between vector values of different size");
806 return error_mark_node;
807 }
808 return build1 (VIEW_CONVERT_EXPR, type, expr);
809
810 default:
811 error ("can't convert value to a vector");
812 return error_mark_node;
813 }
814 }